From f69e491c56e4e87b9a35c922a55a19658bd82030 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Fri, 30 Apr 2021 22:10:58 -0500 Subject: [PATCH 01/21] Update .gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index c485f401..255b1cf6 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ +# User data +.DS_Store + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] From 4f4be8d264398c3251baae5edc9be37a97c3f753 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 20 Sep 2021 16:44:43 -0500 Subject: [PATCH 02/21] Move functions to `element-data-loader` --- element_array_ephys/__init__.py | 69 --------------------------------- 1 file changed, 69 deletions(-) diff --git a/element_array_ephys/__init__.py b/element_array_ephys/__init__.py index 3637d2e5..e69de29b 100644 --- a/element_array_ephys/__init__.py +++ b/element_array_ephys/__init__.py @@ -1,69 +0,0 @@ -import datajoint as dj -import pathlib -import uuid -import hashlib - - -dj.config['enable_python_native_blobs'] = True - - -def find_full_path(root_directories, relative_path): - """ - Given a relative path, search and return the full-path - from provided potential root directories (in the given order) - :param root_directories: potential root directories - :param relative_path: the relative path to find the valid root directory - :return: root_directory (pathlib.Path object) - """ - relative_path = pathlib.Path(relative_path) - - if relative_path.exists(): - return relative_path - - # turn to list if only a single root directory is provided - if isinstance(root_directories, (str, pathlib.Path)): - root_directories = [root_directories] - - for root_dir in root_directories: - if (pathlib.Path(root_dir) / relative_path).exists(): - return pathlib.Path(root_dir) / relative_path - - raise FileNotFoundError('No valid full-path found (from {})' - ' for {}'.format(root_directories, relative_path)) - - -def find_root_directory(root_directories, full_path): - """ - Given multiple potential root directories and a full-path, - search and return one directory that is the parent of the given path - :param root_directories: potential root directories - :param full_path: the relative path to search the root directory - :return: full-path (pathlib.Path object) - """ - full_path = pathlib.Path(full_path) - - if not full_path.exists(): - raise FileNotFoundError(f'{full_path} does not exist!') - - # turn to list if only a single root directory is provided - if isinstance(root_directories, (str, pathlib.Path)): - root_directories = [root_directories] - - try: - return next(pathlib.Path(root_dir) for root_dir in root_directories - if pathlib.Path(root_dir) in set(full_path.parents)) - - except StopIteration: - raise FileNotFoundError('No valid root directory found (from {})' - ' for {}'.format(root_directories, full_path)) - - -def dict_to_uuid(key): - """ - Given a dictionary `key`, returns a hash string as UUID - """ - hashed = hashlib.md5() - for k, v in sorted(key.items()): - hashed.update(str(k).encode()) - hashed.update(str(v).encode()) - return uuid.UUID(hex=hashed.hexdigest()) \ No newline at end of file From ffaf60b72b648229b47e76ff9bb75ddedd56ef13 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 27 Sep 2021 18:20:34 -0500 Subject: [PATCH 03/21] Add element_data_loader for multiple root dirs --- README.md | 27 ++++++++---- element_array_ephys/ephys.py | 64 +++++++++++++++++----------- element_array_ephys/ephys_chronic.py | 64 +++++++++++++++++----------- 3 files 
changed, 96 insertions(+), 59 deletions(-) diff --git a/README.md b/README.md index 802db8bb..b1f81df9 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,4 @@ # DataJoint Element - Array Electrophysiology Element -DataJoint Element for array electrophysiology. This repository features DataJoint pipeline design for extracellular array electrophysiology, with ***Neuropixels*** probe and ***kilosort*** spike sorting method. @@ -45,14 +44,24 @@ This ephys element features automatic ingestion for spike sorting results from t + ***WaveformSet*** - A set of spike waveforms for units from a given CuratedClustering ## Installation -``` -pip install element-array-ephys -``` - -If you already have an older version of ***element-array-ephys*** installed using `pip`, upgrade with -``` -pip install --upgrade element-array-ephys -``` + ++ Install `element-array-ephys` + ``` + pip install element-array-ephys + ``` + ++ Upgrade `element-array-ephys` previously installed with `pip` + ``` + pip install --upgrade element-array-ephys + ``` + ++ Install `element-data-loader` + + + `element-data-loader` is a dependency of `element-array-ephys`, however it is not contained within `requirements.txt`. + + ``` + pip install "element-data-loader @ git+https://github.com/datajoint/element-data-loader" + ``` ## Usage diff --git a/element_array_ephys/ephys.py b/element_array_ephys/ephys.py index 3eec7842..bf9fe672 100644 --- a/element_array_ephys/ephys.py +++ b/element_array_ephys/ephys.py @@ -4,9 +4,10 @@ import numpy as np import inspect import importlib +import element_data_loader from .readers import spikeglx, kilosort, openephys -from . import probe, find_full_path, find_root_directory, dict_to_uuid +from . import probe schema = dj.schema() @@ -46,7 +47,6 @@ def activate(ephys_schema_name, probe_schema_name=None, *, create_schema=True, global _linking_module _linking_module = linking_module - # activate probe.activate(probe_schema_name, create_schema=create_schema, create_tables=create_tables) schema.activate(ephys_schema_name, create_schema=create_schema, @@ -140,14 +140,16 @@ class EphysFile(dj.Part): """ def make(self, key): - sess_dir = pathlib.Path(get_session_directory(key)) + + session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), + get_session_directory(key)) inserted_probe_serial_number = (ProbeInsertion * probe.Probe & key).fetch1('probe') # search session dir and determine acquisition software for ephys_pattern, ephys_acq_type in zip(['*.ap.meta', '*.oebin'], ['SpikeGLX', 'Open Ephys']): - ephys_meta_filepaths = [fp for fp in sess_dir.rglob(ephys_pattern)] + ephys_meta_filepaths = [fp for fp in session_dir.rglob(ephys_pattern)] if ephys_meta_filepaths: acq_software = ephys_acq_type break @@ -187,12 +189,12 @@ def make(self, key): 'acq_software': acq_software, 'sampling_rate': spikeglx_meta.meta['imSampRate']}) - root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath) + root_dir = element_data_loader.utils.find_root_directory(get_ephys_root_data_dir(), meta_filepath) self.EphysFile.insert1({ **key, 'file_path': meta_filepath.relative_to(root_dir).as_posix()}) elif acq_software == 'Open Ephys': - dataset = openephys.OpenEphys(sess_dir) + dataset = openephys.OpenEphys(session_dir) for serial_number, probe_data in dataset.probes.items(): if str(serial_number) == inserted_probe_serial_number: break @@ -220,7 +222,7 @@ def make(self, key): 'acq_software': acq_software, 'sampling_rate': probe_data.ap_meta['sample_rate']}) - root_dir = find_root_directory( + root_dir = 
element_data_loader.utils.find_root_directory( get_ephys_root_data_dir(), probe_data.recording_info['recording_files'][0]) self.EphysFile.insert([{**key, @@ -290,8 +292,12 @@ def make(self, key): shank, shank_col, shank_row, _ = spikeglx_recording.apmeta.shankmap['data'][recorded_site] electrode_keys.append(probe_electrodes[(shank, shank_col, shank_row)]) elif acq_software == 'Open Ephys': - sess_dir = pathlib.Path(get_session_directory(key)) - loaded_oe = openephys.OpenEphys(sess_dir) + + session_dir = element_data_loader.utils.find_full_path( + get_ephys_root_data_dir(), + get_session_directory(key)) + + loaded_oe = openephys.OpenEphys(session_dir) oe_probe = loaded_oe.probes[probe_sn] lfp_channel_ind = np.arange( @@ -358,7 +364,7 @@ def insert_new_params(cls, processing_method: str, paramset_idx: int, 'paramset_idx': paramset_idx, 'paramset_desc': paramset_desc, 'params': params, - 'param_set_hash': dict_to_uuid(params)} + 'param_set_hash': element_data_loader.utils.dict_to_uuid(params)} param_query = cls & {'param_set_hash': param_dict['param_set_hash']} if param_query: # If the specified param-set already exists @@ -420,7 +426,7 @@ class Clustering(dj.Imported): def make(self, key): task_mode, output_dir = (ClusteringTask & key).fetch1( 'task_mode', 'clustering_output_dir') - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) if task_mode == 'load': kilosort_dataset = kilosort.Kilosort(kilosort_dir) # check if the directory is a valid Kilosort output @@ -450,8 +456,9 @@ class Curation(dj.Manual): def create1_from_clustering_task(self, key, curation_note=''): """ - A convenient function to create a new corresponding "Curation" - for a particular "ClusteringTask" + A function to create a new corresponding "Curation" for a particular + "ClusteringTask", which assumes that no curation was performed on the + dataset """ if key not in Clustering(): raise ValueError(f'No corresponding entry in Clustering available' @@ -459,14 +466,16 @@ def create1_from_clustering_task(self, key, curation_note=''): task_mode, output_dir = (ClusteringTask & key).fetch1( 'task_mode', 'clustering_output_dir') - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) creation_time, is_curated, is_qc = kilosort.extract_clustering_info(kilosort_dir) # Synthesize curation_id curation_id = dj.U().aggr(self & key, n='ifnull(max(curation_id)+1,1)').fetch1('n') self.insert1({**key, 'curation_id': curation_id, - 'curation_time': creation_time, 'curation_output_dir': output_dir, - 'quality_control': is_qc, 'manual_curation': is_curated, + 'curation_time': creation_time, + 'curation_output_dir': output_dir, + 'quality_control': is_qc, + 'manual_curation': is_curated, 'curation_note': curation_note}) @@ -493,7 +502,7 @@ class Unit(dj.Part): def make(self, key): output_dir = (Curation & key).fetch1('curation_output_dir') - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) kilosort_dataset = kilosort.Kilosort(kilosort_dir) acq_software = (EphysRecording & key).fetch1('acq_software') @@ -571,7 +580,7 @@ class Waveform(dj.Part): def make(self, key): output_dir = (Curation & key).fetch1('curation_output_dir') - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir 
= element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) kilosort_dataset = kilosort.Kilosort(kilosort_dir) @@ -613,8 +622,9 @@ def yield_unit_waveforms(): spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) neuropixels_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) elif acq_software == 'Open Ephys': - sess_dir = pathlib.Path(get_session_directory(key)) - openephys_dataset = openephys.OpenEphys(sess_dir) + session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), + get_session_directory(key)) + openephys_dataset = openephys.OpenEphys(session_dir) neuropixels_recording = openephys_dataset.probes[probe_serial_number] def yield_unit_waveforms(): @@ -654,16 +664,17 @@ def get_spikeglx_meta_filepath(ephys_recording_key): & 'file_path LIKE "%.ap.meta"').fetch1('file_path') try: - spikeglx_meta_filepath = find_full_path(get_ephys_root_data_dir(), + spikeglx_meta_filepath = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), spikeglx_meta_filepath) except FileNotFoundError: # if not found, search in session_dir again if not spikeglx_meta_filepath.exists(): - sess_dir = pathlib.Path(get_session_directory(ephys_recording_key)) + session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), + get_session_directory(ephys_recording_key)) inserted_probe_serial_number = (ProbeInsertion * probe.Probe & ephys_recording_key).fetch1('probe') - spikeglx_meta_filepaths = [fp for fp in sess_dir.rglob('*.ap.meta')] + spikeglx_meta_filepaths = [fp for fp in session_dir.rglob('*.ap.meta')] for meta_filepath in spikeglx_meta_filepaths: spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: @@ -696,8 +707,9 @@ def get_neuropixels_channel2electrode_map(ephys_recording_key, acq_software): for recorded_site, (shank, shank_col, shank_row, _) in enumerate( spikeglx_meta.shankmap['data'])} elif acq_software == 'Open Ephys': - sess_dir = pathlib.Path(get_session_directory(ephys_recording_key)) - openephys_dataset = openephys.OpenEphys(sess_dir) + session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), + get_session_directory(ephys_recording_key)) + openephys_dataset = openephys.OpenEphys(session_dir) probe_serial_number = (ProbeInsertion & ephys_recording_key).fetch1('probe') probe_dataset = openephys_dataset.probes[probe_serial_number] @@ -723,7 +735,7 @@ def generate_electrode_config(probe_type: str, electrodes: list): :return: a dict representing a key of the probe.ElectrodeConfig table """ # compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) - electrode_config_hash = dict_to_uuid({k['electrode']: k for k in electrodes}) + electrode_config_hash = element_data_loader.utils.dict_to_uuid({k['electrode']: k for k in electrodes}) electrode_list = sorted([k['electrode'] for k in electrodes]) electrode_gaps = ([-1] diff --git a/element_array_ephys/ephys_chronic.py b/element_array_ephys/ephys_chronic.py index 95268443..cf9de6ff 100644 --- a/element_array_ephys/ephys_chronic.py +++ b/element_array_ephys/ephys_chronic.py @@ -4,9 +4,10 @@ import numpy as np import inspect import importlib +import element_data_loader from .readers import spikeglx, kilosort, openephys -from . import probe, find_full_path, find_root_directory, dict_to_uuid +from . 
import probe schema = dj.schema() @@ -47,7 +48,6 @@ def activate(ephys_schema_name, probe_schema_name=None, *, create_schema=True, global _linking_module _linking_module = linking_module - # activate probe.activate(probe_schema_name, create_schema=create_schema, create_tables=create_tables) schema.activate(ephys_schema_name, create_schema=create_schema, @@ -143,14 +143,15 @@ class EphysFile(dj.Part): """ def make(self, key): - sess_dir = pathlib.Path(get_session_directory(key)) + session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), + get_session_directory(key)) inserted_probe_serial_number = (ProbeInsertion * probe.Probe & key).fetch1('probe') # search session dir and determine acquisition software for ephys_pattern, ephys_acq_type in zip(['*.ap.meta', '*.oebin'], ['SpikeGLX', 'Open Ephys']): - ephys_meta_filepaths = [fp for fp in sess_dir.rglob(ephys_pattern)] + ephys_meta_filepaths = [fp for fp in session_dir.rglob(ephys_pattern)] if ephys_meta_filepaths: acq_software = ephys_acq_type break @@ -190,12 +191,14 @@ def make(self, key): 'acq_software': acq_software, 'sampling_rate': spikeglx_meta.meta['imSampRate']}) - root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath) + root_dir = element_data_loader.utils.find_root_directory( + get_ephys_root_data_dir(), + meta_filepath) self.EphysFile.insert1({ **key, 'file_path': meta_filepath.relative_to(root_dir).as_posix()}) elif acq_software == 'Open Ephys': - dataset = openephys.OpenEphys(sess_dir) + dataset = openephys.OpenEphys(session_dir) for serial_number, probe_data in dataset.probes.items(): if str(serial_number) == inserted_probe_serial_number: break @@ -223,7 +226,7 @@ def make(self, key): 'acq_software': acq_software, 'sampling_rate': probe_data.ap_meta['sample_rate']}) - root_dir = find_root_directory( + root_dir = element_data_loader.utils.find_root_directory( get_ephys_root_data_dir(), probe_data.recording_info['recording_files'][0]) self.EphysFile.insert([{**key, @@ -293,8 +296,10 @@ def make(self, key): shank, shank_col, shank_row, _ = spikeglx_recording.apmeta.shankmap['data'][recorded_site] electrode_keys.append(probe_electrodes[(shank, shank_col, shank_row)]) elif acq_software == 'Open Ephys': - sess_dir = pathlib.Path(get_session_directory(key)) - loaded_oe = openephys.OpenEphys(sess_dir) + session_dir = element_data_loader.utils.find_full_path( + get_ephys_root_data_dir(), + get_session_directory(key)) + loaded_oe = openephys.OpenEphys(session_dir) oe_probe = loaded_oe.probes[probe_sn] lfp_channel_ind = np.arange( @@ -361,7 +366,7 @@ def insert_new_params(cls, processing_method: str, paramset_idx: int, 'paramset_idx': paramset_idx, 'paramset_desc': paramset_desc, 'params': params, - 'param_set_hash': dict_to_uuid(params)} + 'param_set_hash': element_data_loader.utils.dict_to_uuid(params)} param_query = cls & {'param_set_hash': param_dict['param_set_hash']} if param_query: # If the specified param-set already exists @@ -423,7 +428,7 @@ class Clustering(dj.Imported): def make(self, key): task_mode, output_dir = (ClusteringTask & key).fetch1( 'task_mode', 'clustering_output_dir') - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) if task_mode == 'load': kilosort_dataset = kilosort.Kilosort(kilosort_dir) # check if the directory is a valid Kilosort output @@ -462,7 +467,7 @@ def create1_from_clustering_task(self, key, curation_note=''): task_mode, output_dir = 
(ClusteringTask & key).fetch1( 'task_mode', 'clustering_output_dir') - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) creation_time, is_curated, is_qc = kilosort.extract_clustering_info(kilosort_dir) # Synthesize curation_id @@ -496,7 +501,7 @@ class Unit(dj.Part): def make(self, key): output_dir = (Curation & key).fetch1('curation_output_dir') - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) kilosort_dataset = kilosort.Kilosort(kilosort_dir) acq_software = (EphysRecording & key).fetch1('acq_software') @@ -574,7 +579,9 @@ class Waveform(dj.Part): def make(self, key): output_dir = (Curation & key).fetch1('curation_output_dir') - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = element_data_loader.utils.find_full_path( + get_ephys_root_data_dir(), + output_dir) kilosort_dataset = kilosort.Kilosort(kilosort_dir) @@ -616,8 +623,10 @@ def yield_unit_waveforms(): spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) neuropixels_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) elif acq_software == 'Open Ephys': - sess_dir = pathlib.Path(get_session_directory(key)) - openephys_dataset = openephys.OpenEphys(sess_dir) + session_dir = element_data_loader.utils.find_full_path( + get_ephys_root_data_dir(), + get_session_directory(key)) + openephys_dataset = openephys.OpenEphys(session_dir) neuropixels_recording = openephys_dataset.probes[probe_serial_number] def yield_unit_waveforms(): @@ -657,16 +666,19 @@ def get_spikeglx_meta_filepath(ephys_recording_key): & 'file_path LIKE "%.ap.meta"').fetch1('file_path') try: - spikeglx_meta_filepath = find_full_path(get_ephys_root_data_dir(), - spikeglx_meta_filepath) + spikeglx_meta_filepath = element_data_loader.utils.find_full_path( + get_ephys_root_data_dir(), + spikeglx_meta_filepath) except FileNotFoundError: # if not found, search in session_dir again if not spikeglx_meta_filepath.exists(): - sess_dir = pathlib.Path(get_session_directory(ephys_recording_key)) + session_dir = element_data_loader.utils.find_full_path( + get_ephys_root_data_dir(), + get_session_directory(ephys_recording_key)) inserted_probe_serial_number = (ProbeInsertion * probe.Probe & ephys_recording_key).fetch1('probe') - spikeglx_meta_filepaths = [fp for fp in sess_dir.rglob('*.ap.meta')] + spikeglx_meta_filepaths = [fp for fp in session_dir.rglob('*.ap.meta')] for meta_filepath in spikeglx_meta_filepaths: spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: @@ -674,7 +686,8 @@ def get_spikeglx_meta_filepath(ephys_recording_key): break else: raise FileNotFoundError( - 'No SpikeGLX data found for probe insertion: {}'.format(ephys_recording_key)) + 'No SpikeGLX data found for probe insertion: {}'.format( + ephys_recording_key)) return spikeglx_meta_filepath @@ -699,8 +712,10 @@ def get_neuropixels_channel2electrode_map(ephys_recording_key, acq_software): for recorded_site, (shank, shank_col, shank_row, _) in enumerate( spikeglx_meta.shankmap['data'])} elif acq_software == 'Open Ephys': - sess_dir = pathlib.Path(get_session_directory(ephys_recording_key)) - openephys_dataset = openephys.OpenEphys(sess_dir) + session_dir = element_data_loader.utils.find_full_path( + get_ephys_root_data_dir(), + get_session_directory(ephys_recording_key)) + 
openephys_dataset = openephys.OpenEphys(session_dir) probe_serial_number = (ProbeInsertion & ephys_recording_key).fetch1('probe') probe_dataset = openephys_dataset.probes[probe_serial_number] @@ -726,7 +741,8 @@ def generate_electrode_config(probe_type: str, electrodes: list): :return: a dict representing a key of the probe.ElectrodeConfig table """ # compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) - electrode_config_hash = dict_to_uuid({k['electrode']: k for k in electrodes}) + electrode_config_hash = element_data_loader.utils.dict_to_uuid( + {k['electrode']: k for k in electrodes}) electrode_list = sorted([k['electrode'] for k in electrodes]) electrode_gaps = ([-1] From b6b39c093a7603eba1a40b9b3b82db1c6294aac9 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 27 Sep 2021 18:22:27 -0500 Subject: [PATCH 04/21] Update author --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index eda1c8d6..fb96d4c0 100644 --- a/setup.py +++ b/setup.py @@ -19,8 +19,8 @@ description="DataJoint Element for Extracellular Array Electrophysiology", long_description=long_description, long_description_content_type='text/markdown', - author='DataJoint NEURO', - author_email='info@vathes.com', + author='DataJoint', + author_email='info@datajoint.com', license='MIT', url=f'https://github.com/datajoint/{pkg_name.replace("_", "-")}', keywords='neuroscience electrophysiology science datajoint', From 2be1f08af1d428570f5155f7d11463646805886b Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 27 Sep 2021 19:56:32 -0500 Subject: [PATCH 05/21] Fix import --- element_array_ephys/ephys.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/ephys.py b/element_array_ephys/ephys.py index bf9fe672..bcfd1903 100644 --- a/element_array_ephys/ephys.py +++ b/element_array_ephys/ephys.py @@ -4,7 +4,7 @@ import numpy as np import inspect import importlib -import element_data_loader +import element_data_loader.utils from .readers import spikeglx, kilosort, openephys from . 
import probe From 68ef14b180c7fd5d61bbac1a9d6ec9d4a7c0530e Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 28 Sep 2021 10:03:41 -0500 Subject: [PATCH 06/21] [WIP] Print directory path --- element_array_ephys/readers/openephys.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/readers/openephys.py b/element_array_ephys/readers/openephys.py index 737e710c..6b9d6eb5 100644 --- a/element_array_ephys/readers/openephys.py +++ b/element_array_ephys/readers/openephys.py @@ -32,7 +32,7 @@ class OpenEphys: def __init__(self, experiment_dir): self.sess_dir = pathlib.Path(experiment_dir) - + print('Session directory parent', self.sess_dir.parent, self.sess_dir) openephys_file = pyopenephys.File(self.sess_dir.parent) # this is on the Record Node level # extract the "recordings" for this session From 2233c5ddff6351b541125b53cb6c49a424cacc72 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 28 Sep 2021 10:12:30 -0500 Subject: [PATCH 07/21] Fix OpenEphys session path --- element_array_ephys/readers/openephys.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/element_array_ephys/readers/openephys.py b/element_array_ephys/readers/openephys.py index 6b9d6eb5..18b65771 100644 --- a/element_array_ephys/readers/openephys.py +++ b/element_array_ephys/readers/openephys.py @@ -31,13 +31,12 @@ class OpenEphys: def __init__(self, experiment_dir): - self.sess_dir = pathlib.Path(experiment_dir) - print('Session directory parent', self.sess_dir.parent, self.sess_dir) - openephys_file = pyopenephys.File(self.sess_dir.parent) # this is on the Record Node level + self.session_dir = pathlib.Path(experiment_dir) + openephys_file = pyopenephys.File(self.session_dir) # this is on the Record Node level # extract the "recordings" for this session self.experiment = next(experiment for experiment in openephys_file.experiments - if pathlib.Path(experiment.absolute_foldername) == self.sess_dir) + if pathlib.Path(experiment.absolute_foldername) == self.session_dir) self.recording_time = self.experiment.datetime From ab426c1ed9ea14b960bcd3d3e1970c74ef020143 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 28 Sep 2021 11:57:02 -0500 Subject: [PATCH 08/21] Update comments --- element_array_ephys/ephys.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/element_array_ephys/ephys.py b/element_array_ephys/ephys.py index bcfd1903..1704bf4f 100644 --- a/element_array_ephys/ephys.py +++ b/element_array_ephys/ephys.py @@ -57,9 +57,10 @@ def activate(ephys_schema_name, probe_schema_name=None, *, create_schema=True, def get_ephys_root_data_dir() -> list: """ - All data paths, directories in DataJoint Elements are recommended to be stored as - relative paths, with respect to some user-configured "root" directory, - which varies from machine to machine (e.g. different mounted drive locations) + All data paths, directories in DataJoint Elements are recommended to be + stored as relative paths, with respect to some user-configured "root" + directory, which varies from machine to machine (e.g. 
different mounted + drive locations) get_ephys_root_data_dir() -> list This user-provided function retrieves the possible root data directories @@ -78,7 +79,7 @@ def get_session_directory(session_key: dict) -> str: Retrieve the session directory containing the recorded Neuropixels data for a given Session :param session_key: a dictionary of one Session `key` - :return: a string for full path to the session directory + :return: a string for relative or full path to the session directory """ return _linking_module.get_session_directory(session_key) @@ -448,7 +449,7 @@ class Curation(dj.Manual): curation_id: int --- curation_time: datetime # time of generation of this set of curated clustering results - curation_output_dir: varchar(255) # output directory of the curated results, relative to clustering root data directory + curation_output_dir: varchar(255) # output directory of the curated results, relative to root data directory quality_control: bool # has this clustering result undergone quality control? manual_curation: bool # has manual curation been performed on this clustering result? curation_note='': varchar(2000) From 49c554bea6a2431140b553098f889f717600da3a Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 28 Sep 2021 15:27:38 -0500 Subject: [PATCH 09/21] [WIP] Update directory path --- element_array_ephys/readers/openephys.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/readers/openephys.py b/element_array_ephys/readers/openephys.py index 18b65771..359cf40d 100644 --- a/element_array_ephys/readers/openephys.py +++ b/element_array_ephys/readers/openephys.py @@ -32,7 +32,7 @@ class OpenEphys: def __init__(self, experiment_dir): self.session_dir = pathlib.Path(experiment_dir) - openephys_file = pyopenephys.File(self.session_dir) # this is on the Record Node level + openephys_file = pyopenephys.File(self.session_dir.parent) # this is on the Record Node level # extract the "recordings" for this session self.experiment = next(experiment for experiment in openephys_file.experiments From b98192b8ca2cde9e2babbc48b383673a5ae15a94 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 28 Sep 2021 16:30:42 -0500 Subject: [PATCH 10/21] [WIP] Add print statement --- element_array_ephys/readers/openephys.py | 1 + 1 file changed, 1 insertion(+) diff --git a/element_array_ephys/readers/openephys.py b/element_array_ephys/readers/openephys.py index 359cf40d..7c815496 100644 --- a/element_array_ephys/readers/openephys.py +++ b/element_array_ephys/readers/openephys.py @@ -32,6 +32,7 @@ class OpenEphys: def __init__(self, experiment_dir): self.session_dir = pathlib.Path(experiment_dir) + print('OpenEphys session directory', self.session_dir, self.session_dir.parent) openephys_file = pyopenephys.File(self.session_dir.parent) # this is on the Record Node level # extract the "recordings" for this session From cf533a275ca1220136472bbdf30048dc9f8c92e9 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 28 Sep 2021 21:22:20 -0500 Subject: [PATCH 11/21] Remove test print statement --- element_array_ephys/readers/openephys.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/readers/openephys.py b/element_array_ephys/readers/openephys.py index 7c815496..7e8b240d 100644 --- a/element_array_ephys/readers/openephys.py +++ b/element_array_ephys/readers/openephys.py @@ -32,7 +32,7 @@ class OpenEphys: def __init__(self, experiment_dir): self.session_dir = pathlib.Path(experiment_dir) - print('OpenEphys session directory', 
self.session_dir, self.session_dir.parent) + openephys_file = pyopenephys.File(self.session_dir.parent) # this is on the Record Node level # extract the "recordings" for this session From 44be35568edfab666d48dcaa30a02e72ea65159f Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Thu, 30 Sep 2021 12:08:13 -0500 Subject: [PATCH 12/21] Fix module import --- element_array_ephys/ephys_chronic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/ephys_chronic.py b/element_array_ephys/ephys_chronic.py index cf9de6ff..545ca9cd 100644 --- a/element_array_ephys/ephys_chronic.py +++ b/element_array_ephys/ephys_chronic.py @@ -4,7 +4,7 @@ import numpy as np import inspect import importlib -import element_data_loader +import element_data_loader.utils from .readers import spikeglx, kilosort, openephys from . import probe From 139e99b4d8dfec9c267dd8718b533cdb5a59bc00 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 4 Oct 2021 11:40:35 -0500 Subject: [PATCH 13/21] Update module import --- element_array_ephys/ephys.py | 44 ++++++++++++++++++------------------ 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/element_array_ephys/ephys.py b/element_array_ephys/ephys.py index 1704bf4f..455afe8b 100644 --- a/element_array_ephys/ephys.py +++ b/element_array_ephys/ephys.py @@ -4,7 +4,7 @@ import numpy as np import inspect import importlib -import element_data_loader.utils +from element_data_loader.utils import find_root_directory, find_full_path, dict_to_uuid from .readers import spikeglx, kilosort, openephys from . import probe @@ -142,8 +142,8 @@ class EphysFile(dj.Part): def make(self, key): - session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), - get_session_directory(key)) + session_dir = find_full_path(get_ephys_root_data_dir(), + get_session_directory(key)) inserted_probe_serial_number = (ProbeInsertion * probe.Probe & key).fetch1('probe') @@ -190,7 +190,8 @@ def make(self, key): 'acq_software': acq_software, 'sampling_rate': spikeglx_meta.meta['imSampRate']}) - root_dir = element_data_loader.utils.find_root_directory(get_ephys_root_data_dir(), meta_filepath) + root_dir = find_root_directory(get_ephys_root_data_dir(), + meta_filepath) self.EphysFile.insert1({ **key, 'file_path': meta_filepath.relative_to(root_dir).as_posix()}) @@ -223,8 +224,7 @@ def make(self, key): 'acq_software': acq_software, 'sampling_rate': probe_data.ap_meta['sample_rate']}) - root_dir = element_data_loader.utils.find_root_directory( - get_ephys_root_data_dir(), + root_dir = find_root_directory(get_ephys_root_data_dir(), probe_data.recording_info['recording_files'][0]) self.EphysFile.insert([{**key, 'file_path': fp.relative_to(root_dir).as_posix()} @@ -294,9 +294,8 @@ def make(self, key): electrode_keys.append(probe_electrodes[(shank, shank_col, shank_row)]) elif acq_software == 'Open Ephys': - session_dir = element_data_loader.utils.find_full_path( - get_ephys_root_data_dir(), - get_session_directory(key)) + session_dir = find_full_path(get_ephys_root_data_dir(), + get_session_directory(key)) loaded_oe = openephys.OpenEphys(session_dir) oe_probe = loaded_oe.probes[probe_sn] @@ -365,7 +364,7 @@ def insert_new_params(cls, processing_method: str, paramset_idx: int, 'paramset_idx': paramset_idx, 'paramset_desc': paramset_desc, 'params': params, - 'param_set_hash': element_data_loader.utils.dict_to_uuid(params)} + 'param_set_hash': dict_to_uuid(params)} param_query = cls & {'param_set_hash': param_dict['param_set_hash']} if param_query: # If the 
specified param-set already exists @@ -427,7 +426,7 @@ class Clustering(dj.Imported): def make(self, key): task_mode, output_dir = (ClusteringTask & key).fetch1( 'task_mode', 'clustering_output_dir') - kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) if task_mode == 'load': kilosort_dataset = kilosort.Kilosort(kilosort_dir) # check if the directory is a valid Kilosort output @@ -467,7 +466,7 @@ def create1_from_clustering_task(self, key, curation_note=''): task_mode, output_dir = (ClusteringTask & key).fetch1( 'task_mode', 'clustering_output_dir') - kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) creation_time, is_curated, is_qc = kilosort.extract_clustering_info(kilosort_dir) # Synthesize curation_id @@ -503,7 +502,7 @@ class Unit(dj.Part): def make(self, key): output_dir = (Curation & key).fetch1('curation_output_dir') - kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) kilosort_dataset = kilosort.Kilosort(kilosort_dir) acq_software = (EphysRecording & key).fetch1('acq_software') @@ -581,7 +580,7 @@ class Waveform(dj.Part): def make(self, key): output_dir = (Curation & key).fetch1('curation_output_dir') - kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) kilosort_dataset = kilosort.Kilosort(kilosort_dir) @@ -623,8 +622,8 @@ def yield_unit_waveforms(): spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) neuropixels_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) elif acq_software == 'Open Ephys': - session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), - get_session_directory(key)) + session_dir = find_full_path(get_ephys_root_data_dir(), + get_session_directory(key)) openephys_dataset = openephys.OpenEphys(session_dir) neuropixels_recording = openephys_dataset.probes[probe_serial_number] @@ -665,13 +664,14 @@ def get_spikeglx_meta_filepath(ephys_recording_key): & 'file_path LIKE "%.ap.meta"').fetch1('file_path') try: - spikeglx_meta_filepath = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), + spikeglx_meta_filepath = find_full_path(get_ephys_root_data_dir(), spikeglx_meta_filepath) except FileNotFoundError: # if not found, search in session_dir again if not spikeglx_meta_filepath.exists(): - session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), - get_session_directory(ephys_recording_key)) + session_dir = find_full_path(get_ephys_root_data_dir(), + get_session_directory( + ephys_recording_key)) inserted_probe_serial_number = (ProbeInsertion * probe.Probe & ephys_recording_key).fetch1('probe') @@ -708,8 +708,8 @@ def get_neuropixels_channel2electrode_map(ephys_recording_key, acq_software): for recorded_site, (shank, shank_col, shank_row, _) in enumerate( spikeglx_meta.shankmap['data'])} elif acq_software == 'Open Ephys': - session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), - get_session_directory(ephys_recording_key)) + session_dir = find_full_path(get_ephys_root_data_dir(), + get_session_directory(ephys_recording_key)) openephys_dataset = openephys.OpenEphys(session_dir) probe_serial_number = (ProbeInsertion 
& ephys_recording_key).fetch1('probe') probe_dataset = openephys_dataset.probes[probe_serial_number] @@ -736,7 +736,7 @@ def generate_electrode_config(probe_type: str, electrodes: list): :return: a dict representing a key of the probe.ElectrodeConfig table """ # compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) - electrode_config_hash = element_data_loader.utils.dict_to_uuid({k['electrode']: k for k in electrodes}) + electrode_config_hash = dict_to_uuid({k['electrode']: k for k in electrodes}) electrode_list = sorted([k['electrode'] for k in electrodes]) electrode_gaps = ([-1] From 98813508f9a77ee3110d8df055957308361273d5 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 4 Oct 2021 11:49:27 -0500 Subject: [PATCH 14/21] Fixed doc string --- element_array_ephys/ephys.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/element_array_ephys/ephys.py b/element_array_ephys/ephys.py index 455afe8b..73355c76 100644 --- a/element_array_ephys/ephys.py +++ b/element_array_ephys/ephys.py @@ -457,8 +457,7 @@ class Curation(dj.Manual): def create1_from_clustering_task(self, key, curation_note=''): """ A function to create a new corresponding "Curation" for a particular - "ClusteringTask", which assumes that no curation was performed on the - dataset + "ClusteringTask" """ if key not in Clustering(): raise ValueError(f'No corresponding entry in Clustering available' From 818cc53edb5395a1cc845958a373365679174f22 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 4 Oct 2021 12:20:39 -0500 Subject: [PATCH 15/21] Update module import --- element_array_ephys/ephys_chronic.py | 57 ++++++++++++---------------- 1 file changed, 24 insertions(+), 33 deletions(-) diff --git a/element_array_ephys/ephys_chronic.py b/element_array_ephys/ephys_chronic.py index 545ca9cd..ea147a19 100644 --- a/element_array_ephys/ephys_chronic.py +++ b/element_array_ephys/ephys_chronic.py @@ -4,7 +4,7 @@ import numpy as np import inspect import importlib -import element_data_loader.utils +from element_data_loader.utils import find_root_directory, find_full_path, dict_to_uuid from .readers import spikeglx, kilosort, openephys from . 
import probe @@ -143,8 +143,8 @@ class EphysFile(dj.Part): """ def make(self, key): - session_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), - get_session_directory(key)) + session_dir = find_full_path(get_ephys_root_data_dir(), + get_session_directory(key)) inserted_probe_serial_number = (ProbeInsertion * probe.Probe & key).fetch1('probe') @@ -191,9 +191,8 @@ def make(self, key): 'acq_software': acq_software, 'sampling_rate': spikeglx_meta.meta['imSampRate']}) - root_dir = element_data_loader.utils.find_root_directory( - get_ephys_root_data_dir(), - meta_filepath) + root_dir = find_root_directory(get_ephys_root_data_dir(), + meta_filepath) self.EphysFile.insert1({ **key, 'file_path': meta_filepath.relative_to(root_dir).as_posix()}) @@ -226,9 +225,8 @@ def make(self, key): 'acq_software': acq_software, 'sampling_rate': probe_data.ap_meta['sample_rate']}) - root_dir = element_data_loader.utils.find_root_directory( - get_ephys_root_data_dir(), - probe_data.recording_info['recording_files'][0]) + root_dir = find_root_directory(get_ephys_root_data_dir(), + probe_data.recording_info['recording_files'][0]) self.EphysFile.insert([{**key, 'file_path': fp.relative_to(root_dir).as_posix()} for fp in probe_data.recording_info['recording_files']]) @@ -296,9 +294,8 @@ def make(self, key): shank, shank_col, shank_row, _ = spikeglx_recording.apmeta.shankmap['data'][recorded_site] electrode_keys.append(probe_electrodes[(shank, shank_col, shank_row)]) elif acq_software == 'Open Ephys': - session_dir = element_data_loader.utils.find_full_path( - get_ephys_root_data_dir(), - get_session_directory(key)) + session_dir = find_full_path(get_ephys_root_data_dir(), + get_session_directory(key)) loaded_oe = openephys.OpenEphys(session_dir) oe_probe = loaded_oe.probes[probe_sn] @@ -366,7 +363,7 @@ def insert_new_params(cls, processing_method: str, paramset_idx: int, 'paramset_idx': paramset_idx, 'paramset_desc': paramset_desc, 'params': params, - 'param_set_hash': element_data_loader.utils.dict_to_uuid(params)} + 'param_set_hash': dict_to_uuid(params)} param_query = cls & {'param_set_hash': param_dict['param_set_hash']} if param_query: # If the specified param-set already exists @@ -428,7 +425,7 @@ class Clustering(dj.Imported): def make(self, key): task_mode, output_dir = (ClusteringTask & key).fetch1( 'task_mode', 'clustering_output_dir') - kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) if task_mode == 'load': kilosort_dataset = kilosort.Kilosort(kilosort_dir) # check if the directory is a valid Kilosort output @@ -467,7 +464,7 @@ def create1_from_clustering_task(self, key, curation_note=''): task_mode, output_dir = (ClusteringTask & key).fetch1( 'task_mode', 'clustering_output_dir') - kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) creation_time, is_curated, is_qc = kilosort.extract_clustering_info(kilosort_dir) # Synthesize curation_id @@ -501,7 +498,7 @@ class Unit(dj.Part): def make(self, key): output_dir = (Curation & key).fetch1('curation_output_dir') - kilosort_dir = element_data_loader.utils.find_full_path(get_ephys_root_data_dir(), output_dir) + kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) kilosort_dataset = kilosort.Kilosort(kilosort_dir) acq_software = (EphysRecording & key).fetch1('acq_software') @@ -579,9 +576,7 @@ 
class Waveform(dj.Part): def make(self, key): output_dir = (Curation & key).fetch1('curation_output_dir') - kilosort_dir = element_data_loader.utils.find_full_path( - get_ephys_root_data_dir(), - output_dir) + kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) kilosort_dataset = kilosort.Kilosort(kilosort_dir) @@ -623,9 +618,8 @@ def yield_unit_waveforms(): spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) neuropixels_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) elif acq_software == 'Open Ephys': - session_dir = element_data_loader.utils.find_full_path( - get_ephys_root_data_dir(), - get_session_directory(key)) + session_dir = find_full_path(get_ephys_root_data_dir(), + get_session_directory(key)) openephys_dataset = openephys.OpenEphys(session_dir) neuropixels_recording = openephys_dataset.probes[probe_serial_number] @@ -666,15 +660,14 @@ def get_spikeglx_meta_filepath(ephys_recording_key): & 'file_path LIKE "%.ap.meta"').fetch1('file_path') try: - spikeglx_meta_filepath = element_data_loader.utils.find_full_path( - get_ephys_root_data_dir(), - spikeglx_meta_filepath) + spikeglx_meta_filepath = find_full_path(get_ephys_root_data_dir(), + spikeglx_meta_filepath) except FileNotFoundError: # if not found, search in session_dir again if not spikeglx_meta_filepath.exists(): - session_dir = element_data_loader.utils.find_full_path( - get_ephys_root_data_dir(), - get_session_directory(ephys_recording_key)) + session_dir = find_full_path(get_ephys_root_data_dir(), + get_session_directory( + ephys_recording_key)) inserted_probe_serial_number = (ProbeInsertion * probe.Probe & ephys_recording_key).fetch1('probe') @@ -712,9 +705,8 @@ def get_neuropixels_channel2electrode_map(ephys_recording_key, acq_software): for recorded_site, (shank, shank_col, shank_row, _) in enumerate( spikeglx_meta.shankmap['data'])} elif acq_software == 'Open Ephys': - session_dir = element_data_loader.utils.find_full_path( - get_ephys_root_data_dir(), - get_session_directory(ephys_recording_key)) + session_dir = find_full_path(get_ephys_root_data_dir(), + get_session_directory(ephys_recording_key)) openephys_dataset = openephys.OpenEphys(session_dir) probe_serial_number = (ProbeInsertion & ephys_recording_key).fetch1('probe') probe_dataset = openephys_dataset.probes[probe_serial_number] @@ -741,8 +733,7 @@ def generate_electrode_config(probe_type: str, electrodes: list): :return: a dict representing a key of the probe.ElectrodeConfig table """ # compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) - electrode_config_hash = element_data_loader.utils.dict_to_uuid( - {k['electrode']: k for k in electrodes}) + electrode_config_hash = dict_to_uuid({k['electrode']: k for k in electrodes}) electrode_list = sorted([k['electrode'] for k in electrodes]) electrode_gaps = ([-1] From 665cc287b5b84cfe961bca3e47c9ff407483a2b9 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 4 Oct 2021 12:35:35 -0500 Subject: [PATCH 16/21] Fix for missing `fileTimeSecs` --- element_array_ephys/readers/spikeglx.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/readers/spikeglx.py b/element_array_ephys/readers/spikeglx.py index ac9b2358..67569989 100644 --- a/element_array_ephys/readers/spikeglx.py +++ b/element_array_ephys/readers/spikeglx.py @@ -176,7 +176,7 @@ def __init__(self, meta_filepath): self.recording_time = datetime.strptime(self.meta.get('fileCreateTime_original', self.meta['fileCreateTime']), '%Y-%m-%dT%H:%M:%S') - 
self.recording_duration = self.meta['fileTimeSecs'] + self.recording_duration = self.meta.get('fileTimeSecs') # Get probe serial number - 'imProbeSN' for 3A and 'imDatPrb_sn' for 3B try: From 84bb6169c98fedfea50418a10c31e870b1e8913f Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 4 Oct 2021 12:46:49 -0500 Subject: [PATCH 17/21] [WIP] Add print statement --- element_array_ephys/readers/spikeglx.py | 1 + 1 file changed, 1 insertion(+) diff --git a/element_array_ephys/readers/spikeglx.py b/element_array_ephys/readers/spikeglx.py index 67569989..1754d809 100644 --- a/element_array_ephys/readers/spikeglx.py +++ b/element_array_ephys/readers/spikeglx.py @@ -177,6 +177,7 @@ def __init__(self, meta_filepath): self.meta['fileCreateTime']), '%Y-%m-%dT%H:%M:%S') self.recording_duration = self.meta.get('fileTimeSecs') + print(self.recording_duration) # Get probe serial number - 'imProbeSN' for 3A and 'imDatPrb_sn' for 3B try: From 1a4a7f5c0834f62a64a6508de0d0b5de148a4657 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Mon, 4 Oct 2021 12:48:29 -0500 Subject: [PATCH 18/21] Remove print statement --- element_array_ephys/readers/spikeglx.py | 1 - 1 file changed, 1 deletion(-) diff --git a/element_array_ephys/readers/spikeglx.py b/element_array_ephys/readers/spikeglx.py index 1754d809..67569989 100644 --- a/element_array_ephys/readers/spikeglx.py +++ b/element_array_ephys/readers/spikeglx.py @@ -177,7 +177,6 @@ def __init__(self, meta_filepath): self.meta['fileCreateTime']), '%Y-%m-%dT%H:%M:%S') self.recording_duration = self.meta.get('fileTimeSecs') - print(self.recording_duration) # Get probe serial number - 'imProbeSN' for 3A and 'imDatPrb_sn' for 3B try: From 4ca9b328f705d9363dd70a88ad857c5994f65d77 Mon Sep 17 00:00:00 2001 From: Chris Brozdowski Date: Thu, 30 Dec 2021 16:34:23 -0600 Subject: [PATCH 19/21] Suggested adds re upstream components Directing to workflow for upstream `SkullReference` and utility functions --- README.md | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index b1f81df9..f9be1ad2 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,11 @@ See [Background](Background.md) for the background information and development t ![element-array-ephys diagram](images/attached_array_ephys_element.svg) As the diagram depicts, the array ephys element starts immediately downstream from ***Session***, -and also requires some notion of ***Location*** as a dependency for ***InsertionLocation***. +and also requires some notion of ***Location*** as a dependency for ***InsertionLocation***. We +provide an [example workflow](https://github.com/datajoint/workflow-array-ephys/) with a +[pipeline script](https://github.com/datajoint/workflow-array-ephys/blob/main/workflow_array_ephys/pipeline.py) +that models (a) combining this Element with the corresponding [Element-Session](https://github.com/datajoint/element-session) +, and (b) declaring a ***SkullReference*** table to provide Location. ### The design of probe @@ -74,12 +78,12 @@ To activate the `element-array-ephys`, ones need to provide: + schema name for the ephys module 2. Upstream tables - + Session table - + SkullReference table (Reference table for InsertionLocation, specifying the skull reference) + + Session table: A set of keys identifying a recording session (see [Element-Session](https://github.com/datajoint/element-session)). 
+ + SkullReference table: A reference table for InsertionLocation, specifying the skull reference (see [example pipeline](https://github.com/datajoint/workflow-array-ephys/blob/main/workflow_array_ephys/pipeline.py)). -3. Utility functions - + get_ephys_root_data_dir() - + get_session_directory() +3. Utility functions. See [example definitions here](https://github.com/datajoint/workflow-array-ephys/blob/main/workflow_array_ephys/paths.py) + + get_ephys_root_data_dir(): Returns your root data directory. + + get_session_directory(): Returns the path of the session data relative to the root. For more detail, check the docstring of the `element-array-ephys`: From 09e8a96504898f7840b031d09ce9346b639b600f Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Sun, 2 Jan 2022 20:26:16 -0600 Subject: [PATCH 20/21] Update error message --- element_array_ephys/ephys.py | 3 ++- element_array_ephys/ephys_chronic.py | 5 +++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/element_array_ephys/ephys.py b/element_array_ephys/ephys.py index 73355c76..b2d78f82 100644 --- a/element_array_ephys/ephys.py +++ b/element_array_ephys/ephys.py @@ -157,7 +157,8 @@ def make(self, key): else: raise FileNotFoundError( f'Ephys recording data not found!' - f' Neither SpikeGLX nor Open Ephys recording files found') + f' Neither SpikeGLX nor Open Ephys recording files found' + f' in {session_dir}') if acq_software == 'SpikeGLX': for meta_filepath in ephys_meta_filepaths: diff --git a/element_array_ephys/ephys_chronic.py b/element_array_ephys/ephys_chronic.py index ea147a19..61606aec 100644 --- a/element_array_ephys/ephys_chronic.py +++ b/element_array_ephys/ephys_chronic.py @@ -89,7 +89,7 @@ def get_session_directory(session_key: dict) -> str: @schema class AcquisitionSoftware(dj.Lookup): - definition = """ # Name of software used for recording of neuropixels probes - SpikeGLX or Open Ephys + definition = """ # Software used for recording of neuropixels probes acq_software: varchar(24) """ contents = zip(['SpikeGLX', 'Open Ephys']) @@ -158,7 +158,8 @@ def make(self, key): else: raise FileNotFoundError( f'Ephys recording data not found!' - f' Neither SpikeGLX nor Open Ephys recording files found') + f' Neither SpikeGLX nor Open Ephys recording files found' + f' in {session_dir}') if acq_software == 'SpikeGLX': for meta_filepath in ephys_meta_filepaths: From 6f9507c3d752f5bd010e1cf24a7369060d77b8d7 Mon Sep 17 00:00:00 2001 From: Kabilar Gunalan Date: Tue, 11 Jan 2022 15:51:02 -0600 Subject: [PATCH 21/21] Rename package --- README.md | 8 ++++---- element_array_ephys/ephys.py | 2 +- element_array_ephys/ephys_chronic.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index f9be1ad2..27a39c27 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ ephys pipeline. See [Background](Background.md) for the background information and development timeline. -## The Pipeline Architecture +## Element architecture ![element-array-ephys diagram](images/attached_array_ephys_element.svg) @@ -59,12 +59,12 @@ This ephys element features automatic ingestion for spike sorting results from t pip install --upgrade element-array-ephys ``` -+ Install `element-data-loader` ++ Install `element-interface` - + `element-data-loader` is a dependency of `element-array-ephys`, however it is not contained within `requirements.txt`. + + `element-interface` is a dependency of `element-array-ephys`, however it is not contained within `requirements.txt`. 
``` - pip install "element-data-loader @ git+https://github.com/datajoint/element-data-loader" + pip install "element-interface @ git+https://github.com/datajoint/element-interface" ``` ## Usage diff --git a/element_array_ephys/ephys.py b/element_array_ephys/ephys.py index b2d78f82..a434d158 100644 --- a/element_array_ephys/ephys.py +++ b/element_array_ephys/ephys.py @@ -4,7 +4,7 @@ import numpy as np import inspect import importlib -from element_data_loader.utils import find_root_directory, find_full_path, dict_to_uuid +from element_interface.utils import find_root_directory, find_full_path, dict_to_uuid from .readers import spikeglx, kilosort, openephys from . import probe diff --git a/element_array_ephys/ephys_chronic.py b/element_array_ephys/ephys_chronic.py index 61606aec..16aa00d4 100644 --- a/element_array_ephys/ephys_chronic.py +++ b/element_array_ephys/ephys_chronic.py @@ -4,7 +4,7 @@ import numpy as np import inspect import importlib -from element_data_loader.utils import find_root_directory, find_full_path, dict_to_uuid +from element_interface.utils import find_root_directory, find_full_path, dict_to_uuid from .readers import spikeglx, kilosort, openephys from . import probe
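
The patch series above removes the path-handling helpers from `element_array_ephys/__init__.py` and re-imports them from `element_interface.utils` (formerly `element-data-loader`), while the README spells out what a downstream workflow must supply at activation time: schema names, upstream `Session` and `SkullReference` tables, and the `get_ephys_root_data_dir()` / `get_session_directory()` functions. The sketch below illustrates one way such a linking module could look. It is a minimal, hypothetical example: the schema names, config key, table definitions, and directory layout are placeholders, and the real definitions live in the `workflow-array-ephys` repository referenced in the README.

```python
import datajoint as dj
from element_array_ephys import probe, ephys

# Hypothetical upstream schema providing the tables the element expects.
schema = dj.schema('tutorial_lab')


@schema
class SkullReference(dj.Lookup):
    """Reference points used by ephys.InsertionLocation."""
    definition = """
    skull_reference: varchar(60)
    """
    contents = zip(['Bregma', 'Lambda'])


@schema
class Session(dj.Manual):
    """Stand-in for the Session table normally supplied by element-session."""
    definition = """
    subject: varchar(32)
    session_datetime: datetime
    """


def get_ephys_root_data_dir() -> list:
    # One or more potential root directories; all paths stored by the
    # element are relative to whichever root contains the data.
    # The 'custom'/'ephys_root_data_dir' config layout is an assumption.
    return dj.config.get('custom', {}).get('ephys_root_data_dir',
                                           ['/data/ephys_root'])


def get_session_directory(session_key: dict) -> str:
    # Return the session directory relative to the root. Deriving it from
    # the primary key is an assumption made for this sketch.
    return f"{session_key['subject']}/{session_key['session_datetime']:%Y%m%d_%H%M%S}"


# Activate the probe and ephys schemas, passing this module so the element
# can resolve Session, SkullReference, and the two path functions above.
ephys.activate('tutorial_ephys', 'tutorial_probe', linking_module=__name__)
```

With such a module in place, `element_interface.utils.find_full_path(get_ephys_root_data_dir(), relative_path)` resolves stored relative paths against whichever configured root actually contains the files, which is what lets the patches above support multiple root directories.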