Skip to content

Commit

Permalink
Merge branch 'release/2.34.0'
Browse files Browse the repository at this point in the history
  • Loading branch information
mayofaulkner committed Apr 11, 2024
2 parents 966b659 + 59fd19d commit b1c1a58
Show file tree
Hide file tree
Showing 11 changed files with 73 additions and 36 deletions.
6 changes: 3 additions & 3 deletions examples/loading_data/loading_raw_ephys_data.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -107,13 +107,13 @@
"print(f'raw AP band sample for event at time {t_event}: {s_event}')\n",
"\n",
"# get the AP data surrounding samples\n",
"window_secs_ap = [-0.05, 0.05] # we'll look at 100ms before the event and 200ms after the event for AP\n",
"window_secs_ap = [-0.05, 0.05] # we'll look at 50ms before and after the event for AP\n",
"first, last = (int(window_secs_ap[0] * sr_ap.fs) + s_event, int(window_secs_ap[1] * sr_ap.fs + s_event))\n",
"raw_ap = sr_ap[first:last, :-sr_ap.nsync].T\n",
"\n",
"# get the LF data surrounding samples\n",
"window_secs_ap = [-0.750, 0.750] # we'll look at 100ms before the event and 200ms after the event\n",
"sample_lf = s_event // 12 # NB: for neuropixel probes this is always 12\n",
"window_secs_ap = [-0.750, 0.750] # we'll look at 750ms before and after the event because LF varies more slowly in time\n",
"sample_lf = s_event // 12 # NB: for neuropixel probes this is always 12 because AP is sampled at 12x the frequency of LF\n",
"first, last = (int(window_secs_ap[0] * sr_lf.fs) + sample_lf, int(window_secs_ap[1] * sr_lf.fs + sample_lf))\n",
"raw_lf = sr_lf[first:last, :-sr_lf.nsync].T"
]
Expand Down
2 changes: 1 addition & 1 deletion ibllib/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import logging
import warnings

__version__ = '2.33.0'
__version__ = '2.34.0'
warnings.filterwarnings('always', category=DeprecationWarning, module='ibllib')

# if this becomes a full-blown library we should let the logging configuration to the discretion of the dev
Expand Down
18 changes: 11 additions & 7 deletions ibllib/io/extractors/video_motion.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,10 +111,12 @@ def load_data(self, download=False):
self.data.camera_times = {vidio.label_from_path(url): ts for ts, url in zip(cam.data, cam.url)}
else:
alf_path = self.session_path / 'alf'
self.data.wheel = alfio.load_object(alf_path, 'wheel', short_keys=True)
self.data.trials = alfio.load_object(alf_path, 'trials')
wheel_path = next(alf_path.rglob('*wheel.timestamps*')).parent
self.data.wheel = alfio.load_object(wheel_path, 'wheel', short_keys=True)
trials_path = next(alf_path.rglob('*trials.table*')).parent
self.data.trials = alfio.load_object(trials_path, 'trials')
self.data.camera_times = {vidio.label_from_path(x): alfio.load_file_content(x) for x in
alf_path.glob('*Camera.times*')}
alf_path.rglob('*Camera.times*')}
assert all(x is not None for x in self.data.values())

def _set_eid_or_path(self, session_path_or_eid):
Expand Down Expand Up @@ -428,14 +430,16 @@ def fix_keys(alf_object):
return ob

alf_path = self.session_path.joinpath('alf')
wheel = (fix_keys(alfio.load_object(alf_path, 'wheel')) if location == 'SDSC' else alfio.load_object(alf_path, 'wheel'))
wheel_path = next(alf_path.rglob('*wheel.timestamps*')).parent
wheel = (fix_keys(alfio.load_object(wheel_path, 'wheel')) if location == 'SDSC'
else alfio.load_object(wheel_path, 'wheel'))
self.wheel_timestamps = wheel.timestamps
# Compute interpolated wheel position and wheel times
wheel_pos, self.wheel_time = wh.interpolate_position(wheel.timestamps, wheel.position, freq=1000)
# Compute wheel velocity
self.wheel_vel, _ = wh.velocity_filtered(wheel_pos, 1000)
# Load in original camera times
self.camera_times = alfio.load_file_content(next(alf_path.glob(f'_ibl_{self.label}Camera.times*.npy')))
self.camera_times = alfio.load_file_content(next(alf_path.rglob(f'_ibl_{self.label}Camera.times*.npy')))
self.camera_path = str(next(self.session_path.joinpath('raw_video_data').glob(f'_iblrig_{self.label}Camera.raw*.mp4')))
self.camera_meta = vidio.get_video_meta(self.camera_path)

Expand Down Expand Up @@ -473,8 +477,8 @@ def fix_keys(alf_object):
# We attempt to load in some behavior data (trials and dlc). This is only needed for the summary plots, having
# trial aligned paw velocity (from the dlc) is a nice sanity check to make sure the alignment went well
try:
self.trials = alfio.load_file_content(next(alf_path.glob('_ibl_trials.table*.pqt')))
self.dlc = alfio.load_file_content(next(alf_path.glob(f'_ibl_{self.label}Camera.dlc*.pqt')))
self.trials = alfio.load_file_content(next(alf_path.rglob('_ibl_trials.table*.pqt')))
self.dlc = alfio.load_file_content(next(alf_path.rglob(f'_ibl_{self.label}Camera.dlc*.pqt')))
self.dlc = likelihood_threshold(self.dlc)
self.behavior = True
except (ALFObjectNotFound, StopIteration):
Expand Down
28 changes: 21 additions & 7 deletions ibllib/oneibl/registration.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
import itertools

from packaging import version
from requests import HTTPError

from one.alf.files import get_session_path, folder_parts, get_alf_path
from one.registration import RegistrationClient, get_dataset_type
from one.remote.globus import get_local_endpoint_id, get_lab_from_endpoint_id
Expand Down Expand Up @@ -81,17 +83,29 @@ def register_dataset(file_list, one=None, exists=False, versions=None, **kwargs)
client = IBLRegistrationClient(one)

# Check for protected datasets
def _get_protected(pr_status):
if isinstance(protected_status, list):
pr = any(d['status_code'] == 403 for d in pr_status)
else:
pr = protected_status['status_code'] == 403

return pr

# Account for cases where we are connected to cortex lab database
if one.alyx.base_url == 'https://alyx.cortexlab.net':
protected_status = IBLRegistrationClient(
ONE(base_url='https://alyx.internationalbrainlab.org', mode='remote')).check_protected_files(file_list)
try:
protected_status = IBLRegistrationClient(
ONE(base_url='https://alyx.internationalbrainlab.org', mode='remote')).check_protected_files(file_list)
protected = _get_protected(protected_status)
except HTTPError as err:
if "[Errno 500] /check-protected: 'A base session for" in str(err):
# If we get an error due to the session not existing, we take this to mean no datasets are protected
protected = False
else:
raise err
else:
protected_status = client.check_protected_files(file_list)

if isinstance(protected_status, list):
protected = any(d['status_code'] == 403 for d in protected_status)
else:
protected = protected_status['status_code'] == 403
protected = _get_protected(protected_status)

# If we find a protected dataset, and we don't have a force=True flag, raise an error
if protected and not kwargs.pop('force', False):
Expand Down
7 changes: 7 additions & 0 deletions ibllib/pipes/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -387,6 +387,13 @@ def assert_expected(self, expected_files, silent=False):
files = []
for expected_file in expected_files:
actual_files = list(Path(self.session_path).rglob(str(Path(*filter(None, reversed(expected_file[:2]))))))
# Account for revisions
if len(actual_files) == 0:
collection = expected_file[1] + '/#*' if expected_file[1] != '' else expected_file[1] + '#*'
expected_revision = (expected_file[0], collection, expected_file[2])
actual_files = list(
Path(self.session_path).rglob(str(Path(*filter(None, reversed(expected_revision[:2]))))))

if len(actual_files) == 0 and expected_file[2]:
everything_is_fine = False
if not silent:
Expand Down
4 changes: 2 additions & 2 deletions ibllib/pipes/video_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -506,7 +506,7 @@ def _run(self, overwrite=True, run_qc=True, plot_qc=True):
if exist and overwrite:
_logger.warning('EphysPostDLC outputs exist and overwrite=True, overwriting existing outputs.')
# Find all available DLC files
dlc_files = list(Path(self.session_path).joinpath('alf').glob('_ibl_*Camera.dlc.*'))
dlc_files = list(Path(self.session_path).joinpath('alf').rglob('_ibl_*Camera.dlc.*'))
for dlc_file in dlc_files:
_logger.debug(dlc_file)
output_files = []
Expand All @@ -521,7 +521,7 @@ def _run(self, overwrite=True, run_qc=True, plot_qc=True):
dlc_thresh = likelihood_threshold(dlc, 0.9)
# try to load respective camera times
try:
dlc_t = np.load(next(Path(self.session_path).joinpath('alf').glob(f'_ibl_{cam}Camera.times.*npy')))
dlc_t = np.load(next(Path(self.session_path).joinpath('alf').rglob(f'_ibl_{cam}Camera.times.*npy')))
times = True
if dlc_t.shape[0] == 0:
_logger.error(f'camera.times empty for {cam} camera. '
Expand Down
10 changes: 3 additions & 7 deletions ibllib/plots/figures.py
Original file line number Diff line number Diff line change
Expand Up @@ -783,19 +783,15 @@ def dlc_qc_plot(session_path, one=None, device_collection='raw_video_data',
assert any(data[f'{cam}_times'] is not None for cam in cameras), "No camera times data could be loaded, aborting."

# Load session level data
for alf_object in ['trials', 'wheel', 'licks']:
for alf_object, collection in zip(['trials', 'wheel', 'licks'], [trials_collection, trials_collection, 'alf']):
try:
if alf_object == 'licks':
data[f'{alf_object}'] = alfio.load_object(session_path.joinpath('alf'),
alf_object) # load locally
else:
data[f'{alf_object}'] = alfio.load_object(session_path.joinpath(trials_collection), alf_object) # load locally
data[f'{alf_object}'] = alfio.load_object(session_path.joinpath(collection), alf_object) # load locally
continue
except ALFObjectNotFound:
pass
try:
# then try from alyx
data[f'{alf_object}'] = one.load_object(one.path2eid(session_path), alf_object, collection=trials_collection)
data[f'{alf_object}'] = one.load_object(one.path2eid(session_path), alf_object, collection=collection)
except ALFObjectNotFound:
logger.warning(f"Could not load {alf_object} object, some plots have to be skipped.")
data[f'{alf_object}'] = None
Expand Down
7 changes: 4 additions & 3 deletions ibllib/qc/camera.py
Original file line number Diff line number Diff line change
Expand Up @@ -1137,8 +1137,9 @@ def load_data(self, download_data: bool = None,
alf_path = self.session_path / 'alf'
try:
assert not extract_times
cam_path = next(alf_path.rglob(f'*{self.label}Camera.times*')).parent
self.data['timestamps'] = alfio.load_object(
alf_path, f'{self.label}Camera', short_keys=True)['times']
cam_path, f'{self.label}Camera', short_keys=True)['times']
except AssertionError: # Re-extract
kwargs = dict(video_path=self.video_path, labels=self.label)
if self.sync == 'bpod':
Expand All @@ -1154,8 +1155,8 @@ def load_data(self, download_data: bool = None,
wheel_keys = ('timestamps', 'position')
try:
# glob in case wheel data are in sub-collections
alf_path = next(alf_path.rglob('*wheel.timestamps*')).parent
self.data['wheel'] = alfio.load_object(alf_path, 'wheel', short_keys=True)
wheel_path = next(alf_path.rglob('*wheel.timestamps*')).parent
self.data['wheel'] = alfio.load_object(wheel_path, 'wheel', short_keys=True)
except ALFObjectNotFound:
# Extract from raw data
if self.sync != 'bpod':
Expand Down
12 changes: 8 additions & 4 deletions ibllib/qc/dlc.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,9 +93,11 @@ def load_data(self, download_data: bool = None) -> None:
alf_path = self.session_path / 'alf'

# Load times
self.data['camera_times'] = alfio.load_object(alf_path, f'{self.side}Camera')['times']
cam_path = next(alf_path.rglob(f'*{self.side}Camera.times*')).parent
self.data['camera_times'] = alfio.load_object(cam_path, f'{self.side}Camera')['times']
# Load dlc traces
dlc_df = alfio.load_object(alf_path, f'{self.side}Camera', namespace='ibl')['dlc']
dlc_path = next(alf_path.rglob(f'*{self.side}Camera.dlc*')).parent
dlc_df = alfio.load_object(dlc_path, f'{self.side}Camera', namespace='ibl')['dlc']
targets = np.unique(['_'.join(col.split('_')[:-1]) for col in dlc_df.columns])
# Set values to nan if likelihood is too low
dlc_coords = {}
Expand All @@ -106,11 +108,13 @@ def load_data(self, download_data: bool = None) -> None:
self.data['dlc_coords'] = dlc_coords

# load stim on times
self.data['stimOn_times'] = alfio.load_object(alf_path, 'trials', namespace='ibl')['stimOn_times']
trial_path = next(alf_path.rglob('*trials.table*')).parent
self.data['stimOn_times'] = alfio.load_object(trial_path, 'trials', namespace='ibl')['stimOn_times']

# load pupil diameters
if self.side in ['left', 'right']:
features = alfio.load_object(alf_path, f'{self.side}Camera', namespace='ibl')['features']
feat_path = next(alf_path.rglob(f'*{self.side}Camera.features*')).parent
features = alfio.load_object(feat_path, f'{self.side}Camera', namespace='ibl')['features']
self.data['pupilDiameter_raw'] = features['pupilDiameter_raw']
self.data['pupilDiameter_smooth'] = features['pupilDiameter_smooth']

Expand Down
5 changes: 3 additions & 2 deletions ibllib/tests/qc/test_task_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,10 @@ def _create_test_qc_outcomes():
class TestAggregateOutcome(unittest.TestCase):

def test_deprecation_warning(self):
"""Remove TaskQC.compute_session_status_from_dict after 2024-04-01."""
"""Remove TaskQC.compute_session_status_from_dict after 2024-06-01. Cherry pick commit
3cbbd1769e1ba82a51b09a992b2d5f4929f396b2 for removal of this test and applicable code"""
from datetime import datetime
self.assertFalse(datetime.now() > datetime(2024, 4, 10), 'remove TaskQC.compute_session_status_from_dict method.')
self.assertFalse(datetime.now() > datetime(2024, 6, 1), 'remove TaskQC.compute_session_status_from_dict method.')
qc_dict = {'_task_iti_delays': .99}
with self.assertWarns(DeprecationWarning), self.assertLogs(qcmetrics.__name__, spec.QC.WARNING):
out = qcmetrics.TaskQC.compute_session_status_from_dict(qc_dict)
Expand Down
10 changes: 10 additions & 0 deletions release_notes.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,13 @@
## Release Note 2.34.0

## features
- Task assert_expected_input now takes revisions into account
- Camera qc and video motion now take into account dynamic pipeline folder

## bugfixes
- Typo in raw_ephys_data documentation
- oneibl.register_datasets accounts for non-existing sessions when checking protected datasets

## Release Note 2.33.0

## features
Expand Down

0 comments on commit b1c1a58

Please sign in to comment.