
Commit

fixes #109
k1o0 committed Jan 17, 2024
1 parent 656e6d3 commit d6a2948
Showing 5 changed files with 24 additions and 9 deletions.
14 changes: 10 additions & 4 deletions CHANGELOG.md
@@ -1,21 +1,27 @@
 # Changelog
-## [Latest](https://github.com/int-brain-lab/ONE/commits/main) [2.5.3]
+## [Latest](https://github.com/int-brain-lab/ONE/commits/main) [2.5.4]
 
 ### Modified
 
-- support non-zero-padded sequence paths in ConvertersMixin.path2ref, e.g. subject/2020-01-01/1
+- HOTFIX: initialize empty One cache tables with correct columns
+
+## [2.5.3]
+
+### Modified
+
+- HOTFIX: support non-zero-padded sequence paths in ConvertersMixin.path2ref, e.g. subject/2020-01-01/1
 
 ## [2.5.2]
 
 ### Modified
 
-- handle data urls that have URL parts before 'aggregates/' in OneAlyx.list_aggregates method
+- HOTFIX: handle data urls that have URL parts before 'aggregates/' in OneAlyx.list_aggregates method
 
 ## [2.5.1]
 
 ### Modified
 
-- exclude irrelevant s3 objects with source name in key, e.g. for foo/bar exclude foo/bar_baz/ key
+- HOTFIX: exclude irrelevant s3 objects with source name in key, e.g. for foo/bar exclude foo/bar_baz/ key
 
 ## [Latest](https://github.com/int-brain-lab/ONE/commits/main) [2.5.0]
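Editor's note: the 2.5.3 entry above concerns `ConvertersMixin.path2ref`, which previously expected zero-padded sequence numbers (e.g. `001`). A minimal sketch of the behaviour being fixed, using an illustrative regex rather than the library's actual implementation:

```python
import re

# Illustrative only: a pattern in the spirit of the path2ref fix, not ONE's
# actual implementation. Accepting r'\d{1,3}' rather than r'\d{3}' lets the
# sequence part match both zero-padded ('001') and plain ('1') session numbers.
ref_pattern = re.compile(
    r'(?P<subject>[\w-]+)/(?P<date>\d{4}-\d{2}-\d{2})/(?P<sequence>\d{1,3})')

for path in ('subject/2020-01-01/001', 'subject/2020-01-01/1'):
    match = ref_pattern.fullmatch(path)
    assert match, f'{path} should parse'
    print(match.groupdict())
# {'subject': 'subject', 'date': '2020-01-01', 'sequence': '001'}
# {'subject': 'subject', 'date': '2020-01-01', 'sequence': '1'}
```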
2 changes: 1 addition & 1 deletion one/__init__.py
@@ -1,2 +1,2 @@
 """The Open Neurophysiology Environment (ONE) API."""
-__version__ = '2.5.3'
+__version__ = '2.5.4'
2 changes: 1 addition & 1 deletion one/alf/cache.py
@@ -31,7 +31,7 @@
 from one.alf.files import session_path_parts, get_alf_path
 from one.converters import session_record2path
 
-__all__ = ['make_parquet_db', 'remove_missing_datasets']
+__all__ = ['make_parquet_db', 'remove_missing_datasets', 'DATASETS_COLUMNS', 'SESSIONS_COLUMNS']
 _logger = logging.getLogger(__name__)
 
 # -------------------------------------------------------------------------------------------------
8 changes: 5 additions & 3 deletions one/api.py
@@ -26,7 +26,7 @@
 import one.alf.io as alfio
 import one.alf.files as alfiles
 import one.alf.exceptions as alferr
-from .alf.cache import make_parquet_db
+from .alf.cache import make_parquet_db, DATASETS_COLUMNS, SESSIONS_COLUMNS
 from .alf.spec import is_uuid_string
 from . import __version__
 from one.converters import ConversionMixin, session_record2path
@@ -148,7 +148,9 @@ def load_cache(self, tables_dir=None, **kwargs):
             # No tables present
             meta['expired'] = True
             meta['raw'] = {}
-            self._cache.update({'datasets': pd.DataFrame(), 'sessions': pd.DataFrame()})
+            self._cache.update({
+                'datasets': pd.DataFrame(columns=DATASETS_COLUMNS).set_index(['eid', 'id']),
+                'sessions': pd.DataFrame(columns=SESSIONS_COLUMNS).set_index('id')})
             if self.offline:  # In online mode, the cache tables should be downloaded later
                 warnings.warn(f'No cache tables found in {self._tables_dir}')
             created = [datetime.fromisoformat(x['date_created'])
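Editor's note: the hunk above is the core of the fix. A bare `pd.DataFrame()` has no columns and no named index, so downstream code that aligns records against the cache tables misbehaves. A minimal sketch of the difference, using hypothetical column names (the real definitions are `DATASETS_COLUMNS` and `SESSIONS_COLUMNS` in `one/alf/cache.py`; this diff only guarantees that 'eid'/'id' are among them):

```python
import pandas as pd

# Hypothetical subsets, for illustration only.
DATASETS_COLUMNS = ('eid', 'id', 'rel_path', 'exists')
SESSIONS_COLUMNS = ('id', 'lab', 'subject', 'date')

# Before the fix: a bare DataFrame has no columns and an unnamed index,
# so later reindex/update operations against real cache records fail.
empty_untyped = pd.DataFrame()
assert empty_untyped.columns.empty

# After the fix: still empty, but with the expected columns and index names,
# so code that aligns on columns or the ('eid', 'id') index works uniformly.
datasets = pd.DataFrame(columns=DATASETS_COLUMNS).set_index(['eid', 'id'])
sessions = pd.DataFrame(columns=SESSIONS_COLUMNS).set_index('id')
print(list(datasets.index.names))  # ['eid', 'id']
print(list(sessions.columns))      # ['lab', 'subject', 'date']
```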
@@ -290,7 +292,7 @@ def _update_cache_from_records(self, strict=False, **kwargs):
                 to_drop = set(records.columns) - set(self._cache[table].columns)
                 records.drop(to_drop, axis=1, inplace=True)
             records = records.reindex(columns=self._cache[table].columns)
-            assert all(self._cache[table].columns == records.columns)
+            assert set(self._cache[table].columns) == set(records.columns)
             # Update existing rows
             to_update = records.index.isin(self._cache[table].index)
             self._cache[table].loc[records.index[to_update], :] = records[to_update]
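Editor's note: the relaxed assertion above swaps an ordered, element-wise `Index` comparison for an order-insensitive set comparison. A short illustration of why the original form is fragile (column names made up for the example):

```python
import pandas as pd

# Element-wise Index comparison is order-sensitive and raises when lengths
# differ; set comparison only checks membership, which is all that matters
# after the reindex above.
a = pd.Index(['eid', 'rel_path', 'exists'])
b = pd.Index(['rel_path', 'eid', 'exists'])

print(all(a == b))        # False: same columns, different order
print(set(a) == set(b))   # True: membership is what matters here

c = pd.Index(['eid', 'rel_path'])
try:
    all(a == c)
except ValueError as e:
    # Comparing Indexes of unequal length raises rather than returning False
    print(type(e).__name__)  # ValueError
```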
7 changes: 7 additions & 0 deletions one/tests/test_one.py
@@ -796,6 +796,13 @@ def test_update_cache_from_records(self):
         with self.assertRaises(KeyError):
             self.one._update_cache_from_records(unknown=datasets)
         self.assertIsNone(self.one._update_cache_from_records(datasets=None))
+        # Absent cache table
+        self.one.load_cache(tables_dir='/foo')
+        self.one._update_cache_from_records(sessions=session, datasets=dataset)
+        self.assertTrue(all(self.one._cache.sessions == pd.DataFrame([session])))
+        self.assertEqual(1, len(self.one._cache.datasets))
+        self.assertEqual(self.one._cache.datasets.squeeze().name, dataset.name)
+        self.assertCountEqual(self.one._cache.datasets.squeeze().to_dict(), dataset.to_dict())
 
     def test_save_loaded_ids(self):
         """Test One.save_loaded_ids and logic within One._check_filesystem"""
