From a4e3787853ca3306231f38df1d6ece31af8447ac Mon Sep 17 00:00:00 2001
From: Olivier Winter
Date: Mon, 13 Feb 2023 11:34:09 +0000
Subject: [PATCH] Release 1.9.0 (#776)

* Generic object added to Dataset model
* Migration file; nullable fields
* force change to filename_pattern field (previous migration failed to apply correctly)
* Fix migrations in tests
* added subjectTrials fixture
* Fix REST tests
* timezone.utc -> datetime.utcnow
* Fix test
* bump version number
* GitHub Actions generated requirements_frozen.txt
* rephrase password differences explicitly
* GitHub Actions generated requirements_frozen.txt

---------

Co-authored-by: Miles Wells
Co-authored-by: github-actions
---
 alyx/alyx/__init__.py                         |  2 +-
 alyx/data/fixtures/data.datasettype.json      | 11 +++++
 ..._dataset_content_type_dataset_object_id.py | 48 +++++++++++++++++++
 ...0014_alter_datasettype_filename_pattern.py | 44 +++++++++++++++++
 ...0015_alter_datasettype_filename_pattern.py | 44 +++++++++++++++++
 alyx/data/models.py                           | 29 +++++------
 alyx/data/tests.py                            | 16 +++++--
 alyx/data/tests_rest.py                       | 14 +++---
 alyx/data/transfers.py                        | 13 ++---
 alyx/subjects/models.py                       |  2 +-
 alyx/subjects/views.py                        |  2 +-
 docs/gettingstarted.md                        |  4 +-
 requirements_frozen.txt                       | 20 ++++----
 13 files changed, 203 insertions(+), 46 deletions(-)
 create mode 100644 alyx/data/migrations/0013_dataset_content_type_dataset_object_id.py
 create mode 100644 alyx/data/migrations/0014_alter_datasettype_filename_pattern.py
 create mode 100644 alyx/data/migrations/0015_alter_datasettype_filename_pattern.py

diff --git a/alyx/alyx/__init__.py b/alyx/alyx/__init__.py
index 8310a90dd..249120fbe 100644
--- a/alyx/alyx/__init__.py
+++ b/alyx/alyx/__init__.py
@@ -1 +1 @@
-VERSION = __version__ = '1.8.0'
+VERSION = __version__ = '1.9.0'
diff --git a/alyx/data/fixtures/data.datasettype.json b/alyx/data/fixtures/data.datasettype.json
index 90ef2b5f9..72f5b9ea1 100644
--- a/alyx/data/fixtures/data.datasettype.json
+++ b/alyx/data/fixtures/data.datasettype.json
@@ -1747,5 +1747,16 @@
         "description": "A list of acquisition devices and behaviour protocols, along with the data and sync file location.
The root keys are (devices, sync, task, procedures).", "filename_pattern": "_*_experiment.description.*" } + }, + { + "model": "data.datasettype", + "pk": "e0614a49-8f18-46b1-a4d9-0710a080fd8d", + "fields": { + "json": null, + "name": "subjectTrials.table", + "created_by": null, + "description": "All trials data for a given subject, contains the same columns as trials.table, plus \"session\", \"session_start_time\" and \"session_number\"", + "filename_pattern": "" + } } ] diff --git a/alyx/data/migrations/0013_dataset_content_type_dataset_object_id.py b/alyx/data/migrations/0013_dataset_content_type_dataset_object_id.py new file mode 100644 index 000000000..3cb9fad8d --- /dev/null +++ b/alyx/data/migrations/0013_dataset_content_type_dataset_object_id.py @@ -0,0 +1,48 @@ +# Generated by Django 4.1.5 on 2023-01-31 12:34 + +from django.db import migrations, transaction, models +import django.db.models.deletion + + +def forwards(apps, _): + """Go through the datasets and assign the session field to the content_object field""" + Dataset = apps.get_model('data', 'Dataset') + with transaction.atomic(): + for dataset in Dataset.objects.filter(session__isnull=False).iterator(): + if dataset.content_object is None: + dataset.content_object = dataset.session + dataset.save() + + +def backwards(apps, _): + Dataset = apps.get_model('data', 'Dataset') + with transaction.atomic(): + for dataset in Dataset.objects.filter(session__isnull=False).iterator(): + if dataset.content_object is not None: + dataset.content_object = None + dataset.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ('contenttypes', '0002_remove_content_type_name'), + ('data', '0012_alter_datasettype_filename_pattern_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='dataset', + name='content_type', + field=models.ForeignKey(null=True, blank=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype'), + preserve_default=False, + ), + migrations.AddField( + model_name='dataset', + name='object_id', + field=models.UUIDField( + null=True, blank=True, help_text='UUID, an object of content_type with this ID must already exist to attach a note.'), + preserve_default=False, + ), +# migrations.RunPython(forwards, backwards) + ] diff --git a/alyx/data/migrations/0014_alter_datasettype_filename_pattern.py b/alyx/data/migrations/0014_alter_datasettype_filename_pattern.py new file mode 100644 index 000000000..0392b4d16 --- /dev/null +++ b/alyx/data/migrations/0014_alter_datasettype_filename_pattern.py @@ -0,0 +1,44 @@ +# Generated by Django 4.1.5 on 2023-02-01 15:32 +# NB: The previous migrations (0011 and 0012) somehow failed to set the filename_pattern field as +# nullable. Migrations 0014 and 0015 reverse and remake this change which apparently fixed this +# issue. 
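+# The helper functions below use a sentinel value: fix_null_fields() temporarily
+# prefixes null filename_pattern values with PATTERN + name so the column can be made
+# NOT NULL, and null_fields() strips the sentinel to restore the nulls on reverse.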
+
+import alyx.base
+from django.db import migrations, transaction
+
+PATTERN = '$$$'
+
+
+def fix_null_fields(apps, _):
+    """Populate null filename_pattern fields before making column not null"""
+    DatasetType = apps.get_model('data', 'DatasetType')
+    assert not DatasetType.objects.filter(filename_pattern__startswith=PATTERN).count()
+    with transaction.atomic():
+        for dtype in DatasetType.objects.filter(filename_pattern__isnull=True).iterator():
+            dtype.filename_pattern = PATTERN + dtype.name
+            dtype.save()
+
+
+def null_fields(apps, _):
+    """Reset previously null filename_pattern fields"""
+    DatasetType = apps.get_model('data', 'DatasetType')
+    with transaction.atomic():
+        for dtype in DatasetType.objects.filter(filename_pattern__startswith=PATTERN).iterator():
+            dtype.filename_pattern = None
+            dtype.save()
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('data', '0013_dataset_content_type_dataset_object_id'),
+    ]
+
+    operations = [
+        migrations.RunPython(fix_null_fields, null_fields),
+        migrations.AlterField(
+            model_name='datasettype',
+            name='filename_pattern',
+            field=alyx.base.CharNullField(blank=True, help_text="File name pattern (with wildcards) for this file in ALF naming convention. E.g. 'spikes.times.*' or '*.timestamps.*', or 'spikes.*.*' for a DataCollection, which would include all files starting with the word 'spikes'. NB: Case-insensitive matching. If null, the name field must match the object.attribute part of the filename.", max_length=255, unique=True),
+        ),
+    ]
diff --git a/alyx/data/migrations/0015_alter_datasettype_filename_pattern.py b/alyx/data/migrations/0015_alter_datasettype_filename_pattern.py
new file mode 100644
index 000000000..c70f932ea
--- /dev/null
+++ b/alyx/data/migrations/0015_alter_datasettype_filename_pattern.py
@@ -0,0 +1,44 @@
+# Generated by Django 4.1.5 on 2023-02-01 15:33
+# NB: The previous migrations (0011 and 0012) somehow failed to set the filename_pattern field as
+# nullable. Migrations 0014 and 0015 reverse and remake this change which apparently fixed this
+# issue.
+
+import alyx.base
+from django.db import migrations, transaction
+
+PATTERN = '$$$'
+
+
+def fix_null_fields(apps, _):
+    """Populate null filename_pattern fields before making column not null"""
+    DatasetType = apps.get_model('data', 'DatasetType')
+    assert not DatasetType.objects.filter(filename_pattern__startswith=PATTERN).count()
+    with transaction.atomic():
+        for dtype in DatasetType.objects.filter(filename_pattern__isnull=True).iterator():
+            dtype.filename_pattern = PATTERN + dtype.name
+            dtype.save()
+
+
+def null_fields(apps, _):
+    """Reset previously null filename_pattern fields"""
+    DatasetType = apps.get_model('data', 'DatasetType')
+    with transaction.atomic():
+        for dtype in DatasetType.objects.filter(filename_pattern__startswith=PATTERN).iterator():
+            dtype.filename_pattern = None
+            dtype.save()
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('data', '0014_alter_datasettype_filename_pattern'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='datasettype',
+            name='filename_pattern',
+            field=alyx.base.CharNullField(blank=True, help_text="File name pattern (with wildcards) for this file in ALF naming convention. E.g. 'spikes.times.*' or '*.timestamps.*', or 'spikes.*.*' for a DataCollection, which would include all files starting with the word 'spikes'. NB: Case-insensitive matching. If null, the name field must match the object.attribute part of the filename.", max_length=255, null=True, unique=True),
+        ),
+        migrations.RunPython(null_fields, fix_null_fields),
+    ]
diff --git a/alyx/data/models.py b/alyx/data/models.py
index 31d31d656..31994b347 100644
--- a/alyx/data/models.py
+++ b/alyx/data/models.py
@@ -3,6 +3,8 @@
 from django.core.validators import RegexValidator
 from django.db import models
 from django.utils import timezone
+from django.contrib.contenttypes.fields import GenericForeignKey
+from django.contrib.contenttypes.models import ContentType
 
 from alyx.settings import TIME_ZONE, AUTH_USER_MODEL
 from actions.models import Session
@@ -294,6 +296,13 @@ class Dataset(BaseExperimentalData):
     """
     objects = DatasetManager()
 
+    # Generic foreign key to arbitrary model instances allows polymorphic relationships
+    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE, null=True, blank=True)
+    object_id = models.UUIDField(help_text="UUID, an object of content_type with this "
+                                           "ID must already exist to attach a note.",
+                                 null=True, blank=True)
+    content_object = GenericForeignKey()
+
     file_size = models.BigIntegerField(blank=True, null=True, help_text="Size in bytes")
 
     md5 = models.UUIDField(blank=True, null=True,
@@ -334,31 +343,19 @@ class Dataset(BaseExperimentalData):
     @property
     def is_online(self):
         fr = self.file_records.filter(data_repository__globus_is_personal=False)
-        if fr:
-            return all(fr.values_list('exists', flat=True))
-        else:
-            return False
+        return bool(fr.count() and all(fr.values_list('exists', flat=True)))
 
     @property
     def is_protected(self):
-        tags = self.tags.filter(protected=True)
-        if tags.count() > 0:
-            return True
-        else:
-            return False
+        return bool(self.tags.filter(protected=True).count())
 
     @property
     def is_public(self):
-        tags = self.tags.filter(public=True)
-        if tags.count() > 0:
-            return True
-        else:
-            return False
+        return bool(self.tags.filter(public=True).count())
 
     @property
     def data_url(self):
-        records = self.file_records.filter(data_repository__data_url__isnull=False,
-                                           exists=True)
+        records = self.file_records.filter(data_repository__data_url__isnull=False, exists=True)
         # returns preferentially globus non-personal endpoint
         if records:
             order_keys = ('data_repository__globus_is_personal', '-data_repository__name')
diff --git a/alyx/data/tests.py b/alyx/data/tests.py
index cdc33bed3..97fd53f5e 100644
--- a/alyx/data/tests.py
+++ b/alyx/data/tests.py
@@ -4,6 +4,8 @@
 from django.db.models import ProtectedError
 
 from data.models import Dataset, DatasetType, Tag
+from subjects.models import Subject
+from misc.models import Lab
 from data.transfers import get_dataset_type
 
 
@@ -11,9 +13,17 @@ class TestModel(TestCase):
     def test_model_methods(self):
         (dset, _) = Dataset.objects.get_or_create(name='toto.npy')
-        assert dset.is_online is False
-        assert dset.is_public is False
-        assert dset.is_protected is False
+        self.assertIs(dset.is_online, False)
+        self.assertIs(dset.is_public, False)
+        self.assertIs(dset.is_protected, False)
+
+    def test_generic_foreign_key(self):
+        # Attempt to associate a dataset with a subject
+        lab = Lab.objects.create(name='test_lab')
+        subj = Subject.objects.create(nickname='foo', birth_date='2018-09-01', lab=lab)
+        dset = Dataset(name='toto.npy', content_object=subj)
+
+        self.assertIs(dset.content_object, subj)
 
     def test_delete(self):
         (dset, _) = Dataset.objects.get_or_create(name='foo.npy')
diff --git a/alyx/data/tests_rest.py b/alyx/data/tests_rest.py
fcce55ac9..04eeab20b 100644 --- a/alyx/data/tests_rest.py +++ b/alyx/data/tests_rest.py @@ -1,5 +1,5 @@ import datetime -import os.path as op +from pathlib import PurePosixPath import uuid from django.contrib.auth import get_user_model @@ -372,11 +372,11 @@ def _assert_registration(self, r, data): self.assertEqual(d0['file_records'][0]['data_repository'], 'dr') self.assertEqual(d0['file_records'][0]['relative_path'], - op.join(data['path'], 'a.b.e1')) + PurePosixPath(data['path'], 'a.b.e1').as_posix()) self.assertEqual(d1['file_records'][0]['data_repository'], 'dr') self.assertEqual(d1['file_records'][0]['relative_path'], - op.join(data['path'], 'a.c.e2')) + PurePosixPath(data['path'], 'a.c.e2').as_posix()) def test_register_existence_options(self): @@ -509,8 +509,8 @@ def test_register_with_revision(self): self.assertTrue(not r['revision']) self.assertEqual(r['collection'], 'dir') # Check the revision relative path doesn't exist - self.assertTrue(r['file_records'][0]['relative_path'] == - op.join(data['path'], data['filenames'])) + self.assertEqual(r['file_records'][0]['relative_path'], + PurePosixPath(data['path'], data['filenames']).as_posix()) # Now test specifying a revision in path data = {'path': '%s/2018-01-01/002/dir/#v1#' % self.subject, @@ -523,7 +523,7 @@ def test_register_with_revision(self): self.assertTrue(r['revision'] == 'v1') self.assertEqual('dir', r['collection']) # Check file record relative path includes revision - self.assertTrue('#v1#' in r['file_records'][0]['relative_path']) + self.assertIn('#v1#', r['file_records'][0]['relative_path']) # Now test specifying a collection and a revision in filename data = {'path': '%s/2018-01-01/002/dir' % self.subject, @@ -535,7 +535,7 @@ def test_register_with_revision(self): self.assertTrue(r['revision'] == 'v1') self.assertTrue(r['collection'] == 'dir/dir1') # Check file record relative path includes revision - self.assertTrue('#v1#' in r['file_records'][0]['relative_path']) + self.assertIn('#v1#', r['file_records'][0]['relative_path']) # Test that giving nested revision folders gives out an error data = {'path': '%s/2018-01-01/002/dir' % self.subject, diff --git a/alyx/data/transfers.py b/alyx/data/transfers.py index 654f5a2fa..03e648101 100644 --- a/alyx/data/transfers.py +++ b/alyx/data/transfers.py @@ -4,7 +4,7 @@ import os.path as op import re import time -from pathlib import Path +from pathlib import Path, PurePosixPath from django.db.models import Case, When, Count, Q, F import globus_sdk @@ -88,7 +88,7 @@ def _get_absolute_path(file_record): path2 = path2[6:] if path2.startswith('/'): path2 = path2[1:] - path = op.join(path1, path2) + path = PurePosixPath(path1, path2).as_posix() return path @@ -248,7 +248,7 @@ def _create_dataset_file_records( assert session is not None revision_name = f'#{revision.name}#' if revision else '' - relative_path = op.join(rel_dir_path, collection or '', revision_name, filename) + relative_path = PurePosixPath(rel_dir_path, collection or '', revision_name, filename) dataset_type = get_dataset_type(filename, DatasetType.objects.all()) data_format = get_data_format(filename) assert dataset_type @@ -261,8 +261,9 @@ def _create_dataset_file_records( # Get or create the dataset. 
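     # NB: the new generic content_object is not passed here; the session foreign key
     # remains the canonical link (note the commented-out argument below).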
     dataset, is_new = Dataset.objects.get_or_create(
-        collection=collection, name=filename, session=session,
-        dataset_type=dataset_type, data_format=data_format, revision=revision)
+        collection=collection, name=filename, session=session,  # content_object=session,
+        dataset_type=dataset_type, data_format=data_format, revision=revision
+    )
 
     dataset.default_dataset = default is True
     dataset.save()
@@ -303,7 +304,7 @@ def _create_dataset_file_records(
         exists = repo in exists_in
         # Do not create a new file record if it already exists.
         fr, is_new = FileRecord.objects.get_or_create(
-            dataset=dataset, data_repository=repo, relative_path=relative_path)
+            dataset=dataset, data_repository=repo, relative_path=relative_path.as_posix())
         if is_new or is_patched:
             fr.exists = exists
             fr.json = None  # this is important if a dataset is patched during an ongoing transfer
diff --git a/alyx/subjects/models.py b/alyx/subjects/models.py
index 344e8f8e5..fd1e2417e 100644
--- a/alyx/subjects/models.py
+++ b/alyx/subjects/models.py
@@ -263,7 +263,7 @@ def nicknamesafe(self):
     def age_days(self):
         if (self.death_date is None and self.birth_date is not None):
             # subject still alive
-            age = datetime.now(timezone.utc).date() - self.birth_date
+            age = datetime.utcnow().date() - self.birth_date
         elif (self.death_date is not None and self.birth_date is not None):
             # subject is dead
             age = self.death_date - self.birth_date
diff --git a/alyx/subjects/views.py b/alyx/subjects/views.py
index 42353c4f9..0dc5b98ac 100644
--- a/alyx/subjects/views.py
+++ b/alyx/subjects/views.py
@@ -49,7 +49,7 @@ class SubjectList(generics.ListCreateAPIView):
     queryset = SubjectListSerializer.setup_eager_loading(queryset)
     serializer_class = SubjectListSerializer
     permission_classes = rest_permission_classes()
-    filter_class = SubjectFilter
+    filterset_class = SubjectFilter
 
 
 class SubjectDetail(generics.RetrieveUpdateDestroyAPIView):
diff --git a/docs/gettingstarted.md b/docs/gettingstarted.md
index 20c2c64e1..b094a0613 100644
--- a/docs/gettingstarted.md
+++ b/docs/gettingstarted.md
@@ -59,7 +59,9 @@
 cd alyx
 python manage.py runserver
 ```
 
-Then go to http://localhost:8000/admin, connect as `admin:admin` and change your password.
+NB: the password above is the PostgreSQL database user's password. Django uses it only to connect to the database; it is distinct from any user password on the admin website.
+
+You can then visit http://localhost:8000/admin, log in as `admin:admin` (i.e. username `admin` and password `admin`) and update your admin interface password.
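+
+If the admin password is later forgotten, it can also be reset from the command line using Django's built-in `changepassword` command (run from the `alyx` folder with the environment active):
+
+```
+python manage.py changepassword admin
+```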
### macOS diff --git a/requirements_frozen.txt b/requirements_frozen.txt index 39f22200f..4e406af58 100644 --- a/requirements_frozen.txt +++ b/requirements_frozen.txt @@ -1,7 +1,7 @@ asgiref==3.6.0 backports.zoneinfo==0.2.1 -boto3==1.26.56 -botocore==1.29.56 +boto3==1.26.69 +botocore==1.29.69 certifi==2022.12.7 cffi==1.15.1 charset-normalizer==3.0.1 @@ -14,11 +14,11 @@ coverage==6.5.0 coveralls==3.3.1 cryptography==36.0.2 cycler==0.11.0 -Django==4.1.5 +Django==4.1.6 django-admin-list-filter-dropdown==1.0.3 django-admin-rangefilter==0.9.0 django-autocomplete-light==3.9.4 -django-cleanup==6.0.0 +django-cleanup==7.0.0 django-filter==21.1 django-ipware==4.0.2 django-js-asset==2.0.0 @@ -26,7 +26,7 @@ django-mptt==0.14.0 django-polymorphic==3.1.0 django-reversion==5.0.4 django-storages==1.13.2 -django-structlog==4.0.1 +django-structlog==4.1.1 django-test-without-migrations==0.6 djangorestframework==3.14.0 docopt==0.6.2 @@ -34,8 +34,8 @@ docutils==0.19 drfdocs==0.0.11 flake8==6.0.0 fonttools==4.38.0 -globus-cli==3.10.1 -globus-sdk==3.15.0 +globus-cli==3.11.0 +globus-sdk==3.16.0 iblutil==1.5.0 idna==3.4 importlib-metadata==6.0.0 @@ -51,12 +51,12 @@ matplotlib==3.6.3 mccabe==0.7.0 numba==0.56.4 numpy==1.23.5 -ONE-api==1.19.0 +ONE-api==1.19.1 packaging==23.0 pandas==1.5.3 Pillow==9.4.0 psycopg2-binary==2.9.5 -pyarrow==10.0.1 +pyarrow==11.0.0 pycodestyle==2.10.0 pycparser==2.21 pyflakes==3.0.1 @@ -76,4 +76,4 @@ typing_extensions==4.4.0 uritemplate==4.1.1 urllib3==1.26.14 webdavclient3==3.14.6 -zipp==3.11.0 +zipp==3.13.0
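
For reference, a minimal sketch of what the new generic relationship on `Dataset` allows, mirroring the model and test changes above (assumes a Django shell running against this release; the `Lab` and `Subject` values are illustrative, taken from the new test):

```python
from misc.models import Lab
from subjects.models import Subject
from data.models import Dataset

# Any registered model instance can now be attached via content_object;
# Django populates the content_type and object_id columns behind the scenes.
lab = Lab.objects.create(name='test_lab')
subj = Subject.objects.create(nickname='foo', birth_date='2018-09-01', lab=lab)
dset = Dataset(name='toto.npy', content_object=subj)
assert dset.content_object is subj
```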