Skip to content

Commit

Permalink
Release 1.9.0 (#776)
Browse files Browse the repository at this point in the history
* Generic object added to Dataset model

* Migration file; nullable fields

* force change to filename_pattern field (previous migration failed to apply correctly)

* Fix migrations in tests

* added subjectTrials fixture

* Fix REST tests

* timezone.utc -> datetime.utcnow

* Fix test

* bump version number

* GitHub Actions generated requirements_frozen.txt

* rephrase password differences explicitly

* GitHub Actions generated requirements_frozen.txt

---------

Co-authored-by: Miles Wells <[email protected]>
Co-authored-by: github-actions <[email protected]>
  • Loading branch information
3 people authored Feb 13, 2023
1 parent 661e137 commit a4e3787
Show file tree
Hide file tree
Showing 13 changed files with 203 additions and 46 deletions.
2 changes: 1 addition & 1 deletion alyx/alyx/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
VERSION = __version__ = '1.8.0'
VERSION = __version__ = '1.9.0'
11 changes: 11 additions & 0 deletions alyx/data/fixtures/data.datasettype.json
Original file line number Diff line number Diff line change
Expand Up @@ -1747,5 +1747,16 @@
"description": "A list of acquisition devices and behaviour protocols, along with the data and sync file location. The root keys are (devices, sync, task, procedures).",
"filename_pattern": "_*_experiment.description.*"
}
},
{
"model": "data.datasettype",
"pk": "e0614a49-8f18-46b1-a4d9-0710a080fd8d",
"fields": {
"json": null,
"name": "subjectTrials.table",
"created_by": null,
"description": "All trials data for a given subject, contains the same columns as trials.table, plus \"session\", \"session_start_time\" and \"session_number\"",
"filename_pattern": ""
}
}
]
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# Generated by Django 4.1.5 on 2023-01-31 12:34

from django.db import migrations, transaction, models
import django.db.models.deletion


def forwards(apps, _):
    """Copy each dataset's session onto its generic content_object field.

    Only datasets that have a session and no content_object yet are touched;
    already-assigned objects are left alone so the migration is re-runnable.
    """
    dataset_model = apps.get_model('data', 'Dataset')
    with_session = dataset_model.objects.filter(session__isnull=False)
    with transaction.atomic():
        for ds in with_session.iterator():
            if ds.content_object is not None:
                continue
            ds.content_object = ds.session
            ds.save()


def backwards(apps, _):
    """Reverse of ``forwards``: clear content_object on session-linked datasets."""
    dataset_model = apps.get_model('data', 'Dataset')
    with_session = dataset_model.objects.filter(session__isnull=False)
    with transaction.atomic():
        for ds in with_session.iterator():
            if ds.content_object is None:
                continue
            ds.content_object = None
            ds.save()


class Migration(migrations.Migration):
    # Adds the two columns backing a GenericForeignKey on Dataset
    # (content_type + object_id).  Both are nullable so existing rows remain
    # valid.  NOTE(review): the RunPython data copy below is commented out, so
    # this migration only changes the schema — existing sessions are NOT
    # copied into content_object here.

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        ('data', '0012_alter_datasettype_filename_pattern_and_more'),
    ]

    operations = [
        # FK to ContentType: identifies which model the generic target is.
        migrations.AddField(
            model_name='dataset',
            name='content_type',
            field=models.ForeignKey(null=True, blank=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype'),
            preserve_default=False,
        ),
        # UUID of the target row; together with content_type it resolves
        # the GenericForeignKey declared on the Dataset model.
        migrations.AddField(
            model_name='dataset',
            name='object_id',
            field=models.UUIDField(
                null=True, blank=True, help_text='UUID, an object of content_type with this ID must already exist to attach a note.'),
            preserve_default=False,
        ),
        # migrations.RunPython(forwards, backwards)
    ]
44 changes: 44 additions & 0 deletions alyx/data/migrations/0014_alter_datasettype_filename_pattern.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# Generated by Django 4.1.5 on 2023-02-01 15:32
# NB: The previous migrations (0011 and 0012) somehow failed to set the filename_pattern field as
# nullable. Migrations 0014 and 0015 reverse and remake this change which apparently fixed this
# issue.

import alyx.base
from django.db import migrations, transaction

PATTERN = '$$$'


def fix_null_fields(apps, _):
    """Populate null filename_pattern fields before making the column not null.

    Each NULL pattern is replaced with the sentinel ``PATTERN`` prefix plus
    the dataset type name (unique, since name is unique), so ``null_fields``
    can later identify and undo exactly these rows.

    :param apps: historical app registry supplied by the migration framework
    :param _: unused schema editor argument
    :raises RuntimeError: if sentinel-prefixed patterns already exist — the
        backward migration would otherwise clobber pre-existing data
    """
    DatasetType = apps.get_model('data', 'DatasetType')
    # Explicit raise instead of `assert`: asserts are stripped under `python -O`,
    # which would silently disable this safety check.  `.exists()` also avoids
    # a full COUNT(*) when we only need a yes/no answer.
    if DatasetType.objects.filter(filename_pattern__startswith=PATTERN).exists():
        raise RuntimeError(
            'filename_pattern values starting with %r already exist; '
            'refusing to run migration' % PATTERN)
    with transaction.atomic():
        for dtype in DatasetType.objects.filter(filename_pattern__isnull=True).iterator():
            dtype.filename_pattern = PATTERN + dtype.name
            dtype.save()


def null_fields(apps, _):
    """Restore NULL for every filename_pattern previously filled by ``fix_null_fields``."""
    dtype_model = apps.get_model('data', 'DatasetType')
    # Rows touched by the forward step are identifiable by the sentinel prefix.
    flagged = dtype_model.objects.filter(filename_pattern__startswith=PATTERN)
    with transaction.atomic():
        for row in flagged.iterator():
            row.filename_pattern = None
            row.save()


class Migration(migrations.Migration):
    # First half of the 0014/0015 pair that reverses and remakes the
    # filename_pattern nullability change (see module header).  This one
    # fills NULLs with a sentinel, then makes the column NOT NULL.

    dependencies = [
        ('data', '0013_dataset_content_type_dataset_object_id'),
    ]

    operations = [
        # Must run before AlterField: the column cannot become NOT NULL
        # while NULL values remain.
        migrations.RunPython(fix_null_fields, null_fields),
        # Note: no null=True here — this intentionally makes the column NOT NULL;
        # migration 0015 re-adds nullability.
        migrations.AlterField(
            model_name='datasettype',
            name='filename_pattern',
            field=alyx.base.CharNullField(blank=True, help_text="File name pattern (with wildcards) for this file in ALF naming convention. E.g. 'spikes.times.*' or '*.timestamps.*', or 'spikes.*.*' for a DataCollection, which would include all files starting with the word 'spikes'. NB: Case-insensitive matching.If null, the name field must match the object.attribute part of the filename.", max_length=255, unique=True),
        ),
    ]
44 changes: 44 additions & 0 deletions alyx/data/migrations/0015_alter_datasettype_filename_pattern.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# Generated by Django 4.1.5 on 2023-02-01 15:33
# NB: The previous migrations (0011 and 0012) somehow failed to set the filename_pattern field as
# nullable. Migrations 0014 and 0015 reverse and remake this change which apparently fixed this
# issue.

import alyx.base
from django.db import migrations, transaction

PATTERN = '$$$'


def fix_null_fields(apps, _):
    """Populate null filename_pattern fields before making column not null"""
    dtype_model = apps.get_model('data', 'DatasetType')
    # Safety check: no existing pattern may already carry the sentinel prefix,
    # otherwise the reverse step could not distinguish our rows from real data.
    assert not dtype_model.objects.filter(filename_pattern__startswith=PATTERN).count()
    null_patterns = dtype_model.objects.filter(filename_pattern__isnull=True)
    with transaction.atomic():
        for record in null_patterns.iterator():
            record.filename_pattern = f'{PATTERN}{record.name}'
            record.save()


def null_fields(apps, _):
    """Reset previously null filename_pattern fields"""
    dtype_model = apps.get_model('data', 'DatasetType')
    sentinel_rows = dtype_model.objects.filter(filename_pattern__startswith=PATTERN)
    with transaction.atomic():
        for record in sentinel_rows.iterator():
            record.filename_pattern = None
            record.save()


class Migration(migrations.Migration):
    # Second half of the 0014/0015 pair (see module header): re-adds
    # null=True to filename_pattern, then restores NULL in the rows that
    # 0014 filled with the sentinel value.

    dependencies = [
        ('data', '0014_alter_datasettype_filename_pattern'),
    ]

    operations = [
        # Column becomes nullable again (null=True re-added vs migration 0014).
        migrations.AlterField(
            model_name='datasettype',
            name='filename_pattern',
            field=alyx.base.CharNullField(blank=True, help_text="File name pattern (with wildcards) for this file in ALF naming convention. E.g. 'spikes.times.*' or '*.timestamps.*', or 'spikes.*.*' for a DataCollection, which would include all files starting with the word 'spikes'. NB: Case-insensitive matching.If null, the name field must match the object.attribute part of the filename.", max_length=255, null=True, unique=True),
        ),
        # Forward/backward functions deliberately swapped relative to 0014:
        # forwards here clears the sentinel values, backwards re-fills them.
        migrations.RunPython(null_fields, fix_null_fields),
    ]
29 changes: 13 additions & 16 deletions alyx/data/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
from django.core.validators import RegexValidator
from django.db import models
from django.utils import timezone
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType

from alyx.settings import TIME_ZONE, AUTH_USER_MODEL
from actions.models import Session
Expand Down Expand Up @@ -294,6 +296,13 @@ class Dataset(BaseExperimentalData):
"""
objects = DatasetManager()

# Generic foreign key to arbitrary model instances allows polymorphic relationships
content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE, null=True, blank=True)
object_id = models.UUIDField(help_text="UUID, an object of content_type with this "
"ID must already exist to attach a note.",
null=True, blank=True)
content_object = GenericForeignKey()

file_size = models.BigIntegerField(blank=True, null=True, help_text="Size in bytes")

md5 = models.UUIDField(blank=True, null=True,
Expand Down Expand Up @@ -334,31 +343,19 @@ class Dataset(BaseExperimentalData):
@property
def is_online(self):
fr = self.file_records.filter(data_repository__globus_is_personal=False)
if fr:
return all(fr.values_list('exists', flat=True))
else:
return False
return bool(fr.count() and all(fr.values_list('exists', flat=True)))

@property
def is_protected(self):
tags = self.tags.filter(protected=True)
if tags.count() > 0:
return True
else:
return False
return bool(self.tags.filter(protected=True).count())

@property
def is_public(self):
tags = self.tags.filter(public=True)
if tags.count() > 0:
return True
else:
return False
return bool(self.tags.filter(public=True).count())

@property
def data_url(self):
records = self.file_records.filter(data_repository__data_url__isnull=False,
exists=True)
records = self.file_records.filter(data_repository__data_url__isnull=False, exists=True)
# returns preferentially globus non-personal endpoint
if records:
order_keys = ('data_repository__globus_is_personal', '-data_repository__name')
Expand Down
16 changes: 13 additions & 3 deletions alyx/data/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,26 @@
from django.db.models import ProtectedError

from data.models import Dataset, DatasetType, Tag
from subjects.models import Subject
from misc.models import Lab
from data.transfers import get_dataset_type


class TestModel(TestCase):
def test_model_methods(self):
(dset, _) = Dataset.objects.get_or_create(name='toto.npy')

assert dset.is_online is False
assert dset.is_public is False
assert dset.is_protected is False
self.assertIs(dset.is_online, False)
self.assertIs(dset.is_public, False)
self.assertIs(dset.is_protected, False)

def test_generic_foreign_key(self):
# Attempt to associate a dataset with a subject
self.lab = Lab.objects.create(name='test_lab')
subj = Subject.objects.create(nickname='foo', birth_date='2018-09-01', lab=self.lab)
dset = Dataset(name='toto.npy', content_object=subj)

self.assertIs(dset.content_object, subj)

def test_delete(self):
(dset, _) = Dataset.objects.get_or_create(name='foo.npy')
Expand Down
14 changes: 7 additions & 7 deletions alyx/data/tests_rest.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import datetime
import os.path as op
from pathlib import PurePosixPath
import uuid

from django.contrib.auth import get_user_model
Expand Down Expand Up @@ -372,11 +372,11 @@ def _assert_registration(self, r, data):

self.assertEqual(d0['file_records'][0]['data_repository'], 'dr')
self.assertEqual(d0['file_records'][0]['relative_path'],
op.join(data['path'], 'a.b.e1'))
PurePosixPath(data['path'], 'a.b.e1').as_posix())

self.assertEqual(d1['file_records'][0]['data_repository'], 'dr')
self.assertEqual(d1['file_records'][0]['relative_path'],
op.join(data['path'], 'a.c.e2'))
PurePosixPath(data['path'], 'a.c.e2').as_posix())

def test_register_existence_options(self):

Expand Down Expand Up @@ -509,8 +509,8 @@ def test_register_with_revision(self):
self.assertTrue(not r['revision'])
self.assertEqual(r['collection'], 'dir')
# Check the revision relative path doesn't exist
self.assertTrue(r['file_records'][0]['relative_path'] ==
op.join(data['path'], data['filenames']))
self.assertEqual(r['file_records'][0]['relative_path'],
PurePosixPath(data['path'], data['filenames']).as_posix())

# Now test specifying a revision in path
data = {'path': '%s/2018-01-01/002/dir/#v1#' % self.subject,
Expand All @@ -523,7 +523,7 @@ def test_register_with_revision(self):
self.assertTrue(r['revision'] == 'v1')
self.assertEqual('dir', r['collection'])
# Check file record relative path includes revision
self.assertTrue('#v1#' in r['file_records'][0]['relative_path'])
self.assertIn('#v1#', r['file_records'][0]['relative_path'])

# Now test specifying a collection and a revision in filename
data = {'path': '%s/2018-01-01/002/dir' % self.subject,
Expand All @@ -535,7 +535,7 @@ def test_register_with_revision(self):
self.assertTrue(r['revision'] == 'v1')
self.assertTrue(r['collection'] == 'dir/dir1')
# Check file record relative path includes revision
self.assertTrue('#v1#' in r['file_records'][0]['relative_path'])
self.assertIn('#v1#', r['file_records'][0]['relative_path'])

# Test that giving nested revision folders gives out an error
data = {'path': '%s/2018-01-01/002/dir' % self.subject,
Expand Down
13 changes: 7 additions & 6 deletions alyx/data/transfers.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import os.path as op
import re
import time
from pathlib import Path
from pathlib import Path, PurePosixPath

from django.db.models import Case, When, Count, Q, F
import globus_sdk
Expand Down Expand Up @@ -88,7 +88,7 @@ def _get_absolute_path(file_record):
path2 = path2[6:]
if path2.startswith('/'):
path2 = path2[1:]
path = op.join(path1, path2)
path = PurePosixPath(path1, path2).as_posix()
return path


Expand Down Expand Up @@ -248,7 +248,7 @@ def _create_dataset_file_records(

assert session is not None
revision_name = f'#{revision.name}#' if revision else ''
relative_path = op.join(rel_dir_path, collection or '', revision_name, filename)
relative_path = PurePosixPath(rel_dir_path, collection or '', revision_name, filename)
dataset_type = get_dataset_type(filename, DatasetType.objects.all())
data_format = get_data_format(filename)
assert dataset_type
Expand All @@ -261,8 +261,9 @@ def _create_dataset_file_records(

# Get or create the dataset.
dataset, is_new = Dataset.objects.get_or_create(
collection=collection, name=filename, session=session,
dataset_type=dataset_type, data_format=data_format, revision=revision)
collection=collection, name=filename, session=session, # content_object=session,
dataset_type=dataset_type, data_format=data_format, revision=revision
)
dataset.default_dataset = default is True
dataset.save()

Expand Down Expand Up @@ -303,7 +304,7 @@ def _create_dataset_file_records(
exists = repo in exists_in
# Do not create a new file record if it already exists.
fr, is_new = FileRecord.objects.get_or_create(
dataset=dataset, data_repository=repo, relative_path=relative_path)
dataset=dataset, data_repository=repo, relative_path=relative_path.as_posix())
if is_new or is_patched:
fr.exists = exists
fr.json = None # this is important if a dataset is patched during an ongoing transfer
Expand Down
2 changes: 1 addition & 1 deletion alyx/subjects/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,7 +263,7 @@ def nicknamesafe(self):
def age_days(self):
if (self.death_date is None and self.birth_date is not None):
# subject still alive
age = datetime.now(timezone.utc).date() - self.birth_date
age = datetime.utcnow().date() - self.birth_date
elif (self.death_date is not None and self.birth_date is not None):
# subject is dead
age = self.death_date - self.birth_date
Expand Down
2 changes: 1 addition & 1 deletion alyx/subjects/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ class SubjectList(generics.ListCreateAPIView):
queryset = SubjectListSerializer.setup_eager_loading(queryset)
serializer_class = SubjectListSerializer
permission_classes = rest_permission_classes()
filter_class = SubjectFilter
filterset_class = SubjectFilter


class SubjectDetail(generics.RetrieveUpdateDestroyAPIView):
Expand Down
4 changes: 3 additions & 1 deletion docs/gettingstarted.md
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,9 @@ cd alyx
python manage.py runserver
```
Then go to http://localhost:8000/admin, connect as `admin:admin` and change your password.
NB: the password above is the postgres database user password. It is used by Django only to connect to the database, and is distinct from any user password on the admin website.

You can then visit http://localhost:8000/admin, connect as `admin:admin` (i.e. username `admin` and password `admin`) and update your admin interface password.


### macOS
Expand Down
Loading

0 comments on commit a4e3787

Please sign in to comment.