Skip to content

Commit

Permalink
tests and bug fix on write
Browse files — browse the repository at this point in the history
  • Loading branch information
mavaylon1 committed Sep 27, 2023
1 parent a872b59 commit d1c987e
Show file tree
Hide file tree
Showing 8 changed files with 49 additions and 41 deletions.
2 changes: 1 addition & 1 deletion src/hdmf/backends/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def write(self, **kwargs):
# If HERD is not provided, create a new one
else:
from hdmf.common import HERD
herd = HERD()
herd = HERD(type_map=self.manager.type_map)

# add_ref_term_set to search for and resolve the TermSetWrapper
herd.add_ref_term_set(container) # container would be the NWBFile
Expand Down
2 changes: 1 addition & 1 deletion src/hdmf/build/objectmapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -569,7 +569,7 @@ def get_attr_value(self, **kwargs):
# This is when the wrapped item is an attribute
# Refer to h5tools.py for wrapped datasets
attr_val = attr_val.value
if attr_val is not None:
elif attr_val is not None:
attr_val = self.__convert_string(attr_val, spec)
spec_dt = self.__get_data_type(spec)
if spec_dt is not None:
Expand Down
4 changes: 1 addition & 3 deletions src/hdmf/common/resources.py
Original file line number Diff line number Diff line change
Expand Up @@ -414,7 +414,7 @@ def _get_file_from_container(self, **kwargs):
def __check_termset_wrapper(self, **kwargs):
"""
Takes a list of objects and checks the fields for TermSetWrapper.
:return: [[object, attribute_name, wrapper1], [object, attribute_name, wrapper2], ...]
:return: [[object1, attribute_name1, wrapper1], [object2, attribute_name2, wrapper2], ...]
"""
objects = kwargs['objects']

Expand All @@ -427,10 +427,8 @@ def __check_termset_wrapper(self, **kwargs):
for attribute in obj_fields:
attr = getattr(obj, attribute)
if isinstance(attr, TermSetWrapper):
# breakpoint()
# Search objects that are wrapped
ret.append([obj, attribute, attr])
# breakpoint()
return ret

@docval({'name': 'root_container', 'type': HERDManager,
Expand Down
9 changes: 1 addition & 8 deletions src/hdmf/common/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,14 +49,7 @@ def __init__(self, **kwargs):
def add_row(self, **kwargs):
"""Append a data value to this VectorData column"""
val = getargs('val', kwargs)
if isinstance(self.data, TermSetWrapper):
if self.data.termset.validate(term=val):
self.data.append(val)
else:
msg = ("%s is not in the term set." % val)
raise ValueError(msg)
else:
self.append(val)
self.append(val)

def get(self, key, **kwargs):
"""
Expand Down
2 changes: 1 addition & 1 deletion src/hdmf/data_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ def append_data(data, arg):
if isinstance(data, (list, DataIO)):
data.append(arg)
return data
elif type(data).__name__ == 'TermSetWrapper':
elif type(data).__name__ == 'TermSetWrapper': # circular import
data.append(arg)
return data
elif isinstance(data, np.ndarray):
Expand Down
10 changes: 3 additions & 7 deletions src/hdmf/term_set.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,10 +174,7 @@ def __enum_expander(self):

class TermSetWrapper:
"""
This class allows any HDF5 group, dataset, or attribute to have a TermSet.
In HDMF, a group is a Container object, a dataset is a Data object,
an attribute can be a reference type to an HDMF object or a base type, e.g., text.
This class allows any HDF5 dataset or attribute to have a TermSet.
"""
@docval({'name': 'termset',
'type': TermSet,
Expand All @@ -192,10 +189,10 @@ def __init__(self, **kwargs):
self.__validate()

def __validate(self):
# check if list, tuple, array, Data
# check if list, tuple, array
if isinstance(self.__value, (list, np.ndarray, tuple)): # TODO: Future ticket on DataIO support
values = self.__value
# create list if none of those
# create list if none of those -> mostly for attributes
else:
values = [self.__value]
# iteratively validate
Expand All @@ -220,7 +217,6 @@ def termset(self):
def dtype(self):
return self.__getattr__('dtype')

# TODO: Probably useful when dealing with DATAIO (Future)
def __getattr__(self, val):
"""
This method is to get attributes that are not defined in init.
Expand Down
2 changes: 1 addition & 1 deletion tests/unit/helpers/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -219,7 +219,7 @@ def get_foo_buildmanager():
datasets=[
DatasetSpec(
"an example dataset",
"int",
"text",
name="my_data",
attributes=[AttributeSpec("attr2", "an example integer attribute", "int")],
)
Expand Down
59 changes: 40 additions & 19 deletions tests/unit/test_io_hdf5_h5tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -825,26 +825,47 @@ def test_roundtrip_pathlib_path(self):
self.assertListEqual(foofile.buckets['bucket1'].foos['foo1'].my_data,
read_foofile.buckets['bucket1'].foos['foo1'].my_data[:].tolist())

# @unittest.skipIf(not LINKML_INSTALLED, "optional LinkML module is not installed")
# def test_roundtrip_TermSetWrapper_dataset(self):
# terms = TermSet(term_schema_path='tests/unit/example_test_term_set.yaml')
# foo = Foo(name="species", attr1='attr1', attr2=0,
# my_data=TermSetWrapper(value=['Homo sapiens', 'Mus musculus'],
# termset=terms))
# foobucket = FooBucket('bucket1', [foo])
# foofile = FooFile(buckets=[foobucket])
#
# with HDF5IO(self.path, manager=self.manager, mode='w', herd_path='./HERD.zip') as io:
# io.write(foofile)
@unittest.skipIf(not LINKML_INSTALLED, "optional LinkML module is not installed")
def test_roundtrip_TermSetWrapper_dataset(self):
terms = TermSet(term_schema_path='tests/unit/example_test_term_set.yaml')
foo = Foo(name="species", attr1='attr1', attr2=0,
my_data=TermSetWrapper(value=['Homo sapiens', 'Mus musculus'],
termset=terms))
from hdmf.common import DynamicTable, VectorData

Check failure (GitHub Actions / ruff) on line 834 in tests/unit/test_io_hdf5_h5tools.py — Ruff F401 at 834:33: `hdmf.common.DynamicTable` imported but unused.

col1 = VectorData(

Check failure (GitHub Actions / ruff) on line 836 in tests/unit/test_io_hdf5_h5tools.py — Ruff F841 at 836:9: local variable `col1` is assigned to but never used.
name='Species_1',
description='...',
data=TermSetWrapper(value=['Homo sapiens'], termset=terms)
)
foobucket = FooBucket('bucket1', [foo])
foofile = FooFile(buckets=[foobucket])
#
# with HDF5IO(self.path, manager=self.manager, mode='r') as io:
# read_foofile = io.read()
# self.assertListEqual(foofile.buckets['bucket1'].foos['foo1'].my_data.value,
# read_foofile.buckets['bucket1'].foos['foo1'].my_data[:].tolist())

# @unittest.skipIf(not LINKML_INSTALLED, "optional LinkML module is not installed")
# def test_roundtrip_TermSetWrapper_attribute(self):
# pass
with HDF5IO(self.path, manager=get_foo_buildmanager(), mode='w', herd_path='./HERD.zip') as io:
io.write(foofile)

with HDF5IO(self.path, manager=self.manager, mode='r') as io:
read_foofile = io.read()
self.assertListEqual(foofile.buckets['bucket1'].foos['species'].my_data.value,
read_foofile.buckets['bucket1'].foos['species'].my_data[:].tolist())
remove_test_file('./HERD.zip')

@unittest.skipIf(not LINKML_INSTALLED, "optional LinkML module is not installed")
def test_roundtrip_TermSetWrapper_attribute(self):
terms = TermSet(term_schema_path='tests/unit/example_test_term_set.yaml')
foo = Foo(name="species", attr1=TermSetWrapper(value='Homo sapiens', termset=terms),
attr2=0, my_data=[1,2,3])
foobucket = FooBucket('bucket1', [foo])
foofile = FooFile(buckets=[foobucket])

with HDF5IO(self.path, manager=self.manager, mode='w', herd_path='./HERD.zip') as io:
io.write(foofile)

with HDF5IO(self.path, manager=self.manager, mode='r') as io:
read_foofile = io.read()
self.assertEqual(foofile.buckets['bucket1'].foos['species'].attr1.value,
read_foofile.buckets['bucket1'].foos['species'].attr1)
remove_test_file('./HERD.zip')


class TestHDF5IO(TestCase):
Expand Down

0 comments on commit d1c987e

Please sign in to comment.