Commit c60a68b
tests
mavaylon1 committed Sep 27, 2023
1 parent b87c323 commit c60a68b
Showing 5 changed files with 33 additions and 68 deletions.
2 changes: 1 addition & 1 deletion src/hdmf/build/objectmapper.py
@@ -911,7 +911,7 @@ def __add_attributes(self, builder, attributes, container, build_manager, source
            if spec.value is not None:
                attr_value = spec.value
            else:
-                attr_value = self.get_attr_value(spec, container, build_manager) #
+                attr_value = self.get_attr_value(spec, container, build_manager)
                if attr_value is None:
                    attr_value = spec.default_value

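For context, the logic around this hunk resolves an attribute's value by precedence: a fixed spec.value wins outright, otherwise the mapper reads the value off the container, and only then does spec.default_value apply. A minimal sketch of that precedence as a hypothetical free function (resolve_attr_value is not an hdmf API, just a restatement of the lines above):

    def resolve_attr_value(spec, container, build_manager, mapper):
        # A fixed value on the spec short-circuits everything else.
        if spec.value is not None:
            return spec.value
        # Otherwise ask the mapper to read the attribute off the container.
        attr_value = mapper.get_attr_value(spec, container, build_manager)
        # Fall back to the spec's default when the container has nothing.
        if attr_value is None:
            attr_value = spec.default_value
        return attr_value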
4 changes: 0 additions & 4 deletions src/hdmf/common/resources.py
@@ -548,10 +548,6 @@ def add_ref(self, **kwargs):
        if isinstance(container, Data):
            if attribute == 'data':
                attribute = None
-        # if attribute == 'my_data':
-        #     attribute = None
-        # if attribute == 'data':
-        #     attribute = None
        key = kwargs['key']
        field = kwargs['field']
        entity_id = kwargs['entity_id']
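The branch kept above normalizes attribute='data' to attribute=None when the container is itself a Data object, so a reference can target the Data container directly rather than its 'data' field. A hedged usage sketch (the VectorData values and entity identifiers are illustrative, and `file` stands in for the managing file container used in this repo's tests):

    from hdmf.common import HERD, VectorData

    species = VectorData(name='species', description='species names',
                         data=['Homo sapiens'])
    er = HERD()
    # attribute='data' on a Data container collapses to attribute=None,
    # per the surviving branch in add_ref above.
    er.add_ref(file=file, container=species, attribute='data',
               key='Homo sapiens', entity_id='NCBI:txid9606',
               entity_uri='https://www.ncbi.nlm.nih.gov/taxonomy/9606')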
14 changes: 0 additions & 14 deletions src/hdmf/common/table.py
@@ -586,23 +586,9 @@ def add_row(self, **kwargs):
        extra_columns = set(list(data.keys())) - set(list(self.__colids.keys()))
        missing_columns = set(list(self.__colids.keys())) - set(list(data.keys()))

-        bad_data = []
        for colname, colnum in self.__colids.items():
            if colname not in data:
                raise ValueError("column '%s' missing" % colname)
-            col = self.__df_cols[colnum]
-            if isinstance(col, VectorIndex):
-                continue
-            else:
-                if isinstance(col.data, TermSetWrapper):
-                    if col.data.termset.validate(term=data[colname]):
-                        continue
-                    else:
-                        bad_data.append(data[colname])
-
-        if len(bad_data)!=0:
-            msg = ('"%s" is not in the term set.' % ', '.join([str(item) for item in bad_data]))
-            raise ValueError(msg)

        # check to see if any of the extra columns just need to be added
        if extra_columns:
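The deleted block made add_row itself validate values against a column's TermSet; after this commit that responsibility presumably lives with TermSetWrapper, which carries the termset alongside the data. A hedged sketch of the wrapper-side pattern, reusing the termset and wrapper construction from this commit's tests (the table and column names are illustrative):

    from hdmf.common import DynamicTable, VectorData
    from hdmf.term_set import TermSet, TermSetWrapper

    terms = TermSet(term_schema_path='tests/unit/example_test_term_set.yaml')
    col = VectorData(name='species', description='species names',
                     data=TermSetWrapper(value=['Homo sapiens'], termset=terms))
    table = DynamicTable(name='animals', description='demo', columns=[col])
    # add_row no longer re-validates terms here; membership checking is
    # expected to happen inside the wrapper when the value is appended.
    table.add_row(species='Mus musculus')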
5 changes: 3 additions & 2 deletions tests/unit/common/test_resources.py
@@ -283,9 +283,10 @@ def test_check_termset_wrapper(self):

        er = HERD()
        ret = er._HERD__check_termset_wrapper(objs)
-        self.assertEqual(ret[0][0].__class__.__name__, 'VectorData')
+
+        self.assertTrue(isinstance(ret[0][0], VectorData))
        self.assertEqual(ret[0][1], 'data')
-        self.assertEqual(ret[0][2].__class__.__name__, 'TermSetWrapper')
+        self.assertTrue(isinstance(ret[0][2], TermSetWrapper))

    @unittest.skipIf(not LINKML_INSTALLED, "optional LinkML module is not installed")
    def test_add_ref_termset_data(self):
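Swapping the __class__.__name__ string checks for isinstance is the usual tightening: name comparison fails for subclasses and can pass for unrelated classes that happen to share a name, while isinstance follows inheritance. A small illustration (MyVectorData is a hypothetical subclass, not part of hdmf):

    from hdmf.common import VectorData

    class MyVectorData(VectorData):
        pass

    obj = MyVectorData(name='x', description='demo', data=[1, 2, 3])
    print(isinstance(obj, VectorData))             # True: subclasses count
    print(obj.__class__.__name__ == 'VectorData')  # False: the name check misses them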
76 changes: 29 additions & 47 deletions tests/unit/test_io_hdf5_h5tools.py
@@ -837,7 +837,7 @@ def test_roundtrip_TermSetWrapper_dataset(self):
        with HDF5IO(self.path, manager=get_foo_buildmanager("text"), mode='w', herd_path='./HERD.zip') as io:
            io.write(foofile)

-        with HDF5IO(self.path, manager=self.manager, mode='r') as io:
+        with HDF5IO(self.path, manager=get_foo_buildmanager("text"), mode='r') as io:
            read_foofile = io.read()
            self.assertListEqual(foofile.buckets['bucket1'].foos['species'].my_data.value,
                                 read_foofile.buckets['bucket1'].foos['species'].my_data[:].tolist())
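The read side now mirrors the write side: both use get_foo_buildmanager("text"), presumably because self.manager is not built against the text-typed Foo spec used at write time, so the wrapped dataset would not round-trip correctly under it.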
@@ -1071,52 +1071,34 @@ def test_io_read_herd_value_warn(self):

        self.remove_er_files()

-    # def test_io_write_extend_herd(self):
-    #     """
-    #     Test the optional write of HERD with extending an existing HERD instance.
-    #     """
-    #     # create a container that uses TermSetWrapper
-    #     terms = TermSet(term_schema_path='tests/unit/example_test_term_set.yaml')
-    #     foo = Foo(name="foo", attr1='attr1', attr2=0,
-    #               my_data=TermSetWrapper(value=['Homo sapiens', 'Mus musculus'],
-    #                                      termset=terms))
-    #     foobucket = FooBucket('bucket1', [foo])
-    #     file = FooFile(buckets=[foobucket])
-    #
-    #     er = HERD()
-    #     er.add_ref(file=file,
-    #                container=file,
-    #                key='special',
-    #                entity_id="id11",
-    #                entity_uri='url11')
-    #
-    #     with HDF5IO(self.path, manager=self.manager, mode='w', herd_path='./HERD.zip') as io:
-    #         io.write(file, herd=er)
-    #     with HDF5IO(self.path, manager=self.manager, mode='r', herd_path='./HERD.zip') as io:
-    #         container = io.read()
-    #         self.assertIsInstance(io.herd, HERD)
-    #
-    #     self.remove_er_files()
-    #
-    # def test_io_write_create_herd(self):
-    #     """
-    #     Test the optional write of HERD with an new instance.
-    #     """
-    #     # create a container that uses TermSetWrapper
-    #     terms = TermSet(term_schema_path='tests/unit/example_test_term_set.yaml')
-    #     foo = Foo(name="my_data", attr1='attr1', attr2=0,
-    #               my_data=TermSetWrapper(value=['Homo sapiens', 'Mus musculus'],
-    #                                      termset=terms))
-    #     foobucket = FooBucket('bucket1', [foo])
-    #     file = FooFile(buckets=[foobucket])
-    #
-    #     with HDF5IO(self.path, manager=self.manager, mode='w', herd_path='./HERD.zip') as io:
-    #         io.write(file)
-    #     with HDF5IO(self.path, manager=self.manager, mode='r', herd_path='./HERD.zip') as io:
-    #         container = io.read()
-    #         self.assertIsInstance(io.herd, HERD)
-    #
-    #     self.remove_er_files()
+    def test_io_write_extend_herd(self):
+        """
+        Test the optional write of HERD with extending an existing HERD instance.
+        """
+        terms = TermSet(term_schema_path='tests/unit/example_test_term_set.yaml')
+        foo = Foo(name="species", attr1='attr1', attr2=0,
+                  my_data=TermSetWrapper(value=['Homo sapiens', 'Mus musculus'],
+                                         termset=terms))
+
+        foobucket = FooBucket('bucket1', [foo])
+        foofile = FooFile(buckets=[foobucket])
+
+        er = HERD()
+        er.add_ref(file=foofile,
+                   container=foofile,
+                   key='special',
+                   entity_id="id11",
+                   entity_uri='url11')
+
+        with HDF5IO(self.path, manager=get_foo_buildmanager("text"), mode='w', herd_path='./HERD.zip') as io:
+            io.write(foofile)
+
+        with HDF5IO(self.path, manager=get_foo_buildmanager("text"), mode='r') as io:
+            read_foofile = io.read()
+            self.assertListEqual(foofile.buckets['bucket1'].foos['species'].my_data.value,
+                                 read_foofile.buckets['bucket1'].foos['species'].my_data[:].tolist())
+
+        self.remove_er_files()
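The herd_path argument makes write deposit the HERD instance in a sidecar archive next to the HDF5 file. The previously commented-out version also exercised the read side of that sidecar; a hedged sketch of that read-back pattern, following the deleted lines above (with the manager swapped to match the new test):

    with HDF5IO(self.path, manager=get_foo_buildmanager("text"), mode='r', herd_path='./HERD.zip') as io:
        container = io.read()
        # If the sidecar was found and loaded, the IO object exposes it.
        self.assertIsInstance(io.herd, HERD)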


class TestMultiWrite(TestCase):
