Merge pull request #78 from catalystneuro/remove_top_level_imports
Remove top level imports of big modules
CodyCBakerPhD authored Sep 3, 2022
2 parents ae08b4a + dab71df commit 44f0188
Showing 5 changed files with 52 additions and 41 deletions.
6 changes: 6 additions & 0 deletions .github/workflows/testing.yml
@@ -37,6 +37,11 @@ jobs:
      - name: Install neuroconv with minimal requirements
        run: pip install .[test]
      - name: Run import tests
        run: |
          pytest tests/imports.py::TestImportStructure::test_top_level
          pytest tests/imports.py::TestImportStructure::test_tools
          pytest tests/imports.py::TestImportStructure::test_datainterfaces
      - name: Run minimal tests
        run: pytest tests/test_minimal -n auto --dist loadscope

@@ -49,6 +54,7 @@ jobs:

      - name: Install with icephys requirements
        run: pip install .[icephys]

      #- name: Run icephys tests  # There are no icephys specific tests without data
      #  run: pytest tests/test_icephys -n auto --dist loadscope
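
Each line of the new "Run import tests" step is a separate pytest invocation so that every namespace check starts from a fresh interpreter; in a single process, modules attached to the neuroconv namespace by one test would leak into the next. A minimal sketch of reproducing the same isolation locally (the subprocess driver below is illustrative, not part of this PR):

# Illustrative only: run each namespace test in its own interpreter so that
# imports performed by one test cannot leak into the next.
import subprocess
import sys

tests = [
    "tests/imports.py::TestImportStructure::test_top_level",
    "tests/imports.py::TestImportStructure::test_tools",
    "tests/imports.py::TestImportStructure::test_datainterfaces",
]
for test in tests:
    subprocess.run([sys.executable, "-m", "pytest", test], check=True)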

4 changes: 0 additions & 4 deletions src/neuroconv/__init__.py
@@ -1,6 +1,2 @@
from .nwbconverter import NWBConverter

# TODO: temporarily disabled until decision to remove in other PR
# from .tools import spikeinterface, roiextractors, neo

from .tools.yaml_conversion_specification import run_conversion_from_yaml
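
With these top-level imports removed, import neuroconv no longer pulls in the heavy spikeinterface, roiextractors, and neo stacks; only NWBConverter and run_conversion_from_yaml are imported eagerly. A small sketch of the intended usage after this change (assuming the relevant optional dependencies are installed):

import neuroconv  # fast: only NWBConverter and run_conversion_from_yaml load eagerly

# The heavy wrappers are now opt-in, imported explicitly where they are needed.
from neuroconv.tools import spikeinterface  # the spikeinterface stack loads here, not at `import neuroconv`
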
7 changes: 0 additions & 7 deletions src/neuroconv/tools/__init__.py
@@ -1,8 +1 @@
# To be treated as sub-modules or plugins
# TODO: temporarily disabled until decision to remove in other PR
# from .spikeinterface import spikeinterface
# from .roiextractors import roiextractors
# from .neo import neo

# Basically utils but rely on external dependencies
from .importing import get_package
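
After this change the only name neuroconv.tools exports eagerly is get_package, a helper for importing optional dependencies on demand. A rough sketch of that pattern (illustrative only; the real implementation in neuroconv.tools.importing may differ in signature and error handling):

# Illustrative sketch of an on-demand import helper, not the actual neuroconv code.
import importlib
from types import ModuleType


def get_package(package_name: str, installation_instructions: str = "pip install <package_name>") -> ModuleType:
    """Import an optional dependency, raising a friendly hint if it is missing."""
    try:
        return importlib.import_module(name=package_name)
    except ModuleNotFoundError:
        raise ModuleNotFoundError(
            f"The required package '{package_name}' is not installed! "
            f"To install it, run: {installation_instructions}"
        )
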
50 changes: 33 additions & 17 deletions tests/test_minimal/test_import_structure.py → tests/imports.py
@@ -1,10 +1,11 @@
from unittest import TestCase
# This module is meant for the tests to be run as stand-alone so as to emulate a fresh import
# Run them by using:
# pytest tests/imports.py::TestImportStructure::test_name

import neuroconv
from neuroconv import datainterfaces
from unittest import TestCase


def _strip_magic_module_attributes(dictionary: dict) -> dict:
def _strip_magic_module_attributes(ls: list) -> list:
    exclude_keys = [
        "__name__",
        "__doc__",
@@ -16,31 +17,46 @@ def _strip_magic_module_attributes(dictionary: dict) -> dict:
        "__cached__",
        "__builtins__",
    ]
    return {k: v for k, v in dictionary.items() if k not in exclude_keys}
    return list(filter(lambda key: key not in exclude_keys, ls))


class TestImportStructure(TestCase):
    def test_outer_import_structure(self):
        current_structure = _strip_magic_module_attributes(dictionary=neuroconv.__dict__)
    def test_top_level(self):
        import neuroconv

        current_structure = _strip_magic_module_attributes(ls=neuroconv.__dict__)
        expected_structure = [
            # Sub-modules
            "nwbconverter",
            "basedatainterface",
            "baseextractorinterface",
            "datainterfaces",
            "tools",
            "utils",
            "tools",  # Attached to namespace by NWBConverter import
            "utils",  # Attached to namespace by NWBConverter import
            # Exposed attributes
            "NWBConverter",
            # "spikeinterface",  # TODO: decide what to do here
            # "roiextractors",
            # "neo",
            "run_conversion_from_yaml",
        ]
        self.assertCountEqual(first=current_structure, second=expected_structure)

    def test_datainterfaces_import_structure(self):
        current_structure = _strip_magic_module_attributes(dictionary=datainterfaces.__dict__)
    def test_tools(self):
        """Python dir() calls (and __dict__ as well) update dynamically based on global imports."""

        from neuroconv import tools

        current_structure = _strip_magic_module_attributes(ls=tools.__dict__)
        expected_structure = [
            # Sub-Packages
            "yaml_conversion_specification",  # Attached to namespace by top __init__ call of NWBConverter
            # Sub-modules
            "importing",  # Attached to namespace by importing get_package
            "nwb_helpers",  # Attached to namespace by top __init__ call of NWBConverter
            # Functions imported on the __init__
            "get_package",
        ]
        self.assertCountEqual(first=current_structure, second=expected_structure)

    def test_datainterfaces(self):
        from neuroconv import datainterfaces

        current_structure = _strip_magic_module_attributes(ls=datainterfaces.__dict__)
        expected_structure = [
            # Sub-modules
            "behavior",
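
These checks are order-sensitive because importing a sub-module attaches it to its parent package's namespace as a side effect, which is exactly why each test is run in its own pytest process. A quick illustration in a fresh interpreter (assuming neuroconv and its ecephys extras are installed):

# Run in a fresh interpreter:
import neuroconv

print("tools" in vars(neuroconv))  # True: attached by the NWBConverter import chain
print("datainterfaces" in vars(neuroconv))  # True: attached via the top-level __init__ import chain

from neuroconv.tools import spikeinterface  # noqa: F401

print("spikeinterface" in vars(neuroconv.tools))  # True only after the explicit import above
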
26 changes: 13 additions & 13 deletions tests/test_ecephys/test_tools_spikeextractors.py
@@ -17,7 +17,7 @@
    get_default_nwbfile_metadata,
)

# from neuroconv import spikeinterface  # TODO: testing aliased import; possibly remove
from neuroconv.tools import spikeinterface  # testing aliased import
from neuroconv.tools.spikeinterface import (
    get_nwb_metadata,
    write_recording,
@@ -135,12 +135,12 @@ def check_si_roundtrip(self, path: FilePathType):
    def test_write_recording(self):
        path = self.test_dir + "/test.nwb"

        # spikeinterface.write_recording(self.RX, path, metadata=self.placeholder_metadata)  # testing aliased import
        # RX_nwb = se.NwbRecordingExtractor(path)
        # check_recording_return_types(RX_nwb)
        # check_recordings_equal(self.RX, RX_nwb)
        # check_dumping(RX_nwb)
        # del RX_nwb
        spikeinterface.write_recording(self.RX, path, metadata=self.placeholder_metadata)  # testing aliased import
        RX_nwb = se.NwbRecordingExtractor(path)
        check_recording_return_types(RX_nwb)
        check_recordings_equal(self.RX, RX_nwb)
        check_dumping(RX_nwb)
        del RX_nwb

        write_recording(recording=self.RX, nwbfile_path=path, overwrite=True, metadata=self.placeholder_metadata)
        RX_nwb = se.NwbRecordingExtractor(path)
@@ -288,12 +288,12 @@ def test_write_sorting(self):
        path = self.test_dir + "/test.nwb"
        sf = self.RX.get_sampling_frequency()

        # # Append sorting to existing file
        # write_recording(recording=self.RX, nwbfile_path=path, overwrite=True, metadata=self.placeholder_metadata)
        # spikeinterface.write_sorting(sorting=self.SX, nwbfile_path=path, overwrite=False)  # testing aliased import
        # SX_nwb = se.NwbSortingExtractor(path)
        # check_sortings_equal(self.SX, SX_nwb)
        # check_dumping(SX_nwb)
        # Append sorting to existing file
        write_recording(recording=self.RX, nwbfile_path=path, overwrite=True, metadata=self.placeholder_metadata)
        spikeinterface.write_sorting(sorting=self.SX, nwbfile_path=path, overwrite=False)  # testing aliased import
        SX_nwb = se.NwbSortingExtractor(path)
        check_sortings_equal(self.SX, SX_nwb)
        check_dumping(SX_nwb)

        # Test for handling unit property descriptions argument
        property_descriptions = dict(stability="This is a description of stability.")
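
The re-enabled blocks exercise the aliased module import: from neuroconv.tools import spikeinterface binds the sub-module, while from neuroconv.tools.spikeinterface import write_recording binds one of its attributes, and both names resolve to the same function objects. A short check of that equivalence (assuming the ecephys extras are installed):

from neuroconv.tools import spikeinterface
from neuroconv.tools.spikeinterface import write_recording, write_sorting

# The alias import binds the sub-module; the direct import binds its attributes.
# Both routes refer to the same function objects.
assert spikeinterface.write_recording is write_recording
assert spikeinterface.write_sorting is write_sorting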
