diff --git a/+file/fillExport.m b/+file/fillExport.m index 378a52e0..9a4a242c 100644 --- a/+file/fillExport.m +++ b/+file/fillExport.m @@ -235,6 +235,14 @@ propertyChecks{end+1} = sprintf(['~isempty(%1$s) ' ... '&& ~isa(%1$s, ''types.untyped.SoftLink'') ' ... '&& ~isa(%1$s, ''types.untyped.ExternalLink'')'], propertyReference); + + % Properties that are required and dependent will not be exported + % if the property they depend on are unset (empty). Ensure such cases + % are warned against if they occur. + if prop.required && ~prop.readonly + warnIfNotExportedString = sprintf('obj.warnIfPropertyAttributeNotExported(''%s'', ''%s'', fullpath)', name, depPropname); + warningNeededCheck = sprintf('isempty(obj.%s) && ~isempty(obj.%s)', depPropname, name); + end end if ~prop.required @@ -242,8 +250,15 @@ end if ~isempty(propertyChecks) - dataExportString = sprintf('if %s\n%s\nend' ... + dataExportString = sprintf('if %s\n%s' ... , strjoin(propertyChecks, ' && '), file.addSpaces(dataExportString, 4) ... ); + if prop.required && ~prop.readonly + dataExportString = sprintf('%s\nelseif %s\n%s\nend' ... + , dataExportString, warningNeededCheck, file.addSpaces(warnIfNotExportedString, 4) ... + ); + else + dataExportString = sprintf('%s\nend', dataExportString); + end end -end \ No newline at end of file +end diff --git a/+tests/+unit/nwbExportTest.m b/+tests/+unit/nwbExportTest.m new file mode 100644 index 00000000..abd45868 --- /dev/null +++ b/+tests/+unit/nwbExportTest.m @@ -0,0 +1,72 @@ +classdef nwbExportTest < matlab.unittest.TestCase + + properties + NwbObject + OutputFolder = "out" + end + + methods (TestClassSetup) + function setupClass(testCase) + % Get the root path of the matnwb repository + rootPath = misc.getMatnwbDir(); + + % Use a fixture to add the folder to the search path + testCase.applyFixture(matlab.unittest.fixtures.PathFixture(rootPath)); + + % Use a fixture to create a temporary working directory + testCase.applyFixture(matlab.unittest.fixtures.WorkingFolderFixture); + generateCore('savedir', '.'); + end + end + + methods (TestMethodSetup) + function setupMethod(testCase) + testCase.NwbObject = testCase.initNwbFile(); + + if isfolder( testCase.OutputFolder ) + rmdir(testCase.OutputFolder, "s") + end + mkdir(testCase.OutputFolder) + end + end + + methods (Test) + function testExportDependentAttributeWithMissingParentA(testCase) + testCase.NwbObject.general_source_script_file_name = 'my_test_script.m'; + nwbFilePath = fullfile(testCase.OutputFolder, 'test_part1.nwb'); + testCase.verifyWarning(@(f, fn) nwbExport(testCase.NwbObject, nwbFilePath), 'NWB:DependentAttributeNotExported') + + % Add value for dataset which attribute depends on and export again + testCase.NwbObject.general_source_script = 'my test'; + nwbFilePath = fullfile(testCase.OutputFolder, 'test_part2.nwb'); + testCase.verifyWarningFree(@(f, fn) nwbExport(testCase.NwbObject, nwbFilePath)) + end + + function testExportDependentAttributeWithMissingParentB(testCase) + time_series = types.core.TimeSeries( ... + 'data', linspace(0, 0.4, 50), ... + 'starting_time_rate', 10.0, ... + 'description', 'a test series', ... + 'data_unit', 'n/a' ... 
+ ); + + testCase.NwbObject.acquisition.set('time_series', time_series); + nwbFilePath = fullfile(testCase.OutputFolder, 'test_part1.nwb'); + testCase.verifyWarning(@(f, fn) nwbExport(testCase.NwbObject, nwbFilePath), 'NWB:DependentAttributeNotExported') + + % Add value for dataset which attribute depends on and export again + time_series.starting_time = 1; + nwbFilePath = fullfile(testCase.OutputFolder, 'test_part2.nwb'); + testCase.verifyWarningFree(@(f, fn) nwbExport(testCase.NwbObject, nwbFilePath)) + end + end + + methods (Static) + function nwb = initNwbFile() + nwb = NwbFile( ... + 'session_description', 'test file for nwb export', ... + 'identifier', 'export_test', ... + 'session_start_time', datetime("now", 'TimeZone', 'local') ); + end + end +end diff --git a/+types/+core/ImageSeries.m b/+types/+core/ImageSeries.m index f0ec2f68..8e5addb3 100644 --- a/+types/+core/ImageSeries.m +++ b/+types/+core/ImageSeries.m @@ -175,6 +175,8 @@ end if ~isempty(obj.external_file) && ~isa(obj.external_file, 'types.untyped.SoftLink') && ~isa(obj.external_file, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/external_file/starting_frame'], obj.external_file_starting_frame, 'forceArray'); + elseif isempty(obj.external_file) && ~isempty(obj.external_file_starting_frame) + obj.warnIfPropertyAttributeNotExported('external_file_starting_frame', 'external_file', fullpath) end if ~isempty(obj.format) if startsWith(class(obj.format), 'types.untyped.') diff --git a/+types/+core/ImagingPlane.m b/+types/+core/ImagingPlane.m index ddfdcb59..c47afe39 100644 --- a/+types/+core/ImagingPlane.m +++ b/+types/+core/ImagingPlane.m @@ -388,6 +388,8 @@ end if ~isempty(obj.grid_spacing) && ~isa(obj.grid_spacing, 'types.untyped.SoftLink') && ~isa(obj.grid_spacing, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/grid_spacing/unit'], obj.grid_spacing_unit); + elseif isempty(obj.grid_spacing) && ~isempty(obj.grid_spacing_unit) + obj.warnIfPropertyAttributeNotExported('grid_spacing_unit', 'grid_spacing', fullpath) end if ~isempty(obj.imaging_rate) if startsWith(class(obj.imaging_rate), 'types.untyped.') @@ -429,6 +431,8 @@ end if ~isempty(obj.origin_coords) && ~isa(obj.origin_coords, 'types.untyped.SoftLink') && ~isa(obj.origin_coords, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/origin_coords/unit'], obj.origin_coords_unit); + elseif isempty(obj.origin_coords) && ~isempty(obj.origin_coords_unit) + obj.warnIfPropertyAttributeNotExported('origin_coords_unit', 'origin_coords', fullpath) end if ~isempty(obj.reference_frame) if startsWith(class(obj.reference_frame), 'types.untyped.') diff --git a/+types/+core/ImagingRetinotopy.m b/+types/+core/ImagingRetinotopy.m index 08769f3f..24dbf26d 100644 --- a/+types/+core/ImagingRetinotopy.m +++ b/+types/+core/ImagingRetinotopy.m @@ -785,12 +785,18 @@ end if ~isempty(obj.axis_1_phase_map) && ~isa(obj.axis_1_phase_map, 'types.untyped.SoftLink') && ~isa(obj.axis_1_phase_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/axis_1_phase_map/dimension'], obj.axis_1_phase_map_dimension, 'forceArray'); + elseif isempty(obj.axis_1_phase_map) && ~isempty(obj.axis_1_phase_map_dimension) + obj.warnIfPropertyAttributeNotExported('axis_1_phase_map_dimension', 'axis_1_phase_map', fullpath) end if ~isempty(obj.axis_1_phase_map) && ~isa(obj.axis_1_phase_map, 'types.untyped.SoftLink') && ~isa(obj.axis_1_phase_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/axis_1_phase_map/field_of_view'], 
obj.axis_1_phase_map_field_of_view, 'forceArray'); + elseif isempty(obj.axis_1_phase_map) && ~isempty(obj.axis_1_phase_map_field_of_view) + obj.warnIfPropertyAttributeNotExported('axis_1_phase_map_field_of_view', 'axis_1_phase_map', fullpath) end if ~isempty(obj.axis_1_phase_map) && ~isa(obj.axis_1_phase_map, 'types.untyped.SoftLink') && ~isa(obj.axis_1_phase_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/axis_1_phase_map/unit'], obj.axis_1_phase_map_unit); + elseif isempty(obj.axis_1_phase_map) && ~isempty(obj.axis_1_phase_map_unit) + obj.warnIfPropertyAttributeNotExported('axis_1_phase_map_unit', 'axis_1_phase_map', fullpath) end if ~isempty(obj.axis_1_power_map) if startsWith(class(obj.axis_1_power_map), 'types.untyped.') @@ -801,12 +807,18 @@ end if ~isempty(obj.axis_1_power_map) && ~isa(obj.axis_1_power_map, 'types.untyped.SoftLink') && ~isa(obj.axis_1_power_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/axis_1_power_map/dimension'], obj.axis_1_power_map_dimension, 'forceArray'); + elseif isempty(obj.axis_1_power_map) && ~isempty(obj.axis_1_power_map_dimension) + obj.warnIfPropertyAttributeNotExported('axis_1_power_map_dimension', 'axis_1_power_map', fullpath) end if ~isempty(obj.axis_1_power_map) && ~isa(obj.axis_1_power_map, 'types.untyped.SoftLink') && ~isa(obj.axis_1_power_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/axis_1_power_map/field_of_view'], obj.axis_1_power_map_field_of_view, 'forceArray'); + elseif isempty(obj.axis_1_power_map) && ~isempty(obj.axis_1_power_map_field_of_view) + obj.warnIfPropertyAttributeNotExported('axis_1_power_map_field_of_view', 'axis_1_power_map', fullpath) end if ~isempty(obj.axis_1_power_map) && ~isa(obj.axis_1_power_map, 'types.untyped.SoftLink') && ~isa(obj.axis_1_power_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/axis_1_power_map/unit'], obj.axis_1_power_map_unit); + elseif isempty(obj.axis_1_power_map) && ~isempty(obj.axis_1_power_map_unit) + obj.warnIfPropertyAttributeNotExported('axis_1_power_map_unit', 'axis_1_power_map', fullpath) end if startsWith(class(obj.axis_2_phase_map), 'types.untyped.') refs = obj.axis_2_phase_map.export(fid, [fullpath '/axis_2_phase_map'], refs); @@ -815,12 +827,18 @@ end if ~isempty(obj.axis_2_phase_map) && ~isa(obj.axis_2_phase_map, 'types.untyped.SoftLink') && ~isa(obj.axis_2_phase_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/axis_2_phase_map/dimension'], obj.axis_2_phase_map_dimension, 'forceArray'); + elseif isempty(obj.axis_2_phase_map) && ~isempty(obj.axis_2_phase_map_dimension) + obj.warnIfPropertyAttributeNotExported('axis_2_phase_map_dimension', 'axis_2_phase_map', fullpath) end if ~isempty(obj.axis_2_phase_map) && ~isa(obj.axis_2_phase_map, 'types.untyped.SoftLink') && ~isa(obj.axis_2_phase_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/axis_2_phase_map/field_of_view'], obj.axis_2_phase_map_field_of_view, 'forceArray'); + elseif isempty(obj.axis_2_phase_map) && ~isempty(obj.axis_2_phase_map_field_of_view) + obj.warnIfPropertyAttributeNotExported('axis_2_phase_map_field_of_view', 'axis_2_phase_map', fullpath) end if ~isempty(obj.axis_2_phase_map) && ~isa(obj.axis_2_phase_map, 'types.untyped.SoftLink') && ~isa(obj.axis_2_phase_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/axis_2_phase_map/unit'], obj.axis_2_phase_map_unit); + elseif isempty(obj.axis_2_phase_map) && ~isempty(obj.axis_2_phase_map_unit) + 
obj.warnIfPropertyAttributeNotExported('axis_2_phase_map_unit', 'axis_2_phase_map', fullpath) end if ~isempty(obj.axis_2_power_map) if startsWith(class(obj.axis_2_power_map), 'types.untyped.') @@ -831,12 +849,18 @@ end if ~isempty(obj.axis_2_power_map) && ~isa(obj.axis_2_power_map, 'types.untyped.SoftLink') && ~isa(obj.axis_2_power_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/axis_2_power_map/dimension'], obj.axis_2_power_map_dimension, 'forceArray'); + elseif isempty(obj.axis_2_power_map) && ~isempty(obj.axis_2_power_map_dimension) + obj.warnIfPropertyAttributeNotExported('axis_2_power_map_dimension', 'axis_2_power_map', fullpath) end if ~isempty(obj.axis_2_power_map) && ~isa(obj.axis_2_power_map, 'types.untyped.SoftLink') && ~isa(obj.axis_2_power_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/axis_2_power_map/field_of_view'], obj.axis_2_power_map_field_of_view, 'forceArray'); + elseif isempty(obj.axis_2_power_map) && ~isempty(obj.axis_2_power_map_field_of_view) + obj.warnIfPropertyAttributeNotExported('axis_2_power_map_field_of_view', 'axis_2_power_map', fullpath) end if ~isempty(obj.axis_2_power_map) && ~isa(obj.axis_2_power_map, 'types.untyped.SoftLink') && ~isa(obj.axis_2_power_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/axis_2_power_map/unit'], obj.axis_2_power_map_unit); + elseif isempty(obj.axis_2_power_map) && ~isempty(obj.axis_2_power_map_unit) + obj.warnIfPropertyAttributeNotExported('axis_2_power_map_unit', 'axis_2_power_map', fullpath) end if startsWith(class(obj.axis_descriptions), 'types.untyped.') refs = obj.axis_descriptions.export(fid, [fullpath '/axis_descriptions'], refs); @@ -852,18 +876,28 @@ end if ~isempty(obj.focal_depth_image) && ~isa(obj.focal_depth_image, 'types.untyped.SoftLink') && ~isa(obj.focal_depth_image, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/focal_depth_image/bits_per_pixel'], obj.focal_depth_image_bits_per_pixel); + elseif isempty(obj.focal_depth_image) && ~isempty(obj.focal_depth_image_bits_per_pixel) + obj.warnIfPropertyAttributeNotExported('focal_depth_image_bits_per_pixel', 'focal_depth_image', fullpath) end if ~isempty(obj.focal_depth_image) && ~isa(obj.focal_depth_image, 'types.untyped.SoftLink') && ~isa(obj.focal_depth_image, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/focal_depth_image/dimension'], obj.focal_depth_image_dimension, 'forceArray'); + elseif isempty(obj.focal_depth_image) && ~isempty(obj.focal_depth_image_dimension) + obj.warnIfPropertyAttributeNotExported('focal_depth_image_dimension', 'focal_depth_image', fullpath) end if ~isempty(obj.focal_depth_image) && ~isa(obj.focal_depth_image, 'types.untyped.SoftLink') && ~isa(obj.focal_depth_image, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/focal_depth_image/field_of_view'], obj.focal_depth_image_field_of_view, 'forceArray'); + elseif isempty(obj.focal_depth_image) && ~isempty(obj.focal_depth_image_field_of_view) + obj.warnIfPropertyAttributeNotExported('focal_depth_image_field_of_view', 'focal_depth_image', fullpath) end if ~isempty(obj.focal_depth_image) && ~isa(obj.focal_depth_image, 'types.untyped.SoftLink') && ~isa(obj.focal_depth_image, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/focal_depth_image/focal_depth'], obj.focal_depth_image_focal_depth); + elseif isempty(obj.focal_depth_image) && ~isempty(obj.focal_depth_image_focal_depth) + obj.warnIfPropertyAttributeNotExported('focal_depth_image_focal_depth', 
'focal_depth_image', fullpath) end if ~isempty(obj.focal_depth_image) && ~isa(obj.focal_depth_image, 'types.untyped.SoftLink') && ~isa(obj.focal_depth_image, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/focal_depth_image/format'], obj.focal_depth_image_format); + elseif isempty(obj.focal_depth_image) && ~isempty(obj.focal_depth_image_format) + obj.warnIfPropertyAttributeNotExported('focal_depth_image_format', 'focal_depth_image', fullpath) end if ~isempty(obj.sign_map) if startsWith(class(obj.sign_map), 'types.untyped.') @@ -874,9 +908,13 @@ end if ~isempty(obj.sign_map) && ~isa(obj.sign_map, 'types.untyped.SoftLink') && ~isa(obj.sign_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/sign_map/dimension'], obj.sign_map_dimension, 'forceArray'); + elseif isempty(obj.sign_map) && ~isempty(obj.sign_map_dimension) + obj.warnIfPropertyAttributeNotExported('sign_map_dimension', 'sign_map', fullpath) end if ~isempty(obj.sign_map) && ~isa(obj.sign_map, 'types.untyped.SoftLink') && ~isa(obj.sign_map, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/sign_map/field_of_view'], obj.sign_map_field_of_view, 'forceArray'); + elseif isempty(obj.sign_map) && ~isempty(obj.sign_map_field_of_view) + obj.warnIfPropertyAttributeNotExported('sign_map_field_of_view', 'sign_map', fullpath) end if startsWith(class(obj.vasculature_image), 'types.untyped.') refs = obj.vasculature_image.export(fid, [fullpath '/vasculature_image'], refs); @@ -885,15 +923,23 @@ end if ~isempty(obj.vasculature_image) && ~isa(obj.vasculature_image, 'types.untyped.SoftLink') && ~isa(obj.vasculature_image, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/vasculature_image/bits_per_pixel'], obj.vasculature_image_bits_per_pixel); + elseif isempty(obj.vasculature_image) && ~isempty(obj.vasculature_image_bits_per_pixel) + obj.warnIfPropertyAttributeNotExported('vasculature_image_bits_per_pixel', 'vasculature_image', fullpath) end if ~isempty(obj.vasculature_image) && ~isa(obj.vasculature_image, 'types.untyped.SoftLink') && ~isa(obj.vasculature_image, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/vasculature_image/dimension'], obj.vasculature_image_dimension, 'forceArray'); + elseif isempty(obj.vasculature_image) && ~isempty(obj.vasculature_image_dimension) + obj.warnIfPropertyAttributeNotExported('vasculature_image_dimension', 'vasculature_image', fullpath) end if ~isempty(obj.vasculature_image) && ~isa(obj.vasculature_image, 'types.untyped.SoftLink') && ~isa(obj.vasculature_image, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/vasculature_image/field_of_view'], obj.vasculature_image_field_of_view, 'forceArray'); + elseif isempty(obj.vasculature_image) && ~isempty(obj.vasculature_image_field_of_view) + obj.warnIfPropertyAttributeNotExported('vasculature_image_field_of_view', 'vasculature_image', fullpath) end if ~isempty(obj.vasculature_image) && ~isa(obj.vasculature_image, 'types.untyped.SoftLink') && ~isa(obj.vasculature_image, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/vasculature_image/format'], obj.vasculature_image_format); + elseif isempty(obj.vasculature_image) && ~isempty(obj.vasculature_image_format) + obj.warnIfPropertyAttributeNotExported('vasculature_image_format', 'vasculature_image', fullpath) end end end diff --git a/+types/+core/NWBFile.m b/+types/+core/NWBFile.m index 4fb0534c..9b6a1417 100644 --- a/+types/+core/NWBFile.m +++ b/+types/+core/NWBFile.m @@ -1010,6 +1010,8 @@ end if 
~isempty(obj.general_source_script) && ~isa(obj.general_source_script, 'types.untyped.SoftLink') && ~isa(obj.general_source_script, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/general/source_script/file_name'], obj.general_source_script_file_name); + elseif isempty(obj.general_source_script) && ~isempty(obj.general_source_script_file_name) + obj.warnIfPropertyAttributeNotExported('general_source_script_file_name', 'general_source_script', fullpath) end io.writeGroup(fid, [fullpath '/general']); if ~isempty(obj.general_stimulus) diff --git a/+types/+core/TimeSeries.m b/+types/+core/TimeSeries.m index 5c905fb5..2503b45b 100644 --- a/+types/+core/TimeSeries.m +++ b/+types/+core/TimeSeries.m @@ -380,6 +380,8 @@ end if ~isempty(obj.data) && ~isa(obj.data, 'types.untyped.SoftLink') && ~isa(obj.data, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/data/unit'], obj.data_unit); + elseif isempty(obj.data) && ~isempty(obj.data_unit) + obj.warnIfPropertyAttributeNotExported('data_unit', 'data', fullpath) end if ~isempty(obj.description) io.writeAttribute(fid, [fullpath '/description'], obj.description); @@ -393,6 +395,8 @@ end if ~isempty(obj.starting_time) && ~isa(obj.starting_time, 'types.untyped.SoftLink') && ~isa(obj.starting_time, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/starting_time/rate'], obj.starting_time_rate); + elseif isempty(obj.starting_time) && ~isempty(obj.starting_time_rate) + obj.warnIfPropertyAttributeNotExported('starting_time_rate', 'starting_time', fullpath) end if ~isempty(obj.starting_time) && ~isa(obj.starting_time, 'types.untyped.SoftLink') && ~isa(obj.starting_time, 'types.untyped.ExternalLink') io.writeAttribute(fid, [fullpath '/starting_time/unit'], obj.starting_time_unit); diff --git a/+types/+untyped/MetaClass.m b/+types/+untyped/MetaClass.m index 6d7840a9..eeffa248 100644 --- a/+types/+untyped/MetaClass.m +++ b/+types/+untyped/MetaClass.m @@ -95,5 +95,16 @@ end end end + + function warnIfPropertyAttributeNotExported(obj, propName, depPropName, fullpath) + warnState = warning('backtrace', 'off'); + cleanupObj = onCleanup(@(s) warning(warnState)); + warningId = 'NWB:DependentAttributeNotExported'; + warningMessage = sprintf( [ ... + 'The property "%s" of type "%s" was not exported to file ', ... + 'location "%s" because it depends on the property "%s" ', ... + 'which is unset.' ], propName, class(obj), fullpath, depPropName); + warning(warningId, warningMessage) %#ok + end end end \ No newline at end of file diff --git a/tutorials/convertTrials.m b/tutorials/convertTrials.m index 99fa5c4d..7d5b5b1b 100644 --- a/tutorials/convertTrials.m +++ b/tutorials/convertTrials.m @@ -9,11 +9,11 @@ % % author: Lawrence Niu % contact: lawrence@vidriotech.com -% last updated: May 27, 2021 +% last updated: Sep 14, 2024 %% Script Configuration -% The following details configuration parameters specific to the publishing script, -% and can be skipped when implementing your own conversion. +% The following section describes configuration parameters specific to the +% publishing script, and can be skipped when implementing your own conversion. % The parameters can be changed to fit any of the available sessions. animal = 'ANM255201'; @@ -21,29 +21,31 @@ identifier = [animal '_' session]; -metadata_loc = fullfile('data','metadata', ['meta_data_' identifier '.mat']); -datastructure_loc = fullfile('data','data_structure_files',... 
+% Specify the local path for the downloaded data: +data_root_path = 'data'; + +metadata_loc = fullfile(data_root_path, 'metadata', ['meta_data_' identifier '.mat']); +datastructure_loc = fullfile(data_root_path, 'data_structure_files',... ['data_structure_' identifier '.mat']); -rawdata_loc = fullfile('data', 'RawVoltageTraces', [identifier '.tar']); +rawdata_loc = fullfile(data_root_path, 'RawVoltageTraces', [identifier '.tar']); %% % The animal and session specifier can be changed with the |animal| and |session| % variable name respectively. |metadata_loc|, |datastructure_loc|, and |rawdata_loc| % should refer to the metadata .mat file, the data structure .mat file, % and the raw .tar file. -outloc = 'out'; +output_directory = 'out'; -if 7 ~= exist(outloc, 'dir') - mkdir(outloc); +if ~isfolder(output_directory) + mkdir(output_directory); end source_file = [mfilename() '.m']; [~, source_script, ~] = fileparts(source_file); %% -% The NWB file will be saved in the output directory indicated by |outdir| +% The NWB file will be saved in the output directory indicated by |output_directory| %% General Information - nwb = NwbFile(); nwb.identifier = identifier; nwb.general_source_script = source_script; @@ -83,7 +85,7 @@ %% The ALM-3 File Structure % Each ALM-3 session has three files: a metadata .mat file describing the experiment, a % data structures .mat file containing analyzed data, and a raw .tar archive -% containing multiple raw electrophysiology data separated by trial as .mat files. +% containing multiple raw electrophysiology data separated by trials as .mat files. % All files will be merged into a single NWB file. %% Metadata @@ -95,15 +97,15 @@ loaded = load(metadata_loc, 'meta_data'); meta = loaded.meta_data; -%experiment-specific treatment for animals with the ReaChR gene modification +% Experiment-specific treatment for animals with the ReaChR gene modification isreachr = any(cell2mat(strfind(meta.animalGeneModification, 'ReaChR'))); -%sessions are separated by date of experiment. +% Sessions are separated by date of experiment. nwb.general_session_id = meta.dateOfExperiment; -%ALM-3 data start time is equivalent to the reference time. +% ALM-3 data start time is equivalent to the reference time. nwb.session_start_time = datetime([meta.dateOfExperiment meta.timeOfExperiment],... - 'InputFormat', 'yyyyMMddHHmmss'); + 'InputFormat', 'yyyyMMddHHmmss', 'TimeZone', 'America/New_York'); % Eastern Daylight Time nwb.timestamps_reference_time = nwb.session_start_time; nwb.general_experimenter = strjoin(meta.experimenters, ', '); @@ -124,9 +126,9 @@ % However, since these fields are mostly experimental annotations, we instead pack the % extra values into the |description| field as a string. -%The formatStruct function simply prints the field and values given the struct. -%An optional cell array of field names specifies whitelist of fields to print. This -%function is provided with this script in the tutorials directory. +% The formatStruct function simply prints the field and values given the struct. +% An optional cell array of field names specifies whitelist of fields to print. +% This function is provided with this script in the tutorials directory. nwb.general_subject.genotype = formatStruct(... meta, ... {'animalStrain'; 'animalGeneModification'; 'animalGeneCopy';... @@ -150,8 +152,8 @@ newline, ... formatStruct(meta.behavior, {'task_keyword'})]; -% Miscellaneous collection information from ALM-3 that didn't quite fit any NWB properties -% are stored in general/data_collection. 
+% Miscellaneous collection information from ALM-3 that didn't quite fit any NWB +% properties are stored in general/data_collection. nwb.general_data_collection = formatStruct(meta.extracellular,... {'extracellularDataType';'cellType';'identificationMethod';'amplifierRolloff';... 'spikeSorting';'ADunit'}); @@ -183,16 +185,15 @@ 'device', types.untyped.SoftLink(['/general/devices/' deviceName])); nwb.general_extracellular_ephys.set(deviceName, egroup); %% -% The NWB *ElectrodeGroup* object stores experimental information regarding a group of -% probes. Doing so requires a *SoftLink* to the probe specified under +% The NWB *ElectrodeGroup* object stores experimental information regarding a +% group of probes. Doing so requires a *SoftLink* to the probe specified under % |general_devices|. SoftLink objects are direct maps to -% on export, -% and thus, require a true HDF5 path. +% on +% export, and thus, require a true HDF5 path. -% you can specify column names and values as key-value arguments in the DynamicTable +% You can specify column names and values as key-value arguments in the DynamicTable % constructor. -dtColNames = {'x', 'y', 'z', 'imp', 'location', 'filtering','group',... - 'group_name'}; +dtColNames = {'x', 'y', 'z', 'imp', 'location', 'filtering','group', 'group_name'}; dynTable = types.hdmf_common.DynamicTable(... 'colnames', dtColNames,... 'description', 'Electrodes',... @@ -207,13 +208,13 @@ 'group', types.hdmf_common.VectorData('description', 'Reference to the ElectrodeGroup this electrode is a part of.'),... 'group_name', types.hdmf_common.VectorData('description', 'Name of the ElectrodeGroup this electrode is a part of.')); -%raw HDF5 path to the above electrode group. Referenced by +% Raw HDF5 path to the above electrode group. Referenced by % the general/extracellular_ephys Dynamic Table egroupPath = ['/general/extracellular_ephys/' deviceName]; eGroupReference = types.untyped.ObjectView(egroupPath); for i = 1:length(meta.extracellular.siteLocations) location = meta.extracellular.siteLocations{i}; - % add each row in the dynamic table. The `id` column is populated + % Add each row in the dynamic table. The `id` column is populated % dynamically. dynTable.addRow(... 'x', location(1), 'y', location(2), 'z', location(3),... @@ -249,6 +250,7 @@ 'device', types.untyped.SoftLink(['/general/devices/' laserName]), ... 'description', formatStruct(meta.photostim, {... 'stimulationMethod';'photostimCoordinates';'identificationMethod'}))); + %% Analysis Data Structure % The ALM-3 data structures .mat file contains analyzed spike data, trial-specific % parameters, and behavioral analysis data. @@ -278,7 +280,7 @@ ephus = data.timeSeriesArrayHash.value{1}; ephusUnit = data.timeUnitNames{data.timeUnitIds(ephus.timeUnit)}; -% lick direction and timestamps trace +% Lick direction and timestamps trace tsIdx = strcmp(ephus.idStr, 'lick_trace'); bts = types.core.BehavioralTimeSeries(); @@ -292,7 +294,7 @@ nwb.acquisition.set('lick_trace', bts); bts_ref = types.untyped.ObjectView('/acquisition/lick_trace/lick_trace_ts'); -% acousto-optic modulator input trace +% Acousto-optic modulator input trace tsIdx = strcmp(ephus.idStr, 'aom_input_trace'); ts = types.core.TimeSeries(... 'data', ephus.valueMatrix(:,tsIdx), ... @@ -303,11 +305,11 @@ nwb.stimulus_presentation.set('aom_input_trace', ts); ts_ref = types.untyped.ObjectView('/stimulus/presentation/aom_input_trace'); -% laser power +% Laser power tsIdx = strcmp(ephus.idStr, 'laser_power'); ots = types.core.OptogeneticSeries(... 
- 'data', ephus.valueMatrix(:,tsIdx), ... - 'data_unit', 'mW', ... + 'data', ephus.valueMatrix(:, tsIdx), ... + 'data_conversion', 1e-3, ... % data is stored in mW, data unit for OptogeneticSeries is watts 'description', ephus.idStrDetailed{tsIdx}, ... 'timestamps', ephus.time, ... 'timestamps_unit', ephusUnit, ... @@ -315,7 +317,7 @@ nwb.stimulus_presentation.set('laser_power', ots); ots_ref = types.untyped.ObjectView('/stimulus/presentation/laser_power'); -% append trials timeseries references in order +% Append trials timeseries references in order [ephus_trials, ~, trials_to_data] = unique(ephus.trial); for i=1:length(ephus_trials) i_loc = i == trials_to_data; @@ -349,33 +351,36 @@ % here because of how the data is formatted. DynamicTable is flexible % enough to accomodate both styles of data conversion. trials_epoch = types.core.TimeIntervals(... - 'start_time', types.hdmf_common.VectorData('data', data.trialStartTimes,... - 'description', 'Start time of epoch, in seconds.'),... - 'colnames', [data.trialTypeStr; data.trialPropertiesHash.keyNames .';... - {'start_time'; 'stop_time'; 'acquisition'; 'timeseries'}],... + 'colnames', {'start_time'}, ... 'description', 'trial data and properties', ... - 'id', types.hdmf_common.ElementIdentifiers('data', data.trialIds),... - 'timeseries', types.hdmf_common.VectorData('description', 'Index into timeseries Data'),... - 'timeseries_index', types.hdmf_common.VectorIndex(... - 'description', 'Index into Timeseries VectorData',... - 'target', types.untyped.ObjectView('/intervals/trials/timeseries'))); + 'start_time', types.hdmf_common.VectorData(... + 'data', data.trialStartTimes', ... + 'description', 'Start time of epoch, in seconds.'), ... + 'id', types.hdmf_common.ElementIdentifiers(... + 'data', data.trialIds' ) ); +% Add columns for the trial types for i=1:length(data.trialTypeStr) - trials_epoch.vectordata.set(data.trialTypeStr{i}, ... - types.hdmf_common.VectorData('data', data.trialTypeMat(i,:),... - 'description', data.trialTypeStr{i})); + columnName = data.trialTypeStr{i}; + columnData = types.hdmf_common.VectorData(... + 'data', data.trialTypeMat(i,:)', ... % transpose for column vector + 'description', data.trialTypeStr{i}); + trials_epoch.addColumn( columnName, columnData ) end +% Add columns for the trial properties for i=1:length(data.trialPropertiesHash.keyNames) + columnName = data.trialPropertiesHash.keyNames{i}; descr = data.trialPropertiesHash.descr{i}; if iscellstr(descr) descr = strjoin(descr, newline); end - trials_epoch.vectordata.set(data.trialPropertiesHash.keyNames{i}, ... - types.hdmf_common.VectorData(... - 'data', data.trialPropertiesHash.value{i}, ... - 'description', descr)); + columnData = types.hdmf_common.VectorData(... + 'data', data.trialPropertiesHash.value{i},... + 'description', data.trialTypeStr{i}); + trials_epoch.addColumn( columnName, columnData ) end + nwb.intervals_trials = trials_epoch; %% @@ -403,7 +408,7 @@ for i=1:length(ids) esData = esHash.value{i}; - % add trials ID reference + % Add trials ID reference good_trials_mask = ismember(esData.eventTrials, nwb.intervals_trials.id.data); eventTrials = esData.eventTrials(good_trials_mask); @@ -411,7 +416,7 @@ waveforms = esData.waveforms(good_trials_mask,:); channel = esData.channel(good_trials_mask); - % add waveform data to "unitx" and associate with "waveform" column as ObjectView. + % Add waveform data to "unitx" and associate with "waveform" column as ObjectView. ses = types.core.SpikeEventSeries(... 'control', ids(i),... 
'control_description', 'Units Table ID',... @@ -430,10 +435,9 @@ end nwb.analysis.set(ses_name, ses); nwb.units.addRow(... - 'id', ids(i), 'trials', eventTrials,'spike_times', eventTimes, 'waveforms', ses_ref,... - 'tablepath', '/units'); + 'id', ids(i), 'trials', eventTrials, 'spike_times', eventTimes, 'waveforms', ses_ref); - %add this timeseries into the trials table as well. + % Add this timeseries into the trials table as well. [s_trials, ~, trials_to_data] = unique(eventTrials); for j=1:length(s_trials) trial = s_trials(j); @@ -455,9 +459,9 @@ % object under the name 'trial n' where 'n' is the trial ID. The trials are then linked % to the |trials| dynamic table for easy referencing. fprintf('Processing Raw Acquisition Data from `%s` (will take a while)\n', rawdata_loc); -untarLoc = fullfile(pwd, identifier); -if 7 ~= exist(untarLoc, 'dir') - untar(rawdata_loc, pwd); +untarLoc = strrep(rawdata_loc, '.tar', ''); +if ~isfolder(untarLoc) + untar(rawdata_loc, fileparts(rawdata_loc)); end rawfiles = dir(untarLoc); @@ -469,8 +473,8 @@ 'table',types.untyped.ObjectView('/general/extracellular_ephys/electrodes'),... 'data',(1:nrows) - 1); objrefs = cell(size(rawfiles)); -trials_idx = nwb.intervals_trials; -endTimestamps = trials_idx.start_time.data; + +endTimestamps = trials_epoch.start_time.data; for i=1:length(rawfiles) tnumstr = regexp(rawfiles{i}, '_trial_(\d+)\.mat$', 'tokens', 'once'); tnumstr = tnumstr{1}; @@ -493,32 +497,45 @@ objrefs{tnum} = types.untyped.ObjectView(['/acquisition/' tname]); end -%Link to the raw data by adding the acquisition column with ObjectViews -%to the data +% Link to the raw data by adding the acquisition column with ObjectViews +% to the data emptyrefs = cellfun('isempty', objrefs); objrefs(emptyrefs) = {types.untyped.ObjectView('')}; -trials_idx.vectordata.set('acquisition', types.hdmf_common.VectorData(... + +trials_epoch.addColumn('acquisition', types.hdmf_common.VectorData(... 'description', 'soft link to acquisition data for this trial',... - 'data', [objrefs{:}])); -trials_idx.stop_time = types.hdmf_common.VectorData(... - 'data', endTimestamps,... - 'description', 'the end time of each trial'); + 'data', [objrefs{:}]')); -%% Export +trials_epoch.stop_time = types.hdmf_common.VectorData(... + 'data', endTimestamps',... + 'description', 'the end time of each trial'); +trials_epoch.colnames{end+1} = 'stop_time'; -%first, we'll format and store |trial_timeseries| into |intervals_trials|. +%% Add timeseries to trials_epoch +% First, we'll format and store |trial_timeseries| into |intervals_trials|. % note that |timeseries_index| data is 0-indexed. ts_len = cellfun('size', trial_timeseries, 1); -is_len_nonzero = ts_len > 0; -ts_len_nonzero = ts_len(is_len_nonzero); -nwb.intervals_trials.timeseries_index.data = cumsum(ts_len_nonzero); -% intervals/trials/timeseries is a compound type so we use cell2table to +nwb.intervals_trials.timeseries_index = types.hdmf_common.VectorIndex(... + 'description', 'Index into Timeseries VectorData', ... + 'data', cumsum(ts_len)', ... + 'target', types.untyped.ObjectView('/intervals/trials/timeseries') ); + +% Intervals/trials/timeseries is a compound type so we use cell2table to % convert this 2-d cell array into a compatible table. -nwb.intervals_trials.timeseries.data = cell2table(vertcat(trial_timeseries{is_len_nonzero}),... +is_len_nonzero = ts_len > 0; +trial_timeseries_table = cell2table(vertcat(trial_timeseries{is_len_nonzero}),... 
'VariableNames', {'timeseries', 'idx_start', 'count'}); +trial_timeseries_table = movevars(trial_timeseries_table, 'timeseries', 'After', 'count'); + +interval_trials_timeseries = types.core.TimeSeriesReferenceVectorData(... + 'description', 'Index into TimeSeries data', ... + 'data', trial_timeseries_table); +nwb.intervals_trials.timeseries = interval_trials_timeseries; +nwb.intervals_trials.colnames{end+1} = 'timeseries'; -outDest = fullfile(outloc, [identifier '.nwb']); -if 2 == exist(outDest, 'file') - delete(outDest); +%% Export +nwbFilePath = fullfile(output_directory, [identifier '.nwb']); +if isfile(nwbFilePath) + delete(nwbFilePath); end -nwbExport(nwb, outDest); \ No newline at end of file +nwbExport(nwb, nwbFilePath); diff --git a/tutorials/html/convertTrials.html b/tutorials/html/convertTrials.html index 4b981e3f..1df73a43 100644 --- a/tutorials/html/convertTrials.html +++ b/tutorials/html/convertTrials.html @@ -1,12 +1,17 @@ - - - - - NWB File Conversion Tutorial

NWB File Conversion Tutorial

How to convert trial-based experimental data to the Neurodata Without Borders file format using MatNWB. This example uses the CRCNS ALM-3 data set. Information on how to download the data can be found on the CRCNS Download Page. One should first familiarize themselves with the file format, which can be found on the ALM-3 About Page under the Documentation files.

author: Lawrence Niu
+  
+
+
+
+

NWB File Conversion Tutorial

+ +

How to convert trial-based experimental data to the Neurodata Without Borders file format using MatNWB. This example uses the CRCNS ALM-3 data set. Information on how to download the data can be found on the CRCNS Download Page. One should first familiarize themselves with the file format, which can be found on the ALM-3 About Page under the Documentation files.

+
author: Lawrence Niu
 contact: lawrence@vidriotech.com
-last updated: May 27, 2021

Contents

Script Configuration

The following details configuration parameters specific to the publishing script, and can be skipped when implementing your own conversion. The parameters can be changed to fit any of the available sessions.

animal = 'ANM255201';
+last updated: Sep 14, 2024
+ +

Contents

+ +

Script Configuration

+

The following section describes configuration parameters specific to the publishing script, and can be skipped when implementing your own conversion. The parameters can be changed to fit any of the available sessions.

+
animal = 'ANM255201';
 session = '20141124';
 
 identifier = [animal '_' session];
 
-metadata_loc = fullfile('data','metadata', ['meta_data_' identifier '.mat']);
-datastructure_loc = fullfile('data','data_structure_files',...
+% Specify the local path for the downloaded data:
+data_root_path = 'data';
+
+metadata_loc = fullfile(data_root_path, 'metadata', ['meta_data_' identifier '.mat']);
+datastructure_loc = fullfile(data_root_path, 'data_structure_files',...
     ['data_structure_' identifier '.mat']);
-rawdata_loc = fullfile('data', 'RawVoltageTraces', [identifier '.tar']);
-

The animal and session specifier can be changed with the animal and session variable name respectively. metadata_loc, datastructure_loc, and rawdata_loc should refer to the metadata .mat file, the data structure .mat file, and the raw .tar file.

outloc = 'out';
+rawdata_loc = fullfile(data_root_path, 'RawVoltageTraces', [identifier '.tar']);
+
+

The animal and session specifier can be changed with the animal and session variable name respectively. metadata_loc, datastructure_loc, and rawdata_loc should refer to the metadata .mat file, the data structure .mat file, and the raw .tar file.

+
output_directory = 'out';
 
-if 7 ~= exist(outloc, 'dir')
-    mkdir(outloc);
+if ~isfolder(output_directory)
+    mkdir(output_directory);
 end
 
 source_file = [mfilename() '.m'];
 [~, source_script, ~] = fileparts(source_file);
-

The NWB file will be saved in the output directory indicated by outdir

General Information

nwb = NwbFile();
+
+

The NWB file will be saved in the output directory indicated by output_directory +

+

General Information

+
nwb = NwbFile();
 nwb.identifier = identifier;
 nwb.general_source_script = source_script;
 nwb.general_source_script_file_name = source_file;
@@ -112,24 +169,33 @@
     'covering the entire exposed skull, ',...
     'followed by a thin layer of clear nail polish (Electron Microscopy Sciences, 72180).'];
 nwb.session_description = sprintf('Animal `%s` on Session `%s`', animal, session);
-

All properties with the prefix general contain context for the entire experiment such as lab, institution, and experimentors. For session-delimited data from the same experiment, these fields will all be the same. Note that most of this information was pulled from the publishing paper and not from any of the downloadable data.

The only required property is the identifier, which distinguishes one session from another within an experiment. In our case, the ALM-3 data uses a combination of session date and animal ID.

The ALM-3 File Structure

Each ALM-3 session has three files: a metadata .mat file describing the experiment, a data structures .mat file containing analyzed data, and a raw .tar archive containing multiple raw electrophysiology data separated by trial as .mat files. All files will be merged into a single NWB file.

Metadata

ALM-3 Metadata contains information about the reference times, experimental context, methodology, as well as details of the electrophysiology, optophysiology, and behavioral portions of the experiment. A vast majority of these details are placed in general prefixed properties in NWB.

fprintf('Processing Meta Data from `%s`\n', metadata_loc);
+
+

All properties with the prefix general contain context for the entire experiment such as lab, institution, and experimentors. For session-delimited data from the same experiment, these fields will all be the same. Note that most of this information was pulled from the publishing paper and not from any of the downloadable data.

+

The only required property is the identifier, which distinguishes one session from another within an experiment. In our case, the ALM-3 data uses a combination of session date and animal ID.

+

The ALM-3 File Structure

+

Each ALM-3 session has three files: a metadata .mat file describing the experiment, a data structures .mat file containing analyzed data, and a raw .tar archive containing multiple raw electrophysiology data separated by trials as .mat files. All files will be merged into a single NWB file.

+

Metadata

+

ALM-3 Metadata contains information about the reference times, experimental context, methodology, as well as details of the electrophysiology, optophysiology, and behavioral portions of the experiment. A vast majority of these details are placed in general prefixed properties in NWB.

+
fprintf('Processing Meta Data from `%s`\n', metadata_loc);
 loaded = load(metadata_loc, 'meta_data');
 meta = loaded.meta_data;
 
-%experiment-specific treatment for animals with the ReaChR gene modification
+% Experiment-specific treatment for animals with the ReaChR gene modification
 isreachr = any(cell2mat(strfind(meta.animalGeneModification, 'ReaChR')));
 
-%sessions are separated by date of experiment.
+% Sessions are separated by date of experiment.
 nwb.general_session_id = meta.dateOfExperiment;
 
-%ALM-3 data start time is equivalent to the reference time.
+% ALM-3 data start time is equivalent to the reference time.
 nwb.session_start_time = datetime([meta.dateOfExperiment meta.timeOfExperiment],...
-    'InputFormat', 'yyyyMMddHHmmss');
+    'InputFormat', 'yyyyMMddHHmmss', 'TimeZone', 'America/New_York'); % Eastern Daylight Time
 nwb.timestamps_reference_time = nwb.session_start_time;
 
 nwb.general_experimenter = strjoin(meta.experimenters, ', ');
-
Processing Meta Data from `data\metadata\meta_data_ANM255201_20141124.mat`
-
nwb.general_subject = types.core.Subject(...
+
+
Processing Meta Data from `data/metadata/meta_data_ANM255201_20141124.mat`
+
+
nwb.general_subject = types.core.Subject(...
     'species', meta.species{1}, ...
     'subject_id', meta.animalID{1}(1,:), ... %weird case with duplicate Animal ID
     'sex', meta.sex, ...
@@ -137,9 +203,12 @@
     'description', [...
         'Whisker Config: ' strjoin(meta.whiskerConfig, ', ') newline...
         'Animal Source: ' strjoin(meta.animalSource, ', ')]);
-

Ideally, if a raw data field does not correspond directly to a NWB field, one would create their own using a custom NWB extension class. However, since these fields are mostly experimental annotations, we instead pack the extra values into the description field as a string.

%The formatStruct function simply prints the field and values given the struct.
-%An optional cell array of field names specifies whitelist of fields to print.  This
-%function is provided with this script in the tutorials directory.
+
+

Ideally, if a raw data field does not correspond directly to a NWB field, one would create their own using a custom NWB extension class. However, since these fields are mostly experimental annotations, we instead pack the extra values into the description field as a string.

+
+% The formatStruct function simply prints the field and values given the struct.
+% An optional cell array of field names specifies whitelist of fields to print.
+% This function is provided with this script in the tutorials directory.
 nwb.general_subject.genotype = formatStruct(...
     meta, ...
     {'animalStrain'; 'animalGeneModification'; 'animalGeneCopy';...
@@ -163,8 +232,8 @@
     newline, ...
     formatStruct(meta.behavior, {'task_keyword'})];
 
-% Miscellaneous collection information from ALM-3 that didn't quite fit any NWB properties
-% are stored in general/data_collection.
+% Miscellaneous collection information from ALM-3 that didn't quite fit any NWB
+% properties are stored in general/data_collection.
 nwb.general_data_collection = formatStruct(meta.extracellular,...
     {'extracellularDataType';'cellType';'identificationMethod';'amplifierRolloff';...
     'spikeSorting';'ADunit'});
@@ -182,7 +251,8 @@
     laserName = 'laser-473nm (Laser Quantum, Gem 473)';
 end
 nwb.general_devices.set(laserName, types.core.Device());
-
structDesc = {'recordingCoordinates';'recordingMarker';'recordingType';'penetrationN';...
+
+
structDesc = {'recordingCoordinates';'recordingMarker';'recordingType';'penetrationN';...
     'groundCoordinates'};
 if ~isempty(meta.extracellular.referenceCoordinates)
     structDesc{end+1} = 'referenceCoordinates';
@@ -193,10 +263,12 @@
     'location', recordingLocation,...
     'device', types.untyped.SoftLink(['/general/devices/' deviceName]));
 nwb.general_extracellular_ephys.set(deviceName, egroup);
-

The NWB ElectrodeGroup object stores experimental information regarding a group of probes. Doing so requires a SoftLink to the probe specified under general_devices. SoftLink objects are direct maps to HDF5 Soft Links on export, and thus, require a true HDF5 path.

% you can specify column names and values as key-value arguments in the DynamicTable
+
+

The NWB ElectrodeGroup object stores experimental information regarding a group of probes. Doing so requires a SoftLink to the probe specified under general_devices. SoftLink objects are direct maps to HDF5 Soft Links on export, and thus, require a true HDF5 path.

+
+% You can specify column names and values as key-value arguments in the DynamicTable
 % constructor.
-dtColNames = {'x', 'y', 'z', 'imp', 'location', 'filtering','group',...
-    'group_name'};
+dtColNames = {'x', 'y', 'z', 'imp', 'location', 'filtering','group', 'group_name'};
 dynTable = types.hdmf_common.DynamicTable(...
     'colnames', dtColNames,...
     'description', 'Electrodes',...
@@ -211,13 +283,13 @@
     'group', types.hdmf_common.VectorData('description', 'Reference to the ElectrodeGroup this electrode is a part of.'),...
     'group_name', types.hdmf_common.VectorData('description', 'Name of the ElectrodeGroup this electrode is a part of.'));
 
-%raw HDF5 path to the above electrode group. Referenced by
+% Raw HDF5 path to the above electrode group. Referenced by
 % the general/extracellular_ephys Dynamic Table
 egroupPath = ['/general/extracellular_ephys/' deviceName];
 eGroupReference = types.untyped.ObjectView(egroupPath);
 for i = 1:length(meta.extracellular.siteLocations)
     location = meta.extracellular.siteLocations{i};
-    % add each row in the dynamic table. The `id` column is populated
+    % Add each row in the dynamic table. The `id` column is populated
     % dynamically.
     dynTable.addRow(...
         'x', location(1), 'y', location(2), 'z', location(3),...
@@ -227,8 +299,13 @@
         'group', eGroupReference,...
         'group_name', probetype);
 end
-

The group column in the Dynamic Table contains an ObjectView to the previously created ElectrodeGroup. An ObjectView can be best thought of as a direct pointer to another typed object. It also directly maps to a HDF5 Object Reference, thus the HDF5 path requirement. ObjectViews are slightly different from SoftLinks in that they can be stored in datasets (data columns, tables, and data fields in NWBData objects).

nwb.general_extracellular_ephys_electrodes = dynTable;
-

The electrodes property in extracellular_ephys is a special keyword in NWB that must be paired with a Dynamic Table. These are tables which can have an unbounded number of columns and rows, each as their own dataset. With the exception of the id column, all other columns must be VectorData or VectorIndex objects. The id column, meanwhile, must be an ElementIdentifiers object. The names of all used columns are specified in the in the colnames property as a cell array of strings.

% general/optogenetics/photostim
+
+

The group column in the Dynamic Table contains an ObjectView to the previously created ElectrodeGroup. An ObjectView can be best thought of as a direct pointer to another typed object. It also directly maps to a HDF5 Object Reference, thus the HDF5 path requirement. ObjectViews are slightly different from SoftLinks in that they can be stored in datasets (data columns, tables, and data fields in NWBData objects).

+
nwb.general_extracellular_ephys_electrodes = dynTable;
+
+

The electrodes property in extracellular_ephys is a special keyword in NWB that must be paired with a Dynamic Table. These are tables which can have an unbounded number of columns and rows, each as their own dataset. With the exception of the id column, all other columns must be VectorData or VectorIndex objects. The id column, meanwhile, must be an ElementIdentifiers object. The names of all used columns are specified in the in the colnames property as a cell array of strings.

+
+% general/optogenetics/photostim
 nwb.general_optogenetics.set('photostim', ...
     types.core.OptogeneticStimulusSite(...
     'excitation_lambda', meta.photostim.photostimWavelength{1}, ...
@@ -236,7 +313,13 @@
     'device', types.untyped.SoftLink(['/general/devices/' laserName]), ...
     'description', formatStruct(meta.photostim, {...
     'stimulationMethod';'photostimCoordinates';'identificationMethod'})));
-

Analysis Data Structure

The ALM-3 data structures .mat file contains analyzed spike data, trial-specific parameters, and behavioral analysis data.

Hashes

ALM-3 stores its data structures in the form of hashes which are essentially the same as python's dictionaries or MATLAB's maps but where the keys and values are stored under separate struct fields. Getting a hashed value from a key involves retrieving the array index that the key is in and applying it to the parallel array in the values field.

You can find more information about hashes and how they're used on the ALM-3 about page.

fprintf('Processing Data Structure `%s`\n', datastructure_loc);
+
+

Analysis Data Structure

+

The ALM-3 data structures .mat file contains analyzed spike data, trial-specific parameters, and behavioral analysis data.

+

Hashes

+

ALM-3 stores its data structures in the form of hashes which are essentially the same as python's dictionaries or MATLAB's maps but where the keys and values are stored under separate struct fields. Getting a hashed value from a key involves retrieving the array index that the key is in and applying it to the parallel array in the values field.

+

You can find more information about hashes and how they're used on the ALM-3 about page.

+
fprintf('Processing Data Structure `%s`\n', datastructure_loc);
 loaded = load(datastructure_loc, 'obj');
 data = loaded.obj;
 
@@ -244,11 +327,14 @@
 % may not be in trial order.
 % Trial timeseries will be a compound type under intervals/trials.
 trial_timeseries = cell(size(data.trialIds));
-
Processing Data Structure `data\data_structure_files\data_structure_ANM255201_20141124.mat`
-

NWB comes with default support for trial-based data. These must be TimeIntervals that are placed in the intervals property. Note that trials is a special keyword that is required for PyNWB compatibility.

ephus = data.timeSeriesArrayHash.value{1};
+
+
Processing Data Structure `data/data_structure_files/data_structure_ANM255201_20141124.mat`
+
+

NWB comes with default support for trial-based data. These must be TimeIntervals that are placed in the intervals property. Note that trials is a special keyword that is required for PyNWB compatibility.

+
ephus = data.timeSeriesArrayHash.value{1};
 ephusUnit = data.timeUnitNames{data.timeUnitIds(ephus.timeUnit)};
 
-% lick direction and timestamps trace
+% Lick direction and timestamps trace
 tsIdx = strcmp(ephus.idStr, 'lick_trace');
 bts = types.core.BehavioralTimeSeries();
 
@@ -262,7 +348,7 @@
 nwb.acquisition.set('lick_trace', bts);
 bts_ref = types.untyped.ObjectView('/acquisition/lick_trace/lick_trace_ts');
 
-% acousto-optic modulator input trace
+% Acousto-optic modulator input trace
 tsIdx = strcmp(ephus.idStr, 'aom_input_trace');
 ts = types.core.TimeSeries(...
     'data', ephus.valueMatrix(:,tsIdx), ...
@@ -273,11 +359,11 @@
 nwb.stimulus_presentation.set('aom_input_trace', ts);
 ts_ref = types.untyped.ObjectView('/stimulus/presentation/aom_input_trace');
 
-% laser power
+% Laser power
 tsIdx = strcmp(ephus.idStr, 'laser_power');
 ots = types.core.OptogeneticSeries(...
-    'data', ephus.valueMatrix(:,tsIdx), ...
-    'data_unit', 'mW', ...
+    'data', ephus.valueMatrix(:, tsIdx), ...
+    'data_conversion', 1e-3, ... % data is stored in mW, data unit for OptogeneticSeries is watts
     'description', ephus.idStrDetailed{tsIdx}, ...
     'timestamps', ephus.time, ...
     'timestamps_unit', ephusUnit, ...
@@ -285,7 +371,7 @@
 nwb.stimulus_presentation.set('laser_power', ots);
 ots_ref = types.untyped.ObjectView('/stimulus/presentation/laser_power');
 
-% append trials timeseries references in order
+% Append trials timeseries references in order
 [ephus_trials, ~, trials_to_data] = unique(ephus.trial);
 for i=1:length(ephus_trials)
     i_loc = i == trials_to_data;
@@ -298,36 +384,48 @@
         ts_ref  int64(t_start) int64(t_count);...
         ots_ref int64(t_start) int64(t_count)};
 end
-

The timeseries property of the TimeIntervals object is an example of a compound data type. These types are essentially tables of data in HDF5 and can be represented by a MATLAB table, an array of structs, or a struct of arrays. Beware: validation of column lengths here is not guaranteed by the type checker until export.

VectorIndex objects index into a larger VectorData column. The object that is being referenced is indicated by the target property, which uses an ObjectView. Each element in the VectorIndex marks the last element in the corresponding vector data object for the VectorIndex row. Thus, the starting index for this row would be the previous index + 1. Note that these indices must be 0-indexed for compatibility with pynwb. You can see this in effect with the timeseries property which is indexed by the timeseries_index property.

Though TimeIntervals is a subclass of the DynamicTable type, we opt for populating the Dynamic Table data by column instead of using `addRow` here because of how the data is formatted. DynamicTable is flexible enough to accomodate both styles of data conversion.

trials_epoch = types.core.TimeIntervals(...
-    'start_time', types.hdmf_common.VectorData('data', data.trialStartTimes,...
-        'description', 'Start time of epoch, in seconds.'),...
-    'colnames', [data.trialTypeStr; data.trialPropertiesHash.keyNames .';...
-        {'start_time'; 'stop_time'; 'acquisition'; 'timeseries'}],...
+
+

The timeseries property of the TimeIntervals object is an example of a compound data type. These types are essentially tables of data in HDF5 and can be represented by a MATLAB table, an array of structs, or a struct of arrays. Beware: validation of column lengths here is not guaranteed by the type checker until export.

+

+VectorIndex objects index into a larger VectorData column. The object that is being referenced is indicated by the target property, which uses an ObjectView. Each element in the VectorIndex marks the last element in the corresponding vector data object for the VectorIndex row. Thus, the starting index for this row would be the previous index + 1. Note that these indices must be 0-indexed for compatibility with pynwb. You can see this in effect with the timeseries property which is indexed by the timeseries_index property.

+

Though TimeIntervals is a subclass of the DynamicTable type, we opt for populating the Dynamic Table data by column instead of using `addRow` here because of how the data is formatted. DynamicTable is flexible enough to accomodate both styles of data conversion.

+
trials_epoch = types.core.TimeIntervals(...
+    'colnames', {'start_time'}, ...
     'description', 'trial data and properties', ...
-    'id', types.hdmf_common.ElementIdentifiers('data', data.trialIds),...
-    'timeseries', types.hdmf_common.VectorData('description', 'Index into timeseries Data'),...
-    'timeseries_index', types.hdmf_common.VectorIndex(...
-    'description', 'Index into Timeseries VectorData',...
-    'target', types.untyped.ObjectView('/intervals/trials/timeseries')));
+    'start_time', types.hdmf_common.VectorData(...
+        'data', data.trialStartTimes', ...
+        'description', 'Start time of epoch, in seconds.'), ...
+    'id', types.hdmf_common.ElementIdentifiers(...
+        'data', data.trialIds' ) );
 
+% Add columns for the trial types
 for i=1:length(data.trialTypeStr)
-    trials_epoch.vectordata.set(data.trialTypeStr{i}, ...
-        types.hdmf_common.VectorData('data', data.trialTypeMat(i,:),...
-            'description', data.trialTypeStr{i}));
+    columnName = data.trialTypeStr{i};
+    columnData = types.hdmf_common.VectorData(...
+         'data', data.trialTypeMat(i,:)', ... % transpose for column vector
+         'description', data.trialTypeStr{i});
+    trials_epoch.addColumn( columnName, columnData )
 end
 
+% Add columns for the trial properties
 for i=1:length(data.trialPropertiesHash.keyNames)
+    columnName = data.trialPropertiesHash.keyNames{i};
     descr = data.trialPropertiesHash.descr{i};
     if iscellstr(descr)
         descr = strjoin(descr, newline);
     end
-    trials_epoch.vectordata.set(data.trialPropertiesHash.keyNames{i}, ...
-        types.hdmf_common.VectorData(...
-        'data', data.trialPropertiesHash.value{i}, ...
-        'description', descr));
+    columnData = types.hdmf_common.VectorData(...
+         'data', data.trialPropertiesHash.value{i},...
+         'description', descr);
+    trials_epoch.addColumn( columnName, columnData )
 end
+
 nwb.intervals_trials = trials_epoch;
-

Ephus spike data is separated into units, which map directly to the NWB property of the same name. Each unit contains a group of analysed waveforms and spike times, all linked to a different subset of trial IDs.

The waveforms are placed in the analysis Set and are paired with their unit name ('unitx' where 'x' is some unit ID).

Trial IDs, wherever they are used, are placed in a relevant control property of the data object, indicating which data is associated with which trial as defined in the trials table's id column.

nwb.units = types.core.Units('colnames',...
     {'spike_times', 'trials', 'waveforms'},...
     'description', 'Analysed Spike Events');
 esHash = data.eventSeriesHash;
@@ -338,7 +436,7 @@
 
 for i=1:length(ids)
     esData = esHash.value{i};
-    % add trials ID reference
+    % Add trials ID reference
 
     good_trials_mask = ismember(esData.eventTrials, nwb.intervals_trials.id.data);
     eventTrials = esData.eventTrials(good_trials_mask);
@@ -346,7 +444,7 @@
     waveforms = esData.waveforms(good_trials_mask,:);
     channel = esData.channel(good_trials_mask);
 
-    % add waveform data to "unitx" and associate with "waveform" column as ObjectView.
+    % Add waveform data to "unitx" and associate with "waveform" column as ObjectView.
     ses = types.core.SpikeEventSeries(...
         'control', ids(i),...
         'control_description', 'Units Table ID',...
@@ -365,10 +463,9 @@
     end
     nwb.analysis.set(ses_name, ses);
     nwb.units.addRow(...
-        'id', ids(i), 'trials', eventTrials,'spike_times', eventTimes, 'waveforms', ses_ref,...
-        'tablepath', '/units');
+        'id', ids(i), 'trials', eventTrials, 'spike_times', eventTimes, 'waveforms', ses_ref);
 
-    %add this timeseries into the trials table as well.
+    % Add this timeseries into the trials table as well.
     [s_trials, ~, trials_to_data] = unique(eventTrials);
     for j=1:length(s_trials)
         trial = s_trials(j);
@@ -379,10 +476,14 @@
         trial_timeseries{trial}(end+1, :) = {ses_ref int64(t_start) int64(t_count)};
     end
 end
-

To better understand how spike_times_index and spike_times map to each other, refer to this diagram from the Extracellular Electrophysiology Tutorial.
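In lieu of that diagram, here is a small worked example (hypothetical spike times) of how the ragged spike_times column is flattened and indexed:

% unit 1 spikes: [0.1 0.2 0.3]  -> 3 elements
% unit 2 spikes: [0.5]          -> 1 element
% unit 3 spikes: [0.7 0.9]      -> 2 elements
% spike_times.data       = [0.1 0.2 0.3 0.5 0.7 0.9]
% spike_times_index.data = [3 4 6]   (cumulative spike counts per unit)
% In MATLAB's 1-based indexing, unit i's spikes occupy elements
% spike_times_index(i-1)+1 through spike_times_index(i) of spike_times,
% treating spike_times_index(0) as 0.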

Raw Acquisition Data

Each ALM-3 session is associated with a large amount of raw voltage data, grouped by trial ID. To map this data to NWB, each trial's data is stored as its own ElectricalSeries object under the name 'trial n', where 'n' is the trial ID. The trials are then linked to the trials dynamic table for easy referencing.
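A minimal sketch of how one such trial could be wrapped (the variables rawTrace, trialStartTime, samplingRate, trialId and nrows are placeholders, and the electrode region mirrors the one built later in this script):

electrodeRegion = types.hdmf_common.DynamicTableRegion(...
    'description', 'electrodes recorded for this trial', ...
    'table', types.untyped.ObjectView('/general/extracellular_ephys/electrodes'), ...
    'data', (1:nrows) - 1);                % 0-indexed rows of the electrodes table
exampleSeries = types.core.ElectricalSeries(...
    'data', rawTrace, ...                  % raw voltage samples for this trial
    'starting_time', trialStartTime, ...   % seconds
    'starting_time_rate', samplingRate, ...
    'electrodes', electrodeRegion);
nwb.acquisition.set(sprintf('trial %d', trialId), exampleSeries);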

fprintf('Processing Raw Acquisition Data from `%s` (will take a while)\n', rawdata_loc);
-untarLoc = fullfile(pwd, identifier);
-if 7 ~= exist(untarLoc, 'dir')
-    untar(rawdata_loc, pwd);
+untarLoc = strrep(rawdata_loc, '.tar', '');
+if ~isfolder(untarLoc)
+    untar(rawdata_loc, fileparts(rawdata_loc));
 end
 
 rawfiles = dir(untarLoc);
@@ -394,8 +495,8 @@
     'table',types.untyped.ObjectView('/general/extracellular_ephys/electrodes'),...
     'data',(1:nrows) - 1);
 objrefs = cell(size(rawfiles));
-trials_idx = nwb.intervals_trials;
-endTimestamps = trials_idx.start_time.data;
+
+endTimestamps = trials_epoch.start_time.data;
 for i=1:length(rawfiles)
     tnumstr = regexp(rawfiles{i}, '_trial_(\d+)\.mat$', 'tokens', 'once');
     tnumstr = tnumstr{1};
@@ -418,34 +519,57 @@
     objrefs{tnum} = types.untyped.ObjectView(['/acquisition/' tname]);
 end
 
-%Link to the raw data by adding the acquisition column with ObjectViews
-%to the data
+% Link to the raw data by adding the acquisition column with ObjectViews
+% to the data
 emptyrefs = cellfun('isempty', objrefs);
 objrefs(emptyrefs) = {types.untyped.ObjectView('')};
-trials_idx.vectordata.set('acquisition', types.hdmf_common.VectorData(...
+
+trials_epoch.addColumn('acquisition', types.hdmf_common.VectorData(...
     'description', 'soft link to acquisition data for this trial',...
-    'data', [objrefs{:}]));
-trials_idx.stop_time = types.hdmf_common.VectorData(...
-    'data', endTimestamps,...
-    'description', 'the end time of each trial');
-
Processing Raw Acquisition Data from `data\RawVoltageTraces\ANM255201_20141124.tar` (will take a while)
-

Export

%first, we'll format and store |trial_timeseries| into |intervals_trials|.
-% note that |timeseries_index| data is 0-indexed.
-ts_len = cellfun('size', trial_timeseries, 1);
-is_len_nonzero = ts_len > 0;
-ts_len_nonzero = ts_len(is_len_nonzero);
-nwb.intervals_trials.timeseries_index.data = cumsum(ts_len_nonzero);
-% intervals/trials/timeseries is a compound type so we use cell2table to
+    'data', [objrefs{:}]'));
+
+trials_epoch.stop_time = types.hdmf_common.VectorData(...
+     'data', endTimestamps',...
+     'description', 'the end time of each trial');
+trials_epoch.colnames{end+1} = 'stop_time';
+
+
Processing Raw Acquisition Data from `data/RawVoltageTraces/ANM255201_20141124.tar` (will take a while)
+
+

Add timeseries to trials_epoch

+

First, we'll format and store trial_timeseries into intervals_trials. Note that the timeseries_index data is 0-indexed.
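As a quick illustration with hypothetical lengths: if trials 1-3 reference 2, 0 and 3 rows of the timeseries column respectively, then:

% ts_len         = [2 0 3]
% cumsum(ts_len) = [2 2 5]
% so timeseries_index(i) stores the cumulative number of referenced rows
% through trial i, marking where trial i's block ends in the flattened
% compound timeseries column.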

+
+ts_len = cellfun('size', trial_timeseries, 1);
+nwb.intervals_trials.timeseries_index = types.hdmf_common.VectorIndex(...
+    'description', 'Index into Timeseries VectorData', ...
+    'data', cumsum(ts_len)', ...
+    'target', types.untyped.ObjectView('/intervals/trials/timeseries') );
+
+% Intervals/trials/timeseries is a compound type so we use cell2table to
 % convert this 2-d cell array into a compatible table.
-nwb.intervals_trials.timeseries.data = cell2table(vertcat(trial_timeseries{is_len_nonzero}),...
+is_len_nonzero = ts_len > 0;
+trial_timeseries_table = cell2table(vertcat(trial_timeseries{is_len_nonzero}),...
     'VariableNames', {'timeseries', 'idx_start', 'count'});
-
-outDest = fullfile(outloc, [identifier '.nwb']);
-if 2 == exist(outDest, 'file')
-    delete(outDest);
+trial_timeseries_table = movevars(trial_timeseries_table, 'timeseries', 'After', 'count');
+
+interval_trials_timeseries = types.core.TimeSeriesReferenceVectorData(...
+    'description', 'Index into TimeSeries data', ...
+    'data', trial_timeseries_table);
+nwb.intervals_trials.timeseries = interval_trials_timeseries;
+nwb.intervals_trials.colnames{end+1} = 'timeseries';
+
+

Export

+
+nwbFilePath = fullfile(output_directory, [identifier '.nwb']);
+if isfile(nwbFilePath)
+    delete(nwbFilePath);
 end
-nwbExport(nwb, outDest);
-
\ No newline at end of file
+-->
+
+
diff --git a/tutorials/html/intro.html b/tutorials/html/intro.html
index ef1c1a90..9fb4c9db 100644
--- a/tutorials/html/intro.html
+++ b/tutorials/html/intro.html
@@ -1,35 +1,43 @@
-Introduction to MatNWB