diff --git a/+file/Attribute.m b/+file/Attribute.m index e465429b..3131d3ae 100644 --- a/+file/Attribute.m +++ b/+file/Attribute.m @@ -48,7 +48,7 @@ else obj.value = []; obj.readonly = false; - end + end if isKey(source, 'dims') obj.dimnames = source('dims'); diff --git a/+file/Dataset.m b/+file/Dataset.m index 77a2bbda..80b7e94c 100644 --- a/+file/Dataset.m +++ b/+file/Dataset.m @@ -6,6 +6,8 @@ dtype; isConstrainedSet; required; + value; + readonly; %determines whether value can be changed or not scalar; shape; dimnames; @@ -22,12 +24,15 @@ obj.type = ''; obj.dtype = 'any'; obj.required = true; + obj.value = []; + obj.readonly = false; obj.scalar = true; obj.definesType = false; obj.shape = {}; obj.dimnames = {}; obj.attributes = []; + if nargin < 1 return; @@ -42,6 +47,20 @@ if isKey(source, nameKey) obj.name = source(nameKey); end + + % Todo: same as for attribute, should consolidate + valueKey = 'value'; + defaultKey = 'default_value'; + if isKey(source, defaultKey) + obj.value = source(defaultKey); + obj.readonly = false; + elseif isKey(source, valueKey) + obj.value = source(valueKey); + obj.readonly = true; + else + obj.value = []; + obj.readonly = false; + end typeKeys = {'neurodata_type_def', 'data_type_def'}; parentKeys = {'neurodata_type_inc', 'data_type_inc'}; @@ -121,15 +140,19 @@ %constrained % error unless it defines the object. - - if isempty(obj.type) - error('You shouldn''t be calling getProps on an untyped dataset'); - end - - if obj.isConstrainedSet && ~obj.definesType - error('You shouldn''t be calling getProps on a constrained dataset'); - end - + + assert(... + ~isempty(obj.type), ... + 'NWB:Dataset:UnsupportedOperation', ... + 'The method `getProps` should not be called on an untyped dataset.' ... + ); + + assert( ... + ~obj.isConstrainedSet || obj.definesType, ... + 'NWB:Dataset:UnsupportedOperation', ... + 'The method `getProps` should not be called on constrained dataset.' ... + ); + if ~isempty(obj.dtype) props('data') = obj.dtype; end diff --git a/+file/Group.m b/+file/Group.m index d9fababe..00e62146 100644 --- a/+file/Group.m +++ b/+file/Group.m @@ -146,8 +146,14 @@ %should never happen if obj.isConstrainedSet && ~obj.definesType - error('getProps shouldn''t be called on a constrained set.'); + error('NWB:Group:UnsupportedOperation', ... + 'The method `getProps` should not be called on a constrained dataset.'); end + assert( ... + ~obj.isConstrainedSet || obj.definesType, ... + 'NWB:Group:UnsupportedOperation', ... + 'The method `getProps` should not be called on a constrained group.' ... + ); %datasets for i=1:length(obj.datasets) diff --git a/+file/cloneNwbFileClass.m b/+file/cloneNwbFileClass.m index c62b25d0..02771849 100644 --- a/+file/cloneNwbFileClass.m +++ b/+file/cloneNwbFileClass.m @@ -5,14 +5,21 @@ function cloneNwbFileClass(typeFileName, fullTypeName) nwbFilePath = which('NwbFile'); installPath = fileparts(nwbFilePath); -fileId = fopen(nwbFilePath); -text = strrep(char(fread(fileId) .'),... - 'NwbFile < types.core.NWBFile',... +nwbFileClassDef = fileread(nwbFilePath); + +% Update superclass name +updatedNwbFileClassDef = strrep(nwbFileClassDef, ... + 'NwbFile < types.core.NWBFile', ... sprintf('NwbFile < %s', fullTypeName)); -fclose(fileId); + +% Update call to superclass constructor +updatedNwbFileClassDef = strrep(updatedNwbFileClassDef, ... + 'obj = obj@types.core.NWBFile', ... 
+ sprintf('obj = obj@%s', fullTypeName)); fileId = fopen(fullfile(installPath, [typeFileName '.m']), 'W'); -fwrite(fileId, text); +fwrite(fileId, updatedNwbFileClassDef); fclose(fileId); -end +rehash(); +end diff --git a/+file/fillClass.m b/+file/fillClass.m index 2f4c48eb..58723bfd 100644 --- a/+file/fillClass.m +++ b/+file/fillClass.m @@ -33,16 +33,22 @@ optional = [optional {propertyName}]; end - if isa(prop, 'file.Attribute') + if isa(prop, 'file.Attribute') || isa(prop, 'file.Dataset') if prop.readonly readonly = [readonly {propertyName}]; end if ~isempty(prop.value) - defaults = [defaults {propertyName}]; + if isa(prop, 'file.Attribute') + defaults = [defaults {propertyName}]; + else % file.Dataset + if isRequired || all(isPropertyRequired) + defaults = [defaults {propertyName}]; + end + end end - if ~isempty(prop.dependent) + if isa(prop, 'file.Attribute') && ~isempty(prop.dependent) %extract prefix parentName = strrep(propertyName, ['_' prop.name], ''); parent = classprops(parentName); diff --git a/+file/fillConstructor.m b/+file/fillConstructor.m index 124330d4..269829d2 100644 --- a/+file/fillConstructor.m +++ b/+file/fillConstructor.m @@ -100,7 +100,8 @@ end end - %warn for missing namespaces/property types + % warn for missing namespaces/property types + warningId = 'NWB:ClassGenerator:NamespaceOrTypeNotFound'; warnmsg = ['`' parentName '`''s constructor is unable to check for type `%1$s` ' ... 'because its namespace or type specifier could not be found. Try generating ' ... 'the namespace or class definition for type `%1$s` or fix its schema.']; @@ -109,7 +110,7 @@ invalidWarn = invalid & (dynamicConstrained | isAnonymousType) & ~isAttribute; invalidVars = varnames(invalidWarn); for i=1:length(invalidVars) - warning(warnmsg, invalidVars{i}); + warning(warningId, warnmsg, invalidVars{i}); end varnames = lower(varnames); diff --git a/+file/fillCustomConstraint.m b/+file/fillCustomConstraint.m index 5be7d9e8..afe34a52 100644 --- a/+file/fillCustomConstraint.m +++ b/+file/fillCustomConstraint.m @@ -12,9 +12,11 @@ customConstraintStr = sprintf( [... 'function checkCustomConstraint(obj)\n', ... ' assert(~isempty(obj.timestamps) || ~isempty(obj.starting_time), ...\n', ... + ' ''NWB:TimeSeries:TimeNotSpecified'', ...\n ', ... ' "''timestamps'' or ''starting_time'' must be specified")\n', ... ' if ~isempty(obj.starting_time)\n', ... ' assert(~isempty(obj.starting_time_rate), ...\n', ... + ' ''NWB:TimeSeries:RateMissing'', ...\n', ... ' "''starting_time_rate'' must be specified when ''starting_time'' is specified")\n', ... ' end\n', ... 
'end'] ); diff --git a/+file/fillExport.m b/+file/fillExport.m index 9a4a242c..589fc0a3 100644 --- a/+file/fillExport.m +++ b/+file/fillExport.m @@ -21,9 +21,6 @@ for i = 1:length(propertyNames) propertyName = propertyNames{i}; pathProps = traverseRaw(propertyName, RawClass); - if isempty(pathProps) - keyboard; - end prop = pathProps{end}; elideProps = pathProps(1:end-1); elisions = cell(length(elideProps),1); @@ -84,11 +81,10 @@ path = {}; if isa(RawClass, 'file.Dataset') - if isempty(RawClass.attributes) - return; + if ~isempty(RawClass.attributes) + matchesAttribute = strcmp({RawClass.attributes.name}, propertyName); + path = {RawClass.attributes(matchesAttribute)}; end - matchesAttribute = strcmp({RawClass.attributes.name}, propertyName); - path = {RawClass.attributes(matchesAttribute)}; return; end diff --git a/+file/fillProps.m b/+file/fillProps.m index 114234f7..47faf216 100644 --- a/+file/fillProps.m +++ b/+file/fillProps.m @@ -52,28 +52,14 @@ typeStr = ['Table with columns: (', strjoin(columnDocStr, ', '), ')']; elseif isa(prop, 'file.Attribute') if isa(prop.dtype, 'containers.Map') - switch prop.dtype('reftype') - case 'region' - refTypeName = 'Region'; - case 'object' - refTypeName = 'Object'; - otherwise - error('Invalid reftype found whilst filling Constructor prop docs.'); - end - typeStr = sprintf('%s Reference to %s', refTypeName, prop.dtype('target_type')); + assertValidRefType(prop.dtype('reftype')) + typeStr = sprintf('%s reference to %s', capitalize(prop.dtype('reftype')), prop.dtype('target_type')); else typeStr = prop.dtype; end elseif isa(prop, 'containers.Map') - switch prop('reftype') - case 'region' - refTypeName = 'region'; - case 'object' - refTypeName = 'object'; - otherwise - error('Invalid reftype found whilst filling Constructor prop docs.'); - end - typeStr = sprintf('%s Reference to %s', refTypeName, prop('target_type')); + assertValidRefType(prop('reftype')) + typeStr = sprintf('%s reference to %s', capitalize(prop('reftype')), prop('target_type')); elseif isa(prop, 'file.interface.HasProps') typeStrCell = cell(size(prop)); for iProp = 1:length(typeStrCell) @@ -106,4 +92,20 @@ if nargin >= 2 propStr = [propName ' = ' propStr]; end +end + +function assertValidRefType(referenceType) + arguments + referenceType (1,1) string + end + assert( ismember(referenceType, ["region", "object"]), ... + 'NWB:ClassGenerator:InvalidRefType', ... + 'Invalid reftype found while filling description for class properties.') +end + +function word = capitalize(word) + arguments + word (1,:) char + end + word(1) = upper(word(1)); end \ No newline at end of file diff --git a/+file/fillValidators.m b/+file/fillValidators.m index caacadb2..ebd9cbf3 100644 --- a/+file/fillValidators.m +++ b/+file/fillValidators.m @@ -4,8 +4,8 @@ nm = propnames{i}; prop = props(nm); - - if isa(prop, 'file.Attribute') && prop.readonly && ~isempty(prop.value) + if (isa(prop, 'file.Attribute') || isa(prop, 'file.Dataset')) ... + && prop.readonly && ~isempty(prop.value) % Need to add a validator for inherited and readonly properties. In % the superclass these properties might not be read only and due to % inheritance its not possible to change property attributes @@ -155,34 +155,19 @@ fillDimensionValidation(prop.dtype, prop.shape)... }, newline); elseif prop.isConstrainedSet - try - fullname = namespaceReg.getFullClassName(prop.type); - catch ME - if ~endsWith(ME.identifier, 'Namespace:NotFound') - rethrow(ME); - end - - warning('NWB:Fill:Validators:NamespaceNotFound',... 
- ['Namespace could not be found for type `%s`.' ... - ' Skipping Validation for property `%s`.'], prop.type, name); - return; + fullname = getFullClassName(namespaceReg, prop.type, name); + if isempty(fullname) + return end + unitValidationStr = strjoin({unitValidationStr... ['constrained = { ''' fullname ''' };']... ['types.util.checkSet(''' name ''', struct(), constrained, val);']... }, newline); else - try - fullname = namespaceReg.getFullClassName(prop.type); - catch ME - if ~endsWith(ME.identifier, 'Namespace:NotFound') - rethrow(ME); - end - - warning('NWB:Fill:Validators:NamespaceNotFound',... - ['Namespace could not be found for type `%s`.' ... - ' Skipping Validation for property `%s`.'], prop.type, name); - return; + fullname = getFullClassName(namespaceReg, prop.type, name); + if isempty(fullname) + return end unitValidationStr = [unitValidationStr newline fillDtypeValidation(name, fullname)]; end @@ -240,7 +225,7 @@ ' return;'... 'end'... 'if ~istable(val) && ~isstruct(val) && ~isa(val, ''containers.Map'')'... - [' error(''Property `' name '` must be a table,struct, or containers.Map.'');']... + [' error(''NWB:Type:InvalidPropertyType'', ''Property `' name '` must be a table, struct, or containers.Map.'');']... 'end'... 'vprops = struct();'... }, newline); @@ -294,8 +279,7 @@ classNameSplit = strsplit(className, '.'); shortName = classNameSplit{end}; - - errorStr = sprintf( 'error(''Unable to set the ''''%s'''' property of class ''''%s'''' because it is read-only.'')', name, className, shortName); + errorStr = sprintf( 'error(''NWB:Type:ReadOnlyProperty'', ''Unable to set the ''''%s'''' property of class ''''%s'''' because it is read-only.'')', name, className, shortName); if ischar(value) condition = strjoin({ ... @@ -311,11 +295,27 @@ % Note: According to the documentation for Attribute specification keys % (https://schema-language.readthedocs.io/en/latest/description.html#sec-attributes-spec), % the above cases should be sufficient. - error('Unhandled case') + error('NWB:ClassGenerator:ReadOnlyValidatorNotImplemented', ... + 'Read-only validator is not implemented for values of type "%s"', class(value)) end fdvstr = strjoin({... condition, ... sprintf(' %s', errorStr), ... 'end' }, newline ); +end + +function fullname = getFullClassName(namespaceReg, propType, name) + fullname = ''; + try + fullname = namespaceReg.getFullClassName(propType); + catch ME + if ~endsWith(ME.identifier, 'Namespace:NotFound') + rethrow(ME); + end + + warning('NWB:Fill:Validators:NamespaceNotFound',... + ['Namespace could not be found for type `%s`.' ... + ' Skipping Validation for property `%s`.'], propType, name); + end end \ No newline at end of file diff --git a/+file/writeNamespace.m b/+file/writeNamespace.m index 95a77b65..00ab7aa9 100644 --- a/+file/writeNamespace.m +++ b/+file/writeNamespace.m @@ -4,7 +4,7 @@ function writeNamespace(namespaceName, saveDir) classFileDir = fullfile(saveDir, '+types', ['+' misc.str2validName(Namespace.name)]); -if 7 ~= exist(classFileDir, 'dir') +if ~isfolder(classFileDir) mkdir(classFileDir); end @@ -14,18 +14,14 @@ function writeNamespace(namespaceName, saveDir) className = classes{i}; [processed, classprops, inherited] = file.processClass(className, Namespace, pregenerated); - if isempty(processed) - continue; - end - - fid = fopen(fullfile(classFileDir, [className '.m']), 'W'); - try + if ~isempty(processed) + fid = fopen(fullfile(classFileDir, [className '.m']), 'W'); + % Create cleanup object to close to file in case the write operation fails. 
+ fileCleanupObj = onCleanup(@(id) fclose(fid)); fwrite(fid, file.fillClass(className, Namespace, processed, ... classprops, inherited), 'char'); - catch ME - fclose(fid); - rethrow(ME) + else + % pass end - fclose(fid); end end \ No newline at end of file diff --git a/+io/+space/+shape/Point.m b/+io/+space/+shape/Point.m index 83d105e4..d7f94d7e 100644 --- a/+io/+space/+shape/Point.m +++ b/+io/+space/+shape/Point.m @@ -22,11 +22,9 @@ end function varargout = getMatlabIndex(obj) - if 0 == nargout - return; + if nargout > 0 + varargout{1} = obj.index; end - - varargout{1} = obj.index; end end end diff --git a/+io/+spec/+internal/readEmbeddedSpecLocation.m b/+io/+spec/+internal/readEmbeddedSpecLocation.m new file mode 100644 index 00000000..15a28908 --- /dev/null +++ b/+io/+spec/+internal/readEmbeddedSpecLocation.m @@ -0,0 +1,18 @@ +function specLocation = readEmbeddedSpecLocation(fid, specLocAttributeName) + arguments + fid (1,1) H5ML.id + specLocAttributeName (1,1) string = '.specloc' + end + + specLocation = ''; + try % Check .specloc + attributeId = H5A.open(fid, specLocAttributeName); + attributeCleanup = onCleanup(@(id) H5A.close(attributeId)); + referenceRawData = H5A.read(attributeId); + specLocation = H5R.get_name(attributeId, 'H5R_OBJECT', referenceRawData); + catch ME + if ~strcmp(ME.identifier, 'MATLAB:imagesci:hdf5lib:libraryError') + rethrow(ME); + end % don't error if the attribute doesn't exist. + end +end \ No newline at end of file diff --git a/+io/+spec/getEmbeddedSpecLocation.m b/+io/+spec/getEmbeddedSpecLocation.m new file mode 100644 index 00000000..32215d9c --- /dev/null +++ b/+io/+spec/getEmbeddedSpecLocation.m @@ -0,0 +1,16 @@ +function specLocation = getEmbeddedSpecLocation(filename, options) +% getEmbeddedSpecLocation - Get location of embedded specs in NWB file +% +% Note: Returns an empty string if the spec location does not exist +% +% See also io.spec.internal.readEmbeddedSpecLocation + + arguments + filename (1,1) string {matnwb.common.mustBeNwbFile} + options.SpecLocAttributeName (1,1) string = '.specloc' + end + + fid = H5F.open(filename); + fileCleanup = onCleanup(@(id) H5F.close(fid) ); + specLocation = io.spec.internal.readEmbeddedSpecLocation(fid, options.SpecLocAttributeName); +end diff --git a/+io/+spec/readEmbeddedSpecifications.m b/+io/+spec/readEmbeddedSpecifications.m new file mode 100644 index 00000000..919e2fc9 --- /dev/null +++ b/+io/+spec/readEmbeddedSpecifications.m @@ -0,0 +1,60 @@ +function specs = readEmbeddedSpecifications(filename, specLocation) +% readEmbeddedSpecifications - Read embedded specs from an NWB file +% +% specs = io.spec.readEmbeddedSpecifications(filename, specLocation) read +% embedded specs from the specLocation in an NWB file +% +% Inputs: +% filename (string) : Absolute path of an nwb file +% specLocation (string) : h5 path for the location of specs inside the NWB file +% +% Outputs +% specs cell: A cell array of structs with one element for each embedded +% specification. 
Each struct has two fields: +% +% - namespaceName (char) : Name of the namespace for a specification +% - namespaceText (char) : The namespace declaration for a specification +% - schemaMap (containers.Map): A set of schema specifications for the namespace + + arguments + filename (1,1) string {matnwb.common.mustBeNwbFile} + specLocation (1,1) string + end + + specInfo = h5info(filename, specLocation); + specs = deal( cell(size(specInfo.Groups)) ); + + fid = H5F.open(filename); + fileCleanup = onCleanup(@(id) H5F.close(fid) ); + + for iGroup = 1:length(specInfo.Groups) + location = specInfo.Groups(iGroup).Groups(1); + + namespaceName = split(specInfo.Groups(iGroup).Name, '/'); + namespaceName = namespaceName{end}; + + filenames = {location.Datasets.Name}; + if ~any(strcmp('namespace', filenames)) + warning('NWB:Read:GenerateSpec:CacheInvalid',... + 'Couldn''t find a `namespace` in namespace `%s`. Skipping cache generation.',... + namespaceName); + return; + end + sourceNames = {location.Datasets.Name}; + fileLocation = strcat(location.Name, '/', sourceNames); + schemaMap = containers.Map; + for iFileLocation = 1:length(fileLocation) + did = H5D.open(fid, fileLocation{iFileLocation}); + if strcmp('namespace', sourceNames{iFileLocation}) + namespaceText = H5D.read(did); + else + schemaMap(sourceNames{iFileLocation}) = H5D.read(did); + end + H5D.close(did); + end + + specs{iGroup}.namespaceName = namespaceName; + specs{iGroup}.namespaceText = namespaceText; + specs{iGroup}.schemaMap = schemaMap; + end +end diff --git a/+io/+spec/writeEmbeddedSpecifications.m b/+io/+spec/writeEmbeddedSpecifications.m new file mode 100644 index 00000000..23c2e269 --- /dev/null +++ b/+io/+spec/writeEmbeddedSpecifications.m @@ -0,0 +1,45 @@ +function writeEmbeddedSpecifications(fid, jsonSpecs) + specLocation = io.spec.internal.readEmbeddedSpecLocation(fid); + + if isempty(specLocation) + specLocation = '/specifications'; + io.writeGroup(fid, specLocation); + specView = types.untyped.ObjectView(specLocation); + io.writeAttribute(fid, '/.specloc', specView); + end + + for iJson = 1:length(jsonSpecs) + JsonDatum = jsonSpecs(iJson); + schemaNamespaceLocation = strjoin({specLocation, JsonDatum.name}, '/'); + namespaceExists = io.writeGroup(fid, schemaNamespaceLocation); + if namespaceExists + namespaceGroupId = H5G.open(fid, schemaNamespaceLocation); + names = getVersionNames(namespaceGroupId); + H5G.close(namespaceGroupId); + for iNames = 1:length(names) + H5L.delete(fid, [schemaNamespaceLocation '/' names{iNames}],... + 'H5P_DEFAULT'); + end + end + schemaLocation =... + strjoin({schemaNamespaceLocation, JsonDatum.version}, '/'); + io.writeGroup(fid, schemaLocation); + Json = JsonDatum.json; + schemeNames = keys(Json); + for iScheme = 1:length(schemeNames) + name = schemeNames{iScheme}; + path = [schemaLocation '/' name]; + io.writeDataset(fid, path, Json(name)); + end + end +end + +function versionNames = getVersionNames(namespaceGroupId) + [~, ~, versionNames] = H5L.iterate(namespaceGroupId,... + 'H5_INDEX_NAME', 'H5_ITER_NATIVE',... + 0, @removeGroups, {}); + function [status, versionNames] = removeGroups(~, name, versionNames) + versionNames{end+1} = name; + status = 0; + end +end diff --git a/+io/getBaseType.m b/+io/getBaseType.m index 680019a0..bd1a37f2 100644 --- a/+io/getBaseType.m +++ b/+io/getBaseType.m @@ -45,6 +45,7 @@ id = [prefix suffix]; else - error('Type `%s` is not a supported raw type', type); + error('NWB:IO:UnsupportedBaseType', ... 
+ 'Type `%s` is not a supported raw type', type); end end \ No newline at end of file diff --git a/+io/parseCompound.m b/+io/parseCompound.m index 983224a4..35bf451c 100644 --- a/+io/parseCompound.m +++ b/+io/parseCompound.m @@ -51,6 +51,12 @@ logicalFieldName = fieldName(isLogicalType); for iFieldName = 1:length(logicalFieldName) name = logicalFieldName{iFieldName}; - data.(name) = strcmp('TRUE', data.(name)); + if isa(data.(name), 'int8') + data.(name) = logical(data.(name)); + elseif isa(data.(name), 'cell') && ismember(string(data.(name){1}), ["TRUE", "FALSE"]) + data.(name) = strcmp('TRUE', data.(name)); + else + error('NWB:ParseCompound:UnknownLogicalFormat', 'Could not resolve data of logical type') + end end end \ No newline at end of file diff --git a/+io/parseGroup.m b/+io/parseGroup.m index 46a87c6c..4574e8d7 100644 --- a/+io/parseGroup.m +++ b/+io/parseGroup.m @@ -76,9 +76,7 @@ parsed = NwbFile(kwargs{:}); else file.cloneNwbFileClass(Type.name, Type.typename); - rehash(); parsed = io.createParsedType(info.Name, Type.typename, kwargs{:}); - end return; @@ -116,14 +114,14 @@ if any(leads) %since set has been edited, we bubble up deletion of the old keys. subset = elide(pvalue, prop(leads), pvar); - elided = [elided; subset]; + elided = [elided; subset]; %#ok if pvalue.Count == 0 drop(i) = true; elseif any(strcmp(pvar, prop)) elided(pvar) = pvalue; drop(i) = true; else - warning('Unable to match property `%s` under prefix `%s`',... + warning('NWB:Parse:UnmatchedProperty', 'Unable to match property `%s` under prefix `%s`',... pvar, prefix); end end diff --git a/+io/resolvePath.m b/+io/resolvePath.m index 7ffcdbd7..0a1cb3f6 100644 --- a/+io/resolvePath.m +++ b/+io/resolvePath.m @@ -8,7 +8,6 @@ %process slash tokens o = nwb; -errmsg = 'Could not resolve path `%s`.'; while ~isempty(tokens) if isa(o, 'types.untyped.Set') [o, tokens] = resolveSet(o, tokens); @@ -18,7 +17,7 @@ [o, tokens] = resolveObj(o, tokens); end if isempty(o) - error(errmsg, path); + error('NWB:IO:UnresolvedPath', 'Could not resolve path `%s`.', path); end end end diff --git a/+io/timestamp2datetime.m b/+io/timestamp2datetime.m index e1db9198..f1fd165f 100644 --- a/+io/timestamp2datetime.m +++ b/+io/timestamp2datetime.m @@ -113,9 +113,8 @@ elseif ischar(timestamps) cells = {timestamps}; else - errorId = "NWB:timestamp2datetime:MustBeCharCellArrayOrString"; - errorMsg = ['timestamps must be a string, character array, ', ... - 'or cell array of strings/character arrays.']; - error(errorId, errorMsg); + error('NWB:timestamp2datetime:MustBeCharCellArrayOrString', ... + [ 'Timestamps must be a string, character array, ', ... + 'or cell array of strings/character arrays.' ]); end end diff --git a/+io/writeAttribute.m b/+io/writeAttribute.m index 93f5b1eb..f257d029 100644 --- a/+io/writeAttribute.m +++ b/+io/writeAttribute.m @@ -3,22 +3,22 @@ function writeAttribute(fid, fullpath, data, varargin) [tid, sid, data] = io.mapData2H5(fid, data, varargin{:}); [path, name] = io.pathParts(fullpath); if isempty(path) - path = '/'; %weird case if the property is in root + path = '/'; % Weird case if the property is in root end oid = H5O.open(fid, path, 'H5P_DEFAULT'); +h5CleanupObj = onCleanup(@(sid_, oid_) closeSpaceAndObject(sid, oid) ); + try id = H5A.create(oid, name, tid, sid, 'H5P_DEFAULT'); catch ME - %when a dataset is copied over, it also copies all attributes with it. - %So we have to open the Attribute for overwriting instead. + % When a dataset is copied over, it also copies all attributes with it. 
+ % So we have to open the Attribute for overwriting instead. % this may also happen if the attribute is a reference if contains(ME.message, 'H5A__create attribute already exists')... || contains(ME.message, 'H5A_create attribute already exists') H5A.delete(oid, name); id = H5A.create(oid, name, tid, sid, 'H5P_DEFAULT'); else - H5O.close(oid); - H5S.close(sid); rethrow(ME); end end @@ -26,6 +26,9 @@ function writeAttribute(fid, fullpath, data, varargin) H5A.write(id, tid, data); end H5A.close(id); -H5S.close(sid); -H5O.close(oid); + +function closeSpaceAndObject(spaceId, objectId) + H5S.close(spaceId); + H5O.close(objectId); +end end \ No newline at end of file diff --git a/+io/writeCompound.m b/+io/writeCompound.m index 503b03b2..eca732db 100644 --- a/+io/writeCompound.m +++ b/+io/writeCompound.m @@ -1,4 +1,32 @@ function writeCompound(fid, fullpath, data, varargin) +% writeCompound - Write structured data to an HDF5 compound dataset. +% +% io.writeCompound(fid, fullpath, data, varargin) converts data (in table, +% struct, or containers.Map format) into a scalar struct, optimizes it for +% HDF5 storage, and writes it to an HDF5 compound dataset specified by fid +% and fullpath. +% +% Inputs: +% fid - File identifier for an open HDF5 file. +% fullpath - Full path within the HDF5 file where data will be stored. +% data - Data to write, provided as a table, struct, or containers.Map. +% varargin - Additional optional arguments. +% +% Functionality: +% - Converts input data into a scalar struct, rearranging fields and types as needed. +% - Detects data types, sizes, and handles compound HDF5 type creation. +% - Optimizes data for HDF5 by transposing column vectors and converting logicals. +% - Manages references to external data objects, regions, or untyped views. +% - Attempts to extend or overwrite existing datasets if a compound dataset at +% the specified path already exists. +% +% Notes: +% - If `fullpath` already exists in the HDF5 file, the function tries to adjust +% dimensions if the dataset is chunked, and issues a warning if resizing is not allowed. +% +% Example: +% io.writeCompound(fid, '/group/dataset', data); + %convert to a struct if istable(data) data = table2struct(data); @@ -67,7 +95,7 @@ function writeCompound(fid, fullpath, data, varargin) % convert logical values boolNames = names(strcmp(classes, 'logical')); for iField = 1:length(boolNames) - data.(boolNames{iField}) = strcmp('TRUE', data.(boolNames{iField})); + data.(boolNames{iField}) = int8(data.(boolNames{iField})); end %transpose numeric column arrays to row arrays @@ -103,8 +131,12 @@ function writeCompound(fid, fullpath, data, varargin) if is_chunked H5D.set_extent(did, dims); else - warning('Attempted to change size of continuous compound `%s`. Skipping.',... + warning('NWB:WriteCompund:ContinuousCompoundResize', ... + 'Attempted to change size of continuous compound `%s`. Skipping.', ... fullpath); + H5D.close(did); + H5S.close(sid); + return end end H5P.close(create_plist); diff --git a/+io/writeDataset.m b/+io/writeDataset.m index 7601864b..031b1281 100644 --- a/+io/writeDataset.m +++ b/+io/writeDataset.m @@ -26,7 +26,8 @@ function writeDataset(fid, fullpath, data, varargin) if ~is_same_dims && is_chunked H5D.set_extent(did, dims); elseif ~is_same_dims - warning('Attempted to change size of continuous dataset `%s`. Skipping.',... + warning('NWB:WriteDataset:ContinuousDatasetResize', ... + 'Attempted to change size of continuous dataset `%s`. Skipping.',... 
fullpath); H5S.close(sid); H5D.close(did); diff --git a/+matnwb/+common/findLatestSchemaVersion.m b/+matnwb/+common/findLatestSchemaVersion.m new file mode 100644 index 00000000..5ab078e2 --- /dev/null +++ b/+matnwb/+common/findLatestSchemaVersion.m @@ -0,0 +1,20 @@ +function latestVersion = findLatestSchemaVersion() +% findLatestSchemaVersion - Find latest available schema version. + + schemaListing = dir(fullfile(misc.getMatnwbDir(), 'nwb-schema')); + schemaVersionNumbers = setdiff({schemaListing.name}, {'.', '..'}); + + % Split each version number into major, minor, and patch components + versionComponents = cellfun(@(v) sscanf(v, '%d.%d.%d'), ... + schemaVersionNumbers, 'UniformOutput', false); + + % Convert the components into an array for easy comparison + versionMatrix = cat(2, versionComponents{:})'; + + % Find the row with the highest version number, weighting major + % and minor with factors of 6 and 3 respectively + [~, latestIndex] = max(versionMatrix * [1e6; 1e3; 1]); % Weight major, minor, patch + + % Return the latest version + latestVersion = schemaVersionNumbers{latestIndex}; +end \ No newline at end of file diff --git a/+matnwb/+common/mustBeNwbFile.m b/+matnwb/+common/mustBeNwbFile.m new file mode 100644 index 00000000..50b21777 --- /dev/null +++ b/+matnwb/+common/mustBeNwbFile.m @@ -0,0 +1,7 @@ +function mustBeNwbFile(filePath) +% mustBeNwbFile - Check that file path points to existing file with .nwb extension + arguments + filePath (1,1) string {mustBeFile} + end + assert(endsWith(filePath, ".nwb", "IgnoreCase", true)) +end \ No newline at end of file diff --git a/+matnwb/+common/mustBeValidSchemaVersion.m b/+matnwb/+common/mustBeValidSchemaVersion.m new file mode 100644 index 00000000..a44c9d60 --- /dev/null +++ b/+matnwb/+common/mustBeValidSchemaVersion.m @@ -0,0 +1,30 @@ +function mustBeValidSchemaVersion(versionNumber) +% mustBeValidSchemaVersion - Validate version number against available schemas + arguments + versionNumber (1,1) string + end + + persistent schemaVersionNumbers + + if versionNumber == "latest" + return % Should be resolved downstream. + end + + versionPattern = "^\d+\.\d+\.\d+$"; % i.e 2.0.0 + if isempty(regexp(versionNumber, versionPattern, 'once')) + error('NWB:VersionValidator:InvalidVersionNumber', ... + "Version number should formatted as ..") + end + + % Validate supported schema version + if isempty(schemaVersionNumbers) + schemaListing = dir(fullfile(misc.getMatnwbDir(), 'nwb-schema')); + schemaVersionNumbers = setdiff({schemaListing.name}, {'.', '..'}); + end + + if ~any(strcmp(versionNumber, schemaVersionNumbers)) + error('NWB:VersionValidator:UnsupportedSchemaVersion', ... + "The provided version number ('%s') is not supported by this version of MatNWB", ... + versionNumber) + end +end diff --git a/+misc/str2validName.m b/+misc/str2validName.m index 126e6c8c..dd93de15 100644 --- a/+misc/str2validName.m +++ b/+misc/str2validName.m @@ -3,6 +3,7 @@ % Converts the property name into a valid matlab property name. % propname: the offending property name % prefix: optional prefix to use instead of the ambiguous "dyn" + if ~iscell(propname) && isvarname(propname) valid = propname; return; @@ -12,7 +13,8 @@ prefix = 'dyn_'; else if ~isvarname(prefix) - warning('Prefix contains invalid variable characters. Reverting to "dyn"'); + warning('NWB:CreateValidPropertyName:InvalidPrefix', ... + 'Prefix contains invalid variable characters. 
Reverting to "dyn"'); prefix = 'dyn_'; end end diff --git a/+schemes/Namespace.m b/+schemes/Namespace.m index 271d77d3..72a8c7e0 100644 --- a/+schemes/Namespace.m +++ b/+schemes/Namespace.m @@ -1,8 +1,8 @@ classdef Namespace < handle properties (SetAccess=private) - name; %name of this namespace - dependencies; %parent namespaces by [Namespace] - registry; %maps name to class + name = '' % name of this namespace + dependencies = [] % parent namespaces by [Namespace] + registry = [] % maps name to class end properties (Constant) @@ -13,10 +13,7 @@ methods function obj = Namespace(name, deplist, source) if nargin == 0 - obj.name = ''; - obj.dependencies = []; - obj.registry = []; - return; + return end obj.name = strrep(name, '-', '_'); @@ -40,19 +37,22 @@ function parent = getParent(obj, classname) class = obj.getClass(classname); - if isempty(class) - error('Could not find class %s', classname); - end + + assert( ... + ~isempty(class), ... + 'NWB:Namespace:ClassNotFound', ... + 'Could not find class %s', classname ... + ); parent = []; hasParentKey = isKey(class, obj.PARENT_KEYS); if any(hasParentKey) parentName = class(obj.PARENT_KEYS{hasParentKey}); parent = obj.getClass(parentName); - assert(~isempty(parent),... - 'Parent %s for class %s doesn''t exist! Missing Dependency?',... - parentName,... - classname); + assert(~isempty(parent), ... + 'NWB:Namespace:ParentNotFound', ... + 'Parent %s for class %s doesn''t exist! Missing Dependency?', ... + parentName, classname); end end diff --git a/+spec/generate.m b/+spec/generate.m index b452f38d..f6dfcffb 100644 --- a/+spec/generate.m +++ b/+spec/generate.m @@ -9,15 +9,13 @@ for iInfo = 1:length(Namespaces) Namespaces(iInfo).namespace = namespace; - if ischar(schemaSource) + if ischar(schemaSource) || isstring(schemaSource) schema = containers.Map; Namespace = Namespaces(iInfo); for iFilenames = 1:length(Namespace.filenames) filenameStub = Namespace.filenames{iFilenames}; filename = [filenameStub '.yaml']; - fid = fopen(fullfile(schemaSource, filename)); - schema(filenameStub) = fread(fid, '*char') .'; - fclose(fid); + schema(filenameStub) = fileread(fullfile(schemaSource, filename)); end schema = spec.getSourceInfo(schema); else % map of schemas with their locations diff --git a/+spec/loadCache.m b/+spec/loadCache.m index e448735d..d13cbe45 100644 --- a/+spec/loadCache.m +++ b/+spec/loadCache.m @@ -1,40 +1,34 @@ -function Cache = loadCache(varargin) +function Cache = loadCache(namespaceName, options) %LOADCACHE Loads Raw Namespace Metadata from cached directory -saveDirMask = strcmp(varargin, 'savedir'); -if any(saveDirMask) - assert(~saveDirMask(end),... - 'NWB:LoadCache:InvalidParameter',... - 'savedir must be paired with the desired save directory.'); - saveDir = varargin{find(saveDirMask, 1, 'last') + 1}; - saveDirParametersMask = saveDirMask | circshift(saveDirMask, 1); - namespaceList = varargin(~saveDirParametersMask); -else - saveDir = misc.getMatnwbDir(); - namespaceList = varargin; +arguments (Repeating) + namespaceName (1,1) string end +arguments + options.savedir (1,1) string = misc.getMatnwbDir() +end + +Cache = struct.empty; % Initialize output + +namespaceList = string(namespaceName); % Get the actual location of the matnwb directory. 
-namespaceDir = fullfile(saveDir, 'namespaces'); +namespaceDir = fullfile(options.savedir, 'namespaces'); fileList = dir(namespaceDir); fileList = fileList(~[fileList.isdir]); -if nargin > 0 - assert(iscellstr(namespaceList), 'Input arguments must be a list of namespace names.'); +if ~isempty(namespaceList) names = {fileList.name}; - whitelistIdx = ismember(names, strcat(namespaceList, '.mat')); + whitelistIdx = ismember(names, strcat(namespaceList + ".mat")); fileList = fileList(whitelistIdx); end -if isempty(fileList) - Cache = struct([]); - return; +if ~isempty(fileList) + matPath = fullfile(namespaceDir, fileList(1).name); + Cache = load(matPath); % initialize Cache first + for iMat = 2:length(fileList) + matPath = fullfile(namespaceDir, fileList(iMat).name); + Cache(iMat) = load(matPath); + end end - -matPath = fullfile(namespaceDir, fileList(1).name); -Cache = load(matPath); % initialize Cache first -for iMat = 2:length(fileList) - matPath = fullfile(namespaceDir, fileList(iMat).name); - Cache(iMat) = load(matPath); end -end \ No newline at end of file diff --git a/+spec/saveCache.m b/+spec/saveCache.m index 17d10b6e..9ed16074 100644 --- a/+spec/saveCache.m +++ b/+spec/saveCache.m @@ -2,7 +2,7 @@ function saveCache(NamespaceInfo, saveDir) %SAVECACHE saves namespace info as .mat in `namespaces` directory namespacePath = fullfile(saveDir, 'namespaces'); -if 7 ~= exist(namespacePath, 'dir') +if ~isfolder(namespacePath) mkdir(namespacePath); end diff --git a/+tests/+fixtures/ResetGeneratedTypesFixture.m b/+tests/+fixtures/ResetGeneratedTypesFixture.m new file mode 100644 index 00000000..312881c7 --- /dev/null +++ b/+tests/+fixtures/ResetGeneratedTypesFixture.m @@ -0,0 +1,16 @@ +classdef ResetGeneratedTypesFixture < matlab.unittest.fixtures.Fixture + % ResetGeneratedTypesFixture - Fixture for resetting generated NWB classes. + % + % ResetGeneratedTypesFixture clears all the generated classes for NWB + % types from the matnwb folder. When the fixture is set up, all generated + % class files for NWB types are deleted. When the fixture is torn down, + % generateCore is called to regenerate the classes for NWB types of the + % latest NWB version + + methods + function setup(fixture) + fixture.addTeardown( @generateCore ) + nwbClearGenerated() + end + end +end diff --git a/+tests/+sanity/GenerationTest.m b/+tests/+sanity/GenerationTest.m index 1b30f54c..bf6442b9 100644 --- a/+tests/+sanity/GenerationTest.m +++ b/+tests/+sanity/GenerationTest.m @@ -5,8 +5,13 @@ methods (TestClassSetup) function setupClass(testCase) - rootPath = fullfile(fileparts(mfilename('fullpath')), '..', '..'); - testCase.applyFixture(matlab.unittest.fixtures.PathFixture(rootPath)); + import matlab.unittest.fixtures.PathFixture + import tests.fixtures.ResetGeneratedTypesFixture + + rootPath = tests.util.getProjectDirectory(); + testCase.applyFixture( PathFixture(rootPath) ); + + testCase.applyFixture( ResetGeneratedTypesFixture ); end end diff --git a/+tests/+system/NWBFileIOTest.m b/+tests/+system/NWBFileIOTest.m index 2473e34d..03a38255 100644 --- a/+tests/+system/NWBFileIOTest.m +++ b/+tests/+system/NWBFileIOTest.m @@ -40,6 +40,13 @@ function writeMultipleFiles(testCase) nwbExport([fileA, fileB], {fileNameA, fileNameB}); end + function testLoadAll(testCase) + fileName = ['MatNWB.' testCase.className() '.testLoadAll.nwb']; + nwbExport(testCase.file, fileName) + nwb = nwbRead(fileName, "ignorecache"); + nwb.loadAll() + end + function readWithStringArg(testCase) fileName = ['MatNWB.' 
testCase.className() '.testReadWithStringArg.nwb']; fileName = string(fileName); @@ -61,7 +68,9 @@ function readFileWithoutSpecLoc(testCase) testCase.deleteAttributeFromFile(fileName, '/', '.specloc') - nwbRead(fileName); + % When specloc is missing, the specifications are not added to + % the blacklist, so it will get passed as an input to NwbFile. + testCase.verifyError(@(fn) nwbRead(fileName), 'MATLAB:TooManyInputs'); end function readFileWithUnsupportedVersion(testCase) @@ -74,7 +83,26 @@ function readFileWithUnsupportedVersion(testCase) io.writeAttribute(file_id, '/nwb_version', '1.0.0') H5F.close(file_id); - nwbRead(fileName); + testCase.verifyWarning(@(fn) nwbRead(fileName), 'NWB:Read:UnsupportedSchema') + end + + function readFileWithUnsupportedVersionAndNoSpecloc(testCase) + import matlab.unittest.fixtures.SuppressedWarningsFixture + testCase.applyFixture(SuppressedWarningsFixture('NWB:Read:UnsupportedSchema')) + + fileName = ['MatNWB.' testCase.className() '.testReadFileWithUnsupportedVersionAndNoSpecloc.nwb']; + nwbExport(testCase.file, fileName) + + testCase.deleteAttributeFromFile(fileName, '/', '.specloc') + testCase.deleteAttributeFromFile(fileName, '/', 'nwb_version') + + file_id = H5F.open(fileName, 'H5F_ACC_RDWR', 'H5P_DEFAULT'); + io.writeAttribute(file_id, '/nwb_version', '1.0.0') + H5F.close(file_id); + + % When specloc is missing, the specifications are not added to + % the blacklist, so it will get passed as an input to NwbFile. + testCase.verifyError(@(fn) nwbRead(fileName), 'MATLAB:TooManyInputs'); end end diff --git a/+tests/+system/PyNWBIOTest.m b/+tests/+system/PyNWBIOTest.m index d08c2053..fc3ceb32 100644 --- a/+tests/+system/PyNWBIOTest.m +++ b/+tests/+system/PyNWBIOTest.m @@ -34,10 +34,10 @@ function testInFromPyNWB(testCase) methods function [status, cmdout] = runPyTest(testCase, testName) - setenv('PYTHONPATH', fileparts(mfilename('fullpath'))); + tests.util.addFolderToPythonPath( fileparts(mfilename('fullpath')) ) envPath = fullfile('+tests', 'env.mat'); - if 2 == exist(envPath, 'file') + if isfile(envPath) Env = load(envPath, '-mat'); if isfield(Env, 'pythonPath') pythonPath = Env.pythonPath; diff --git a/+tests/+unit/+common/ValidatorTest.m b/+tests/+unit/+common/ValidatorTest.m new file mode 100644 index 00000000..c4a6d9e5 --- /dev/null +++ b/+tests/+unit/+common/ValidatorTest.m @@ -0,0 +1,11 @@ +classdef ValidatorTest < matlab.unittest.TestCase +% ValidatorTest - Unit test for validators. + + methods (Test) + function testInvalidVersionNumberFormat(testCase) + testCase.verifyError( ... + @(vn) matnwb.common.mustBeValidSchemaVersion('1.0'), ... + 'NWB:VersionValidator:InvalidVersionNumber') + end + end +end \ No newline at end of file diff --git a/+tests/+unit/+file/CloneNwbTest.m b/+tests/+unit/+file/CloneNwbTest.m new file mode 100644 index 00000000..1aeb11ab --- /dev/null +++ b/+tests/+unit/+file/CloneNwbTest.m @@ -0,0 +1,52 @@ +classdef CloneNwbTest < matlab.unittest.TestCase + + methods (TestClassSetup) + function setupClass(testCase) + % Get the root path of the matnwb repository + rootPath = misc.getMatnwbDir(); + + % Use a fixture to add the folder to the search path + testCase.applyFixture(matlab.unittest.fixtures.PathFixture(rootPath)); + + % Use a fixture to create a temporary working directory + testCase.applyFixture(matlab.unittest.fixtures.WorkingFolderFixture); + + generateCore('savedir', '.') + end + end + + methods (Test) + function testCloneNwbFile(testCase) + % Create a superclass + superClassDef = [... 
+ 'classdef MyCustomNwbFile < types.core.NWBFile\n', ... + ' methods\n', ... + ' function sayHello(obj)\n', ... + ' fprintf(''Hello %%s\\n'', obj.general_experimenter)\n', ... + ' end\n', ... + ' end\n', ... + 'end\n']; + fid = fopen('MyCustomNwbFile.m', 'w'); + fprintf(fid, superClassDef); + fclose(fid); + + currentClassDef = fileread(fullfile(misc.getMatnwbDir(), 'NwbFile.m')); + cleanupObj = onCleanup(@(classDefStr) restoreNwbFileClass(currentClassDef)); + + file.cloneNwbFileClass(fullfile('NwbFile'), 'MyCustomNwbFile') + + testCase.verifyTrue( isfile(fullfile(misc.getMatnwbDir(), 'NwbFile.m')) ) + + nwbFile = NwbFile(); + nwbFile.general_experimenter = "Mouse McMouse"; + C = evalc('nwbFile.sayHello()'); + testCase.verifyEqual(C, sprintf('Hello Mouse McMouse\n')); + end + end +end + +function restoreNwbFileClass(classDefStr) + fid = fopen( fullfile(misc.getMatnwbDir(), 'NwbFile.m'), 'wt' ); + fwrite(fid, classDefStr); + fclose(fid); +end \ No newline at end of file diff --git a/+tests/+unit/+io/IsBoolTest.m b/+tests/+unit/+io/IsBoolTest.m new file mode 100644 index 00000000..ada77b57 --- /dev/null +++ b/+tests/+unit/+io/IsBoolTest.m @@ -0,0 +1,10 @@ +classdef IsBoolTest < matlab.unittest.TestCase +% IsBoolTest - Unit test for io.isBool function. + + methods (Test) + function testInvalidInput(testCase) + testCase.verifyError(@(x) io.isBool("string"), ... + 'NWB:IO:IsBool:InvalidArgument') + end + end +end \ No newline at end of file diff --git a/+tests/+unit/+io/PathPartsTest.m b/+tests/+unit/+io/PathPartsTest.m new file mode 100644 index 00000000..7036a467 --- /dev/null +++ b/+tests/+unit/+io/PathPartsTest.m @@ -0,0 +1,24 @@ +classdef PathPartsTest < matlab.unittest.TestCase +% PathPartsTest - Unit test for io.pathParts function. + +% Todo: Function has confusing naming of outputs. Should be fixed + methods (Test) + function testRootPath(testCase) + [stem, root] = io.pathParts('root'); + testCase.verifyEqual(root, 'root') + testCase.verifyEmpty(stem) + end + + function testRootWithStemPath(testCase) + [stem, root] = io.pathParts('root/stem'); + testCase.verifyEqual(root, 'stem') + testCase.verifyEqual(stem, 'root') + end + + function testRootWithLongerStemPath(testCase) + [stem, root] = io.pathParts('root/stem/leaf'); + testCase.verifyEqual(root, 'leaf') + testCase.verifyEqual(stem, 'root/stem') + end + end +end \ No newline at end of file diff --git a/+tests/+unit/+io/SpaceTest.m b/+tests/+unit/+io/SpaceTest.m new file mode 100644 index 00000000..9395d7a8 --- /dev/null +++ b/+tests/+unit/+io/SpaceTest.m @@ -0,0 +1,25 @@ +classdef SpaceTest < matlab.unittest.TestCase +% SpaceTest - Unit test for io.space.* namespace. + + methods (Test) + function testEmptyInput(testCase) + shape = io.space.findShapes([]); + + testCase.verifyClass(shape, 'cell') + testCase.verifyLength(shape, 1) + testCase.verifyClass(shape{1}, 'io.space.shape.Block') + end + + function testSegmentSelection(testCase) + shape = io.space.segmentSelection({1:10}, [1,100]); + + testCase.verifyClass(shape, 'cell') + end + + function testPoint(testCase) + point = io.space.shape.Point(1); + + testCase.verifyEqual(point.getMatlabIndex, 1) + end + end +end \ No newline at end of file diff --git a/+tests/+unit/+io/TypeConversionTest.m b/+tests/+unit/+io/TypeConversionTest.m new file mode 100644 index 00000000..7438289c --- /dev/null +++ b/+tests/+unit/+io/TypeConversionTest.m @@ -0,0 +1,123 @@ +classdef TypeConversionTest < matlab.unittest.TestCase +% TypeConversionTest - Unit test for io.getMatType and io.getBaseType functions. 
+ + properties (TestParameter) + matlabType = {... + 'types.untyped.ObjectView', ... + 'types.untyped.RegionView', ... + 'char', ... + 'double', ... + 'single', ... + 'logical', ... + 'int8', 'int16', 'int32', 'int64', ... + 'uint8', 'uint16', 'uint32', 'uint64', ... + } + end + + methods (Test) + + function testRoundTrip(testCase, matlabType) + tid = io.getBaseType(matlabType); + testCase.verifyEqual(io.getMatType(tid), matlabType); + end + + function testRoundTripCell(testCase) + tid = io.getBaseType('cell'); + testCase.verifyEqual(io.getMatType(tid), 'char'); + end + + function testRoundTripDatetime(testCase) + tid = io.getBaseType('datetime'); + testCase.verifyEqual(io.getMatType(tid), 'char'); + end + + function testRoundTripStruct(testCase) + testCase.verifyError(@(type)io.getBaseType('struct'), ... + 'NWB:IO:UnsupportedBaseType'); + end + + function testDoubleType(testCase) + tid = H5T.copy('H5T_IEEE_F64LE'); + testCase.verifyEqual(io.getMatType(tid), 'double'); + end + + function testSingleType(testCase) + tid = H5T.copy('H5T_IEEE_F32LE'); + testCase.verifyEqual(io.getMatType(tid), 'single'); + end + + function testUint8Type(testCase) + tid = H5T.copy('H5T_STD_U8LE'); + testCase.verifyEqual(io.getMatType(tid), 'uint8'); + end + + function testInt8Type(testCase) + tid = H5T.copy('H5T_STD_I8LE'); + testCase.verifyEqual(io.getMatType(tid), 'int8'); + end + + function testUint16Type(testCase) + tid = H5T.copy('H5T_STD_U16LE'); + testCase.verifyEqual(io.getMatType(tid), 'uint16'); + end + + function testInt16Type(testCase) + tid = H5T.copy('H5T_STD_I16LE'); + testCase.verifyEqual(io.getMatType(tid), 'int16'); + end + + function testUint32Type(testCase) + tid = H5T.copy('H5T_STD_U32LE'); + testCase.verifyEqual(io.getMatType(tid), 'uint32'); + end + + function testInt32Type(testCase) + tid = H5T.copy('H5T_STD_I32LE'); + testCase.verifyEqual(io.getMatType(tid), 'int32'); + end + + function testUint64Type(testCase) + tid = H5T.copy('H5T_STD_U64LE'); + testCase.verifyEqual(io.getMatType(tid), 'uint64'); + end + + function testInt64Type(testCase) + tid = H5T.copy('H5T_STD_I64LE'); + testCase.verifyEqual(io.getMatType(tid), 'int64'); + end + + function testCharType(testCase) + tid = io.getBaseType('char'); % Assuming io.getBaseType exists + testCase.verifyEqual(io.getMatType(tid), 'char'); + end + + function testObjectViewType(testCase) + tid = H5T.copy('H5T_STD_REF_OBJ'); + testCase.verifyEqual(io.getMatType(tid), 'types.untyped.ObjectView'); + end + + function testRegionViewType(testCase) + tid = H5T.copy('H5T_STD_REF_DSETREG'); + testCase.verifyEqual(io.getMatType(tid), 'types.untyped.RegionView'); + end + + function testLogicalType(testCase) + % Simulate or define a logical type ID for testing + tid = H5T.enum_create('H5T_NATIVE_INT'); + H5T.enum_insert(tid, 'FALSE', 0); + H5T.enum_insert(tid, 'TRUE', 1); + + testCase.verifyEqual(io.getMatType(tid), 'logical'); + end + + function testTableType(testCase) + tid = H5T.create('H5T_COMPOUND', 10); + testCase.verifyEqual(io.getMatType(tid), 'table'); + end + + function testUnknownType(testCase) + tid = H5T.copy('H5T_NATIVE_B64'); % Example of an unknown type + testCase.verifyError(@() io.getMatType(tid), 'NWB:IO:GetMatlabType:UnknownTypeID'); + end + end +end diff --git a/+tests/+unit/+io/WriteTest.m b/+tests/+unit/+io/WriteTest.m new file mode 100644 index 00000000..ead733b0 --- /dev/null +++ b/+tests/+unit/+io/WriteTest.m @@ -0,0 +1,143 @@ +classdef WriteTest < matlab.unittest.TestCase +% WriteTest - Unit test for io.write* functions. 
+ + methods (TestMethodSetup) + function setup(testCase) + % Use a fixture to create a temporary working directory + testCase.applyFixture(matlab.unittest.fixtures.WorkingFolderFixture); + end + end + + methods (Test) + + function testWriteBooleanAttribute(testCase) + filename = 'temp_test_file.h5'; + fid = H5F.create(filename, 'H5F_ACC_TRUNC', 'H5P_DEFAULT', 'H5P_DEFAULT'); + fileCleanupObj = onCleanup(@(id) H5F.close(fid)); + + targetPath = '/'; + io.writeGroup(fid, targetPath) + + % Define target dataset path and create it in the HDF5 file + io.writeAttribute(fid, '/test', true); % First write to create the dataset + + % Read using h5readatt and confirm value + value = h5readatt(filename, '/', 'test'); + testCase.verifyTrue( strcmp(value, 'TRUE')) + + % Read using io.parseAttributes and confirm value + blackList = struct(... + 'attributes', {{'.specloc', 'object_id'}},... + 'groups', {{}}); + + S = h5info(filename); + [attributeProperties, ~] =... + io.parseAttributes(filename, S.Attributes, S.Name, blackList); + testCase.verifyTrue(attributeProperties('test')) + end + + function testWriteDatasetOverwrite(testCase) + + % Create a temporary HDF5 file + filename = 'temp_test_file.h5'; + fullPath = '/test_dataset'; + fid = H5F.create(filename, 'H5F_ACC_TRUNC', 'H5P_DEFAULT', 'H5P_DEFAULT'); + fileCleanupObj = onCleanup(@(id) H5F.close(fid)); + + % Initial data to write (e.g., 10x10) + initialData = rand(10, 10); + io.writeDataset(fid, fullPath, initialData); % First write to create the dataset + + % Attempt to write data of a different size (e.g., 5x5) + newData = rand(5, 5); + testCase.verifyWarning(... + @(varargin) io.writeDataset(fid, fullPath, newData), ... + 'NWB:WriteDataset:ContinuousDatasetResize' ... + ) + end + + function testWriteCompound(testCase) + % Create a temporary HDF5 file + filename = 'temp_test_file.h5'; + fullPath = '/test_dataset'; + fid = H5F.create(filename, 'H5F_ACC_TRUNC', 'H5P_DEFAULT', 'H5P_DEFAULT'); + fileCleanupObj = onCleanup(@(id) H5F.close(fid)); + + % Data to write + data = struct('a', {1,2}, 'b', {true, false}, 'c', {'test', 'new test'}); + io.writeCompound(fid, fullPath, data); % First write to create the dataset + + loadedData = h5read(filename, '/test_dataset'); + tempT = struct2table(loadedData); + % Booleans are loaded as int8, need to manually fix + tempT.b = logical( tempT.b ); + loadedData = table2struct(tempT)'; + testCase.verifyEqual(data, loadedData); + + % Use parse compound + did = H5D.open(fid, '/test_dataset'); + fsid = H5D.get_space(did); + loadedData = H5D.read(did, 'H5ML_DEFAULT', fsid, fsid,... + 'H5P_DEFAULT'); + parsedData = io.parseCompound(did, loadedData); + H5S.close(fsid); + H5D.close(did); + + parsedData = table2struct( struct2table(parsedData) )'; + testCase.verifyEqual(data, parsedData); + end + + function testWriteCompoundOverWrite(testCase) + + % Create a temporary HDF5 file + filename = 'temp_test_file.h5'; + fullPath = '/test_dataset'; + fid = H5F.create(filename, 'H5F_ACC_TRUNC', 'H5P_DEFAULT', 'H5P_DEFAULT'); + fileCleanupObj = onCleanup(@(id) H5F.close(fid)); + + % Initial data to write (e.g., 10x10) + initialData = struct('a', 1, 'b', true, 'c', 'test'); + io.writeCompound(fid, fullPath, initialData); % First write to create the dataset + + % Attempt to write data of a different size (e.g., 5x5) + newData = cat(1, initialData, struct('a', 2, 'b', false, 'c', 'new test')); + testCase.verifyWarning(... + @(varargin) io.writeCompound(fid, fullPath, newData), ... + 'NWB:WriteCompund:ContinuousCompoundResize' ... 
+ ) + end + + function testWriteGroupWithPathThatEndsWithSlash(testCase) + filename = 'temp_test_file.h5'; + fullPath = '/test_group/'; + fid = H5F.create(filename, 'H5F_ACC_TRUNC', 'H5P_DEFAULT', 'H5P_DEFAULT'); + fileCleanupObj = onCleanup(@(id) H5F.close(fid)); + groupExists = io.writeGroup(fid, fullPath); + testCase.verifyFalse(groupExists) + + S = h5info(filename); + testCase.verifyEqual(S.Groups.Name, '/test_group') + end + + function testWriteSoftLink(testCase) + % Create a temporary HDF5 file + filename = 'temp_test_file.h5'; + fid = H5F.create(filename, 'H5F_ACC_TRUNC', 'H5P_DEFAULT', 'H5P_DEFAULT'); + fileCleanupObj = onCleanup(@(id) H5F.close(fid)); + + % Define target dataset path and create it in the HDF5 file + targetPath = '/dataset'; + initialData = rand(10, 10); + io.writeDataset(fid, targetPath, initialData); % First write to create the dataset + + % Define soft link name and use writeSoftLink to create it + linkName = 'soft_link_to_dataset'; + io.writeSoftLink(targetPath, fid, linkName); + + S = h5info(filename); + testCase.verifyTrue(strcmp(S.Links.Name, linkName)) + testCase.verifyTrue(strcmp(S.Links.Type, 'soft link')) + testCase.verifyTrue(strcmp(S.Links.Value{1}, targetPath)) + end + end +end \ No newline at end of file diff --git a/+tests/+unit/+types/FunctionTests.m b/+tests/+unit/+types/FunctionTests.m new file mode 100644 index 00000000..2adce77a --- /dev/null +++ b/+tests/+unit/+types/FunctionTests.m @@ -0,0 +1,127 @@ +classdef FunctionTests < matlab.unittest.TestCase +% FunctionTests - Unit test for functions in +types namespace. + methods (TestClassSetup) + function setupClass(testCase) + % Get the root path of the matnwb repository + rootPath = misc.getMatnwbDir(); + + % Use a fixture to add the folder to the search path + testCase.applyFixture(matlab.unittest.fixtures.PathFixture(rootPath)); + + % Use a fixture to create a temporary working directory + testCase.applyFixture(matlab.unittest.fixtures.WorkingFolderFixture); + + generateCore('savedir', '.') + end + end + methods (Test) + function testcheckConstraint(testCase) + pname = 'vectordata'; + name = 'col1'; + namedprops = struct('col1', 'double'); + constrained = {'types.hdmf_common.VectorData'}; + val = []; + + % Should pass with no error + types.util.checkConstraint(pname, name, namedprops, constrained, val) + + val = 10; + types.util.checkConstraint(pname, name, namedprops, constrained, val) + + val = {10}; + testCase.verifyError(... + @(varargin) types.util.checkConstraint(pname, name, namedprops, constrained, val), ... + 'NWB:TypeCorrection:InvalidConversion') + + % Verify that checkConstraint fails if constrained is not a + % char describing a type (test unexpected error) + constrained = {false}; + namedprops = struct.empty; + testCase.verifyError(... + @(varargin) types.util.checkConstraint(pname, name, namedprops, constrained, val), ... + 'MATLAB:string:MustBeStringScalarOrCharacterVector') + end + + function testCheckDimsWithValidSize(testCase) + types.util.checkDims([3,5], {[3,5]}) + testCase.verifyTrue(true) + end + + function testCheckDimsWithInvalidSize(testCase) + testCase.verifyError(... + @(varargin) types.util.checkDims([3,5], {[1,10,4]}), ... + 'NWB:CheckDims:InvalidDimensions' ) + end + + function testCheckDtype(testCase) + % Example that triggers a block for non-scalar structs in + % compound data processing case. %Todo: simplify + ccss = types.core.VoltageClampStimulusSeries( ... + 'data', [1, 2, 3, 4, 5] ); + vcs = types.core.VoltageClampSeries( ... 
+ 'data', [0.1, 0.2, 0.3, 0.4, 0.5] ); + + stimuli = types.core.IntracellularStimuliTable( ... + 'colnames', {'stimulus'}, ... + 'id', types.hdmf_common.ElementIdentifiers( ... + 'data', int64([0, 1, 2]) ... + ), ... + 'stimulus', types.core.TimeSeriesReferenceVectorData( ... + 'data', struct( ... + 'idx_start', {0, 1, -1}, ... + 'count', {5, 3, -1}, ... + 'timeseries', { ... + types.untyped.ObjectView(ccss), ... + types.untyped.ObjectView(ccss), ... + types.untyped.ObjectView(vcs) ... + } ... + )... + )... + ); + testCase.verifyClass(stimuli, 'types.core.IntracellularStimuliTable') + end + + function testParseConstrainedAppendMode(testCase) + + columnA = types.hdmf_common.VectorData( ... + 'description', 'first column', ... + 'data', rand(10,1) ... + ); + + % 1D column + idCol = types.hdmf_common.ElementIdentifiers('data', int64(0:9)'); + + % Create table + dynamicTable = types.hdmf_common.DynamicTable(... + 'description', 'test dynamic table column',... + 'colnames', {'colA'}, ... + 'colA', columnA, ... + 'id', idCol ... + ); + + columnB = types.hdmf_common.VectorData( ... + 'description', 'second column', ... + 'data', rand(10,1) ... + ); + + + [vectordata, ~] = types.util.parseConstrained(dynamicTable, ... + 'vectordata', 'types.hdmf_common.VectorData', ... + 'colB', columnB ); + + testCase.verifyEqual(vectordata.keys, {'colA', 'colB'}) + testCase.verifyEqual(vectordata.get('colA').data, columnA.data) + testCase.verifyEqual(vectordata.get('colB').data, columnB.data) + end + + function testCorrectType(testCase) + testCase.verifyEqual(types.util.correctType('5', 'double'), 5) + testCase.verifyEqual(types.util.correctType(uint8(5), 'int32'), int32(5)) + testCase.verifyEqual(types.util.correctType(uint32(5), 'int32'), int64(5)) + + testCase.verifyWarning(... + @(varargin) types.util.correctType('5i', 'double'), ... + 'NWB:TypeCorrection:DataLoss') + end + end +end \ No newline at end of file diff --git a/+tests/+unit/FunctionTests.m b/+tests/+unit/FunctionTests.m new file mode 100644 index 00000000..4b16b445 --- /dev/null +++ b/+tests/+unit/FunctionTests.m @@ -0,0 +1,38 @@ +classdef FunctionTests < matlab.unittest.TestCase +% FunctionTests - Unit test for functions. + + methods (Test) + function testString2ValidName(testCase) + testCase.verifyWarning( ... + @(n,p) misc.str2validName('Time-Series', "test-a"), ... + 'NWB:CreateValidPropertyName:InvalidPrefix' ) + + validName = misc.str2validName('@id', 'at'); + testCase.verifyEqual(string(validName), "at_id") + end + + function testWriteCompoundMap(testCase) + testCase.applyFixture(matlab.unittest.fixtures.WorkingFolderFixture) + fid = H5F.create('test.h5'); + data = containers.Map({'a', 'b'}, 1:2); + io.writeCompound(fid, '/map_data', data) + H5F.close(fid); + end + function testWriteCompoundEmpty(testCase) + testCase.applyFixture(matlab.unittest.fixtures.WorkingFolderFixture) + fid = H5F.create('test.h5'); + data = struct; + testCase.verifyError(... + @(varargin) io.writeCompound(fid, '/map_data', data), ... 
+ 'MATLAB:imagesci:hdf5lib:libraryError') + H5F.close(fid); + end + function testWriteCompoundScalar(testCase) + testCase.applyFixture(matlab.unittest.fixtures.WorkingFolderFixture) + fid = H5F.create('test.h5'); + data = struct('a','b'); + io.writeCompound(fid, '/map_data', data) + H5F.close(fid); + end + end +end \ No newline at end of file diff --git a/+tests/+unit/PynwbTutorialTest.m b/+tests/+unit/PynwbTutorialTest.m index 6f687c15..ee6e9807 100644 --- a/+tests/+unit/PynwbTutorialTest.m +++ b/+tests/+unit/PynwbTutorialTest.m @@ -21,9 +21,11 @@ properties (Constant) % SkippedTutorials - Tutorials from pynwb to skip SkippedTutorials = {... + 'plot_read_basics.py', ... % Downloads file from dandi archive, does not export nwb file 'streaming.py', ... % Requires that HDF5 library is installed with the ROS3 driver enabled which is not a given 'object_id.py', ... % Does not export nwb file 'plot_configurator.py', ... % Does not export nwb file + 'plot_zarr_io', ... % Does not export nwb file in nwb format 'brain_observatory.py', ... % Requires allen sdk 'extensions.py'}; % Discrepancy between tutorial and schema: https://github.com/NeurodataWithoutBorders/pynwb/issues/1952 @@ -31,12 +33,14 @@ SkippedFiles = {'family_nwb_file_0.nwb'} % requires family driver from h5py % PythonDependencies - Package dependencies for running pynwb tutorials - PythonDependencies = {'hdmf-zarr', 'dataframe-image', 'matplotlib', 'dandi'} + PythonDependencies = {'dataframe-image', 'matplotlib'} end properties (Access = private) PythonEnvironment % Stores the value of the environment variable % "PYTHONPATH" to restore when test is finished. + + Debug (1,1) logical = false end methods (TestClassSetup) @@ -65,6 +69,12 @@ function setupClass(testCase) L = dir('temp_venv/lib/python*/site-*'); % Find the site-packages folder pythonPath = fullfile(L.folder, L.name); setenv('PYTHONPATH', pythonPath) + + pythonPath = tests.util.getPythonPath(); + + if testCase.Debug + [~, m] = system(sprintf('%s -m pip list', pythonPath)); disp(m) + end end end @@ -101,7 +111,7 @@ function testTutorial(testCase, tutorialFile) pythonPath = tests.util.getPythonPath(); - cmd = sprintf('"%s" %s', pythonPath, tutorialFile ); + cmd = sprintf('%s %s', pythonPath, tutorialFile); [status, cmdout] = system(cmd); if status == 1 @@ -171,7 +181,12 @@ function installPythonDependencies(testCase) for i = 1:numel(testCase.PythonDependencies) iName = testCase.PythonDependencies{i}; installCmdStr = sprintf('%s install %s', pipExecutable, iName); - evalc( "system(installCmdStr)" ); % Install without command window output + + if testCase.Debug + [~, m] = system(installCmdStr); disp(m) + else + evalc( "system(installCmdStr)" ); % Install without command window output + end end end end @@ -207,7 +222,7 @@ function installPythonDependencies(testCase) end function pynwbFolder = downloadPynwb() - githubUrl = 'https://github.com/NeurodataWithoutBorders/pynwb/archive/refs/heads/master.zip'; + githubUrl = 'https://github.com/NeurodataWithoutBorders/pynwb/archive/refs/heads/dev.zip'; pynwbFolder = downloadZippedGithubRepo(githubUrl, '.'); % Download in current directory end diff --git a/+tests/+unit/TutorialTest.m b/+tests/+unit/TutorialTest.m index 2d542129..554c0cb2 100644 --- a/+tests/+unit/TutorialTest.m +++ b/+tests/+unit/TutorialTest.m @@ -3,17 +3,27 @@ % % This test will test most tutorial files (while skipping tutorials with % dependencies) If the tutorial creates an nwb file, the test will also try -% to open this with pynwb. 
-% -% Note: -% - Requires MATLAB XXXX to run py.* commands. -% - pynwb must be installed in the python environment returned by -% pyenv() +% to open this with pynwb and run nwbinspector on the file. + +% Notes: +% - Requires MATLAB 2019b or later to run py.* commands. +% +% - pynwb must be installed in the python environment returned by pyenv() +% +% - Running NWBInspector as a Python package within MATLAB on GitHub runners +% currently encounters compatibility issues between the HDF5 library and +% h5py. As a workaround in this test, the CLI interface is used to run +% NWBInspector and the results are manually parsed. This approach is not +% ideal, and hopefully can be improved upon. properties MatNwbDirectory end + properties (Constant) + NwbInspectorSeverityLevel = 1 + end + properties (TestParameter) % TutorialFile - A cell array where each cell is the name of a % tutorial file. testTutorial will run on each file individually @@ -30,12 +40,22 @@ % SkippedFiles - Name of exported nwb files to skip reading with pynwb SkippedFiles = {'testFileWithDataPipes.nwb'} % does not produce a valid nwb file + + % PythonDependencies - Package dependencies for running pynwb tutorials + PythonDependencies = {'nwbinspector'} + end + + properties (Access = private) + NWBInspectorMode = "python" end methods (TestClassSetup) function setupClass(testCase) + + import tests.fixtures.ResetGeneratedTypesFixture + % Get the root path of the matnwb repository - rootPath = getMatNwbRootDirectory(); + rootPath = tests.util.getProjectDirectory(); tutorialsFolder = fullfile(rootPath, 'tutorials'); testCase.MatNwbDirectory = rootPath; @@ -44,29 +64,16 @@ function setupClass(testCase) testCase.applyFixture(matlab.unittest.fixtures.PathFixture(rootPath)); testCase.applyFixture(matlab.unittest.fixtures.PathFixture(tutorialsFolder)); - % Note: The following seems to not be working on the azure pipeline - % Keep for reference - - % % % Make sure pynwb is installed in MATLAB's Python Environment - % % args = py.list({py.sys.executable, "-m", "pip", "install", "pynwb"}); - % % py.subprocess.check_call(args); - % % - % % % Add pynwb to MATLAB's python environment path - % % pynwbPath = getenv('PYNWB_PATH'); - % % if count(py.sys.path, pynwbPath) == 0 - % % insert(py.sys.path,int32(0),pynwbPath); - % % end - - % % Alternative: Use python script for reading file with pynwb - setenv('PYTHONPATH', fileparts(mfilename('fullpath'))); - - nwbClearGenerated() - end - end + % Check if it is possible to call py.nwbinspector.* functions. + % When running these tests on Github Actions, calling + % py.nwbinspector does not work, whereas the CLI can be used instead. + try + py.nwbinspector.is_module_installed('nwbinspector'); + catch + testCase.NWBInspectorMode = "CLI"; + end - methods (TestClassTeardown) - function tearDownClass(testCase) %#ok - %generateCore() + testCase.applyFixture( ResetGeneratedTypesFixture ); end end @@ -79,64 +86,183 @@ function setupMethod(testCase) methods (Test) function testTutorial(testCase, tutorialFile) %#ok + % Intentionally capturing output, in order for tests to cover + % code which overloads display methods for nwb types/objects. 
C = evalc( 'run(tutorialFile)' ); %#ok - testCase.testReadTutorialNwbFileWithPynwb() + + testCase.readTutorialNwbFileWithPynwb() + testCase.inspectTutorialFileWithNwbInspector() end end methods - function testReadTutorialNwbFileWithPynwb(testCase) + function readTutorialNwbFileWithPynwb(testCase) % Retrieve all files generated by tutorial - nwbListing = dir('*.nwb'); + nwbFileNameList = testCase.listNwbFiles(); + for nwbFilename = nwbFileNameList + try + io = py.pynwb.NWBHDF5IO(nwbFilename); + nwbObject = io.read(); + testCase.verifyNotEmpty(nwbObject, 'The NWB file should not be empty.'); + io.close() + catch ME + error(ME.message) + end + end + end + + function inspectTutorialFileWithNwbInspector(testCase) + % Retrieve all files generated by tutorial + nwbFileNameList = testCase.listNwbFiles(); + for nwbFilename = nwbFileNameList + if testCase.NWBInspectorMode == "python" + results = py.list(py.nwbinspector.inspect_nwbfile(nwbfile_path=nwbFilename)); + results = testCase.convertNwbInspectorResultsToStruct(results); + elseif testCase.NWBInspectorMode == "CLI" + [s, m] = system(sprintf('nwbinspector %s --levels importance', nwbFilename)); + testCase.assertEqual(s,0, 'Failed to run NWB Inspector using system command.') + results = testCase.parseNWBInspectorTextOutput(m); + end + + if isempty(results) + return + end + + results = testCase.filterNWBInspectorResults(results); + % T = struct2table(results); disp(T) + + for j = 1:numel(results) + testCase.verifyLessThan(results(j).importance, testCase.NwbInspectorSeverityLevel, ... + sprintf('Message: %s\nLocation: %s\n File: %s\n', ... + string(results(j).message), results(j).location, results(j).filepath)) + end + end + end + end - for i = 1:numel(nwbListing) - nwbFilename = nwbListing(i).name; - if any(strcmp(nwbFilename, tests.unit.TutorialTest.SkippedFiles)) - continue + methods (Access = private) + function nwbFileNames = listNwbFiles(testCase) + nwbListing = dir('*.nwb'); + nwbFileNames = string( {nwbListing.name} ); + nwbFileNames = setdiff(nwbFileNames, testCase.SkippedFiles); + assert(isrow(nwbFileNames), 'Expected output to be a row vector') + if ~isscalar(nwbFileNames) + if iscolumn(nwbFileNames) + nwbFileNames = transpose(nwbFileNames); end + end + end + end + methods (Static) + function resultsOut = convertNwbInspectorResultsToStruct(resultsIn) + + resultsOut = tests.unit.TutorialTest.getEmptyNwbInspectorResultStruct(); + + C = cell(resultsIn); + for i = 1:numel(C) + resultsOut(i).importance = double( py.getattr(C{i}.importance, 'value') ); + resultsOut(i).severity = double( py.getattr(C{i}.severity, 'value') ); + try + resultsOut(i).location = string(C{i}.location); + catch + resultsOut(i).location = "N/A"; + end + + resultsOut(i).message = string(C{i}.message); + resultsOut(i).filepath = string(C{i}.file_path); + resultsOut(i).check_name = string(C{i}.check_function_name); + end + end + + function resultsOut = parseNWBInspectorTextOutput(systemCommandOutput) + resultsOut = tests.unit.TutorialTest.getEmptyNwbInspectorResultStruct(); + + importanceLevels = containers.Map(... + ["BEST_PRACTICE_SUGGESTION", ... + "BEST_PRACTICE_VIOLATION", ... + "CRITICAL", ... + "PYNWB_VALIDATION", ... 
+ "ERROR"], 0:4 ); + + lines = splitlines(systemCommandOutput); + count = 0; + for i = 1:numel(lines) + % Example line: + % '.0 Importance.BEST_PRACTICE_VIOLATION: behavior_tutorial.nwb - check_regular_timestamps - 'SpatialSeries' object at location '/processing/behavior/Position/SpatialSeries' + % ^2 ^1 ^2 ^ ^ ^ 3 + % [-----------importance------------] [------filepath------] [------check_name------] [-----------------location----------------] + % Splitting and components is exemplified above. + + if ~isempty(regexp( lines{i}, '^\.\d{1}', 'once' ) ) + count = count+1; + % Split line into separate components + splitLine = strsplit(lines{i}, ':'); + splitLine = [... + strsplit(splitLine{1}, ' '), ... + strsplit(splitLine{2}, '-') ... + ]; + + resultsOut(count).importance = importanceLevels( extractAfter(splitLine{2}, 'Importance.') ); + resultsOut(count).filepath = string(strtrim( splitLine{3} )); + resultsOut(count).check_name = string(strtrim(splitLine{4} )); try - io = py.pynwb.NWBHDF5IO(nwbListing(i).name); - nwbObject = io.read(); - testCase.verifyNotEmpty(nwbObject, 'The NWB file should not be empty.'); - io.close() - - catch ME - if strcmp(ME.identifier, 'MATLAB:undefinedVarOrClass') && ... - contains(ME.message, 'py.pynwb.NWBHDF5IO') - - pythonExecutable = tests.util.getPythonPath(); - cmd = sprintf('"%s" -B -m read_nwbfile_with_pynwb %s',... - pythonExecutable, nwbFilename); - - status = system(cmd); - if status ~= 0 - error('Failed to read NWB file "%s" using pynwb', nwbFilename) - end - else - rethrow(ME) - end + locationInfo = strsplit(splitLine{end}, 'at location'); + resultsOut(count).location = string(strtrim(eval(locationInfo{2}))); + catch + resultsOut(count).location = 'N/A'; end + resultsOut(count).message = string(strtrim(lines{i+1})); + end + end + end - catch ME - error(ME.message) - %testCase.verifyFail(sprintf('Failed to read file %s with error: %s', nwbListing(i).name, ME.message)); + function emptyResults = getEmptyNwbInspectorResultStruct() + emptyResults = struct(... + 'importance', {}, ... + 'severity', {}, ... + 'location', {}, ... + 'filepath', {}, ... + 'check_name', {}, ... + 'ignore', {}); + end + + function resultsOut = filterNWBInspectorResults(resultsIn) + CHECK_IGNORE = [... + "check_image_series_external_file_valid", ... + "check_regular_timestamps" + ]; + + for i = 1:numel(resultsIn) + resultsIn(i).ignore = any(strcmp(CHECK_IGNORE, resultsIn(i).check_name)); + + % Special cases to ignore + if resultsIn(i).location == "/acquisition/ExternalVideos" && ... + resultsIn(i).check_name == "check_timestamps_match_first_dimension" + resultsIn(i).ignore = true; + elseif resultsIn(i).location == "/acquisition/SpikeEvents_Shank0" && ... + resultsIn(i).check_name == "check_data_orientation" + % Data for this example is actually longer in another dimension + % than time. + resultsIn(i).ignore = true; end end + resultsOut = resultsIn; + resultsOut([resultsOut.ignore]) = []; end end end function tutorialNames = listTutorialFiles() % listTutorialFiles - List names of all tutorial files (exclude skipped files) - rootPath = getMatNwbRootDirectory(); - L = dir(fullfile(rootPath, 'tutorials')); + rootPath = tests.util.getProjectDirectory(); + L = cat(1, ... + dir(fullfile(rootPath, 'tutorials', '*.mlx')), ... + dir(fullfile(rootPath, 'tutorials', '*.m')) ... 
+ ); + L( [L.isdir] ) = []; % Ignore folders tutorialNames = setdiff({L.name}, tests.unit.TutorialTest.SkippedTutorials); end - -function folderPath = getMatNwbRootDirectory() - folderPath = fileparts(fileparts(fileparts(mfilename('fullpath')))); -end diff --git a/+tests/+unit/dataPipeTest.m b/+tests/+unit/dataPipeTest.m index 63970472..724e4937 100644 --- a/+tests/+unit/dataPipeTest.m +++ b/+tests/+unit/dataPipeTest.m @@ -15,16 +15,22 @@ function setup(testCase) function testInit(testCase) import types.untyped.datapipe.*; - + import matlab.unittest.fixtures.SuppressedWarningsFixture + %testCase.applyFixture(SuppressedWarningsFixture('NWB:DataPipeTest:Debug')) + warnDebugId = 'NWB:DataPipeTest:Debug'; warning('off', warnDebugId); warning(warnDebugId, ''); %% extra data type data = rand(100, 1); - types.untyped.DataPipe('data', data, 'dataType', 'double'); - [~,lastId] = lastwarn(); - testCase.verifyEqual(lastId, 'NWB:DataPipe:RedundantDataType'); + % types.untyped.DataPipe('data', data, 'dataType', 'double'); + % [~,lastId] = lastwarn(); + % testCase.verifyEqual(lastId, 'NWB:DataPipe:RedundantDataType'); + + testCase.verifyWarning(... + @(varargin) types.untyped.DataPipe('data', data, 'dataType', 'double'), ... + 'NWB:DataPipe:RedundantDataType') warning(warnDebugId, ''); @@ -51,9 +57,14 @@ function testInit(testCase) pipe.export(fid, datasetName, {}); H5F.close(fid); + testCase.verifyWarning(... + @(varargin) types.untyped.DataPipe('filename', filename, 'path', datasetName, 'dataType', 'double'), ... + 'NWB:DataPipe:UnusedArguments') + + testCase.applyFixture(SuppressedWarningsFixture('NWB:DataPipe:UnusedArguments')) pipe = types.untyped.DataPipe('filename', filename, 'path', datasetName, 'dataType', 'double'); - [~,lastId] = lastwarn(); - testCase.verifyEqual(lastId, 'NWB:DataPipe:UnusedArguments'); + % [~,lastId] = lastwarn(); + % testCase.verifyEqual(lastId, 'NWB:DataPipe:UnusedArguments'); testCase.verifyEqual(pipe.compressionLevel, 2); testCase.verifyTrue(pipe.hasShuffle); @@ -131,6 +142,9 @@ function testExternalFilters(testCase) import types.untyped.datapipe.properties.DynamicFilter; import types.untyped.datapipe.properties.Shuffle; + % TODO: Why is Filter.LZ4 not part of the exported Pipe, i.e when the + % Pipe.internal goes from Blueprint to Bound + testCase.assumeTrue(logical(H5Z.filter_avail(uint32(Filter.LZ4)))); filename = 'testExternalWrite.h5'; @@ -153,7 +167,7 @@ function testExternalFilters(testCase) OneDimensionPipe.export(fid, '/test_one_dim_data', {}); H5F.close(fid); - + %% append data totalLength = 3; appendData = zeros([10 13 totalLength], Pipe.dataType); @@ -260,6 +274,113 @@ function testConfigurationFromData(testCase) testCase.verifyClass(conf, 'types.untyped.datapipe.Configuration') end +function testPropertySetGet(testCase) + data = rand(100, 1); + pipe = types.untyped.DataPipe('data', data); + + pipe.axis = 1; + testCase.verifyEqual(pipe.axis, 1) + + pipe.offset = 4; + testCase.verifyEqual(pipe.offset, 4) + + pipe.dataType = 'double'; + testCase.verifyEqual(pipe.dataType, 'double') + + pipe.chunkSize = 10; + testCase.verifyEqual(pipe.chunkSize, 10) + + pipe.compressionLevel = -1; + % Todo: make verification + + pipe.hasShuffle = false; + testCase.verifyFalse(pipe.hasShuffle) + + pipe.hasShuffle = true; + testCase.verifyTrue(pipe.hasShuffle) +end + +function testAppendVectorToBlueprintPipe(testCase) + % Column vector: + data = rand(10, 1); + pipe = types.untyped.DataPipe('data', data); + + pipe.append([1;2]); + newData = pipe.load(); + 
testCase.verifyEqual(newData, cat(1, data, [1;2])) + + testCase.verifyError(@(X) pipe.append([1,2]), 'MATLAB:catenate:dimensionMismatch') + + % Row vector: + data = rand(1, 10); + pipe = types.untyped.DataPipe('data', data); + + pipe.append([1,2]); + newData = pipe.load(); + testCase.verifyEqual(newData, cat(2, data, [1,2])) + + testCase.verifyError(@(X) pipe.append([1;2]), 'MATLAB:catenate:dimensionMismatch') +end + +function testSubsrefWithNonScalarSubs(testCase) + data = rand(100, 1); + pipe = types.untyped.DataPipe('data', data); + + % This syntax should not be supported. Not clear what a valid + % non-scalar subsref would be... + subData = pipe{1:10}(1:5); + testCase.verifyEqual(subData, data(1:5)) +end + +function testOverrideBoundPipeProperties(testCase) + import matlab.unittest.fixtures.SuppressedWarningsFixture + testCase.applyFixture(SuppressedWarningsFixture('NWB:DataPipe:UnusedArguments')) + + data = rand(10, 1); + pipe = types.untyped.DataPipe('data', data); + + filename = 'testInit.h5'; + datasetName = '/test_data'; + fid = H5F.create(filename); + pipe.export(fid, datasetName, {}); + H5F.close(fid); + + loadedPipe = types.untyped.DataPipe('filename', filename, 'path', datasetName, 'dataType', 'double'); + + % Using verifyError did not work for the following statements, i.e this: + % testCase.verifyError(@(x) eval('loadedPipe.chunkSize = 2'), 'NWB:BoundPipe:CannotSetPipeProperty') %#ok + % fails with the following error: Attempt to add "loadedPipe" to a static workspace. + try + loadedPipe.chunkSize = 2; + catch ME + testCase.verifyEqual(ME.identifier, 'NWB:BoundPipe:CannotSetPipeProperty') + end + + try + loadedPipe.hasShuffle = false; + catch ME + testCase.verifyEqual(ME.identifier, 'NWB:BoundPipe:CannotSetPipeProperty') + end + +end + +function testDynamicFilterIsInDatasetCreationPropertyList(testCase) + import types.untyped.datapipe.dynamic.Filter; + import types.untyped.datapipe.properties.DynamicFilter; + + dcpl = H5P.create('H5P_DATASET_CREATE'); + dynamicFilter = DynamicFilter(Filter.LZ4); + + tf = dynamicFilter.isInDcpl(dcpl); + testCase.verifyFalse(tf) + + % Add filter + dynamicFilter.addTo(dcpl) + tf = dynamicFilter.isInDcpl(dcpl); + testCase.verifyTrue(tf) +end + function data = createData(dataType, size) data = randi(intmax(dataType), size, dataType); end + diff --git a/+tests/+unit/dynamicTableTest.m b/+tests/+unit/dynamicTableTest.m index e82267e2..a4acdb06 100644 --- a/+tests/+unit/dynamicTableTest.m +++ b/+tests/+unit/dynamicTableTest.m @@ -41,6 +41,39 @@ function testNwbToTableWithReferencedTablesAsTableRows(testCase) testCase.verifyClass(row1colB, 'table') end +function testClearDynamicTable(testCase) + dtr_table = createDynamicTableWithTableRegionReferences(); + types.util.dynamictable.clear(dtr_table) + + % testCase.verifyEmpty(dtr_table.vectordata) %todo when PR merged + testCase.verifyEqual(size(dtr_table.vectordata), uint64([0,1])) +end + +function testClearDynamicTableV2_1(testCase) + + import matlab.unittest.fixtures.SuppressedWarningsFixture + testCase.applyFixture(SuppressedWarningsFixture('NWB:CheckUnset:InvalidProperties')) + + nwbClearGenerated('.', 'ClearCache', true) + generateCore("2.1.0", "savedir", '.') + rehash(); + table = types.core.DynamicTable( ... + 'description', 'test table with DynamicTableRegion', ... + 'colnames', {'dtr_col_a', 'dtr_col_b'}, ... + 'dtr_col_a', 1:4, ... + 'dtr_col_b', 5:8, ... 
+ 'id', types.core.ElementIdentifiers('data', [0; 1; 2; 3]) ); + + types.util.dynamictable.clear(table) + + % testCase.verifyEmpty(dtr_table.vectordata) %todo when PR merged + testCase.verifyEqual(size(table.vectordata), uint64([0,1])) + + nwbClearGenerated('.','ClearCache',true) + generateCore('savedir', '.'); + rehash(); +end + % Non-test functions function dtr_table = createDynamicTableWithTableRegionReferences() % Create a dynamic table with two columns, where the data of each column is diff --git a/+tests/+unit/untypedSetTest.m b/+tests/+unit/untypedSetTest.m index 4d18716f..588a8667 100644 --- a/+tests/+unit/untypedSetTest.m +++ b/+tests/+unit/untypedSetTest.m @@ -32,13 +32,15 @@ function testCreateSetFromNvPairsPlusFunctionHandle(testCase) end function testDisplayEmptyObject(testCase) - emptyUntypedSet = types.untyped.Set(); - disp(emptyUntypedSet) + emptyUntypedSet = types.untyped.Set(); %#ok + C = evalc( 'disp(emptyUntypedSet)' ); + testCase.verifyClass(C, 'char') end function testDisplayScalarObject(testCase) - scalarSet = types.untyped.Set('a',1) - disp(scalarSet) + scalarSet = types.untyped.Set('a', 1); %#ok + C = evalc( 'disp(scalarSet)' ); + testCase.verifyClass(C, 'char') end function testGetSetSize(testCase) @@ -65,4 +67,10 @@ function testVerticalConcatenation(testCase) untypedSetB = types.untyped.Set( struct('c',3, 'd', 3) ); testCase.verifyError(@() [untypedSetA; untypedSetB], 'NWB:Set:Unsupported') +end + +function testSetCharValue(testCase) + untypedSet = types.untyped.Set( struct('a', 'a', 'b', 'b') ); + untypedSet.set('c', 'c') + testCase.verifyEqual(untypedSet.get('c'), 'c') end \ No newline at end of file diff --git a/+tests/+util/addFolderToPythonPath.m b/+tests/+util/addFolderToPythonPath.m new file mode 100644 index 00000000..ac21c350 --- /dev/null +++ b/+tests/+util/addFolderToPythonPath.m @@ -0,0 +1,13 @@ +function addFolderToPythonPath(folderPath) + pythonPath = getenv('PYTHONPATH'); + if isempty(pythonPath) + updatedPythonPath = folderPath; + else + if ~contains(pythonPath, folderPath) + updatedPythonPath = strjoin({pythonPath, folderPath}, pathsep); + else + return + end + end + setenv('PYTHONPATH', updatedPythonPath); +end diff --git a/+tests/+util/getProjectDirectory.m b/+tests/+util/getProjectDirectory.m new file mode 100644 index 00000000..8cdad9fa --- /dev/null +++ b/+tests/+util/getProjectDirectory.m @@ -0,0 +1,3 @@ +function projectDirectory = getProjectDirectory() + projectDirectory = fullfile(fileparts(mfilename('fullpath')), '..', '..'); +end diff --git a/+tests/+util/getPythonPath.m b/+tests/+util/getPythonPath.m index 3a43dc0a..304cdd10 100644 --- a/+tests/+util/getPythonPath.m +++ b/+tests/+util/getPythonPath.m @@ -1,7 +1,7 @@ function pythonPath = getPythonPath() envPath = fullfile('+tests', 'env.mat'); - if 2 == exist(envPath, 'file') + if isfile(fullfile(misc.getMatnwbDir, envPath)) Env = load(envPath, '-mat'); if isfield(Env, 'pythonPath') pythonPath = Env.pythonPath; diff --git a/+tests/requirements.txt b/+tests/requirements.txt index da0e7629..12070c83 100644 --- a/+tests/requirements.txt +++ b/+tests/requirements.txt @@ -1,2 +1,3 @@ -pynwb -hdf5plugin \ No newline at end of file +hdf5plugin +git+https://github.com/NeurodataWithoutBorders/nwbinspector.git@dev +git+https://github.com/NeurodataWithoutBorders/pynwb.git@dev \ No newline at end of file diff --git a/+types/+core/AnnotationSeries.m b/+types/+core/AnnotationSeries.m index b199c3af..c821b8d6 100644 --- a/+types/+core/AnnotationSeries.m +++ b/+types/+core/AnnotationSeries.m @@ 
-52,14 +52,14 @@ if isequal(val, -1) val = -1; else - error('Unable to set the ''data_resolution'' property of class ''AnnotationSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_resolution'' property of class ''AnnotationSeries'' because it is read-only.') end end function val = validate_data_unit(obj, val) if isequal(val, 'n/a') val = 'n/a'; else - error('Unable to set the ''data_unit'' property of class ''AnnotationSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''AnnotationSeries'' because it is read-only.') end end %% EXPORT diff --git a/+types/+core/CurrentClampSeries.m b/+types/+core/CurrentClampSeries.m index 8def6770..fe27e0cb 100644 --- a/+types/+core/CurrentClampSeries.m +++ b/+types/+core/CurrentClampSeries.m @@ -109,7 +109,7 @@ if isequal(val, 'volts') val = 'volts'; else - error('Unable to set the ''data_unit'' property of class ''CurrentClampSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''CurrentClampSeries'' because it is read-only.') end end %% EXPORT diff --git a/+types/+core/CurrentClampStimulusSeries.m b/+types/+core/CurrentClampStimulusSeries.m index ec31d0db..ab8d7a4d 100644 --- a/+types/+core/CurrentClampStimulusSeries.m +++ b/+types/+core/CurrentClampStimulusSeries.m @@ -35,7 +35,7 @@ if isequal(val, 'amperes') val = 'amperes'; else - error('Unable to set the ''data_unit'' property of class ''CurrentClampStimulusSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''CurrentClampStimulusSeries'' because it is read-only.') end end %% EXPORT diff --git a/+types/+core/Device.m b/+types/+core/Device.m index a626863b..426a9bd5 100644 --- a/+types/+core/Device.m +++ b/+types/+core/Device.m @@ -4,8 +4,11 @@ % OPTIONAL PROPERTIES properties - description; % (char) Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text. - manufacturer; % (char) The name of the manufacturer of the device. + description; % (char) Description of the device as free-form text. If there is any software/firmware associated with the device, the names and versions of those can be added to NWBFile.was_generated_by. + manufacturer; % (char) The name of the manufacturer of the device, e.g., Imec, Plexon, Thorlabs. + model_name; % (char) The model name of the device, e.g., Neuropixels 1.0, V-Probe, Bergamo III. + model_number; % (char) The model number (or part/product number) of the device, e.g., PRB_1_4_0480_1, PLX-VP-32-15SE(75)-(260-80)(460-10)-300-(1)CON/32m-V, BERGAMO. + serial_number; % (char) The serial number of the device. 
end methods @@ -20,9 +23,15 @@ p.StructExpand = false; addParameter(p, 'description',[]); addParameter(p, 'manufacturer',[]); + addParameter(p, 'model_name',[]); + addParameter(p, 'model_number',[]); + addParameter(p, 'serial_number',[]); misc.parseSkipInvalidName(p, varargin); obj.description = p.Results.description; obj.manufacturer = p.Results.manufacturer; + obj.model_name = p.Results.model_name; + obj.model_number = p.Results.model_number; + obj.serial_number = p.Results.serial_number; if strcmp(class(obj), 'types.core.Device') cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); types.util.checkUnset(obj, unique(cellStringArguments)); @@ -35,6 +44,15 @@ function set.manufacturer(obj, val) obj.manufacturer = obj.validate_manufacturer(val); end + function set.model_name(obj, val) + obj.model_name = obj.validate_model_name(val); + end + function set.model_number(obj, val) + obj.model_number = obj.validate_model_number(val); + end + function set.serial_number(obj, val) + obj.serial_number = obj.validate_serial_number(val); + end %% VALIDATORS function val = validate_description(obj, val) @@ -73,6 +91,60 @@ validshapes = {[1]}; types.util.checkDims(valsz, validshapes); end + function val = validate_model_name(obj, val) + val = types.util.checkDtype('model_name', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_model_number(obj, val) + val = types.util.checkDtype('model_number', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_serial_number(obj, val) + val = types.util.checkDtype('serial_number', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end %% EXPORT function refs = export(obj, fid, fullpath, refs) refs = export@types.core.NWBContainer(obj, fid, fullpath, refs); @@ -85,6 +157,15 @@ if ~isempty(obj.manufacturer) io.writeAttribute(fid, [fullpath '/manufacturer'], obj.manufacturer); end + if ~isempty(obj.model_name) + io.writeAttribute(fid, [fullpath '/model_name'], obj.model_name); + end + if ~isempty(obj.model_number) + io.writeAttribute(fid, [fullpath '/model_number'], obj.model_number); + end + if ~isempty(obj.serial_number) + io.writeAttribute(fid, [fullpath '/serial_number'], obj.serial_number); + end end end diff --git a/+types/+core/ElectricalSeries.m b/+types/+core/ElectricalSeries.m index 212e88ff..9a81aa87 100644 --- a/+types/+core/ElectricalSeries.m +++ b/+types/+core/ElectricalSeries.m @@ -97,7 +97,7 @@ if isequal(val, 'volts') val = 'volts'; else - error('Unable to set the ''data_unit'' property of class ''ElectricalSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''ElectricalSeries'' 
because it is read-only.') end end function val = validate_electrodes(obj, val) diff --git a/+types/+core/ElectrodeGroup.m b/+types/+core/ElectrodeGroup.m index 174c0e6d..d44737b8 100644 --- a/+types/+core/ElectrodeGroup.m +++ b/+types/+core/ElectrodeGroup.m @@ -93,7 +93,7 @@ return; end if ~istable(val) && ~isstruct(val) && ~isa(val, 'containers.Map') - error('Property `position` must be a table,struct, or containers.Map.'); + error('NWB:Type:InvalidPropertyType', 'Property `position` must be a table, struct, or containers.Map.'); end vprops = struct(); vprops.x = 'single'; diff --git a/+types/+core/EventWaveform.m b/+types/+core/EventWaveform.m index 375c758c..3915db7c 100644 --- a/+types/+core/EventWaveform.m +++ b/+types/+core/EventWaveform.m @@ -1,5 +1,5 @@ classdef EventWaveform < types.core.NWBDataInterface & types.untyped.GroupClass -% EVENTWAVEFORM Represents either the waveforms of detected events, as extracted from a raw data trace in /acquisition, or the event waveforms that were stored during experiment acquisition. +% EVENTWAVEFORM DEPRECATED. Represents either the waveforms of detected events, as extracted from a raw data trace in /acquisition, or the event waveforms that were stored during experiment acquisition. % OPTIONAL PROPERTIES diff --git a/+types/+core/IZeroClampSeries.m b/+types/+core/IZeroClampSeries.m index 39e5814e..880a25c2 100644 --- a/+types/+core/IZeroClampSeries.m +++ b/+types/+core/IZeroClampSeries.m @@ -6,7 +6,7 @@ methods function obj = IZeroClampSeries(varargin) % IZEROCLAMPSERIES Constructor for IZeroClampSeries - varargin = [{'stimulus_description' 'N/A'} varargin]; + varargin = [{'bias_current' types.util.correctType(0, 'single') 'bridge_balance' types.util.correctType(0, 'single') 'capacitance_compensation' types.util.correctType(0, 'single') 'stimulus_description' 'N/A'} varargin]; obj = obj@types.core.CurrentClampSeries(varargin{:}); @@ -33,64 +33,31 @@ %% VALIDATORS function val = validate_bias_current(obj, val) - val = types.util.checkDtype('bias_current', 'single', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; + if isequal(val, 0) + val = 0; else - valsz = size(val); + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''bias_current'' property of class ''IZeroClampSeries'' because it is read-only.') end - validshapes = {[1]}; - types.util.checkDims(valsz, validshapes); end function val = validate_bridge_balance(obj, val) - val = types.util.checkDtype('bridge_balance', 'single', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; + if isequal(val, 0) + val = 0; else - valsz = size(val); + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''bridge_balance'' property of class ''IZeroClampSeries'' because it is read-only.') end - validshapes = {[1]}; - types.util.checkDims(valsz, validshapes); end function val = validate_capacitance_compensation(obj, val) - val = types.util.checkDtype('capacitance_compensation', 'single', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; + if isequal(val, 0) + val = 0; else - valsz = size(val); + 
error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''capacitance_compensation'' property of class ''IZeroClampSeries'' because it is read-only.') end - validshapes = {[1]}; - types.util.checkDims(valsz, validshapes); end function val = validate_stimulus_description(obj, val) if isequal(val, 'N/A') val = 'N/A'; else - error('Unable to set the ''stimulus_description'' property of class ''IZeroClampSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''stimulus_description'' property of class ''IZeroClampSeries'' because it is read-only.') end end %% EXPORT diff --git a/+types/+core/ImageMaskSeries.m b/+types/+core/ImageMaskSeries.m index 7514ac8b..ae2c1710 100644 --- a/+types/+core/ImageMaskSeries.m +++ b/+types/+core/ImageMaskSeries.m @@ -1,5 +1,5 @@ classdef ImageMaskSeries < types.core.ImageSeries & types.untyped.GroupClass -% IMAGEMASKSERIES An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed. +% IMAGEMASKSERIES DEPRECATED. An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed. % OPTIONAL PROPERTIES diff --git a/+types/+core/IndexSeries.m b/+types/+core/IndexSeries.m index 1c47e17f..8ba26b10 100644 --- a/+types/+core/IndexSeries.m +++ b/+types/+core/IndexSeries.m @@ -124,7 +124,7 @@ if isequal(val, 'N/A') val = 'N/A'; else - error('Unable to set the ''data_unit'' property of class ''IndexSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''IndexSeries'' because it is read-only.') end end function val = validate_indexed_images(obj, val) diff --git a/+types/+core/IntervalSeries.m b/+types/+core/IntervalSeries.m index 22edd6fc..27fd745a 100644 --- a/+types/+core/IntervalSeries.m +++ b/+types/+core/IntervalSeries.m @@ -52,14 +52,14 @@ if isequal(val, -1) val = -1; else - error('Unable to set the ''data_resolution'' property of class ''IntervalSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_resolution'' property of class ''IntervalSeries'' because it is read-only.') end end function val = validate_data_unit(obj, val) if isequal(val, 'n/a') val = 'n/a'; else - error('Unable to set the ''data_unit'' property of class ''IntervalSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''IntervalSeries'' because it is read-only.') end end %% EXPORT diff --git a/+types/+core/IntracellularElectrodesTable.m b/+types/+core/IntracellularElectrodesTable.m index b3f2b4fd..ce75faef 100644 --- a/+types/+core/IntracellularElectrodesTable.m +++ b/+types/+core/IntracellularElectrodesTable.m @@ -41,7 +41,7 @@ if isequal(val, 'Table for storing intracellular electrode related metadata.') val = 'Table for storing intracellular electrode related metadata.'; else - error('Unable to set the ''description'' property of class ''IntracellularElectrodesTable'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the 
''description'' property of class ''IntracellularElectrodesTable'' because it is read-only.') end end function val = validate_electrode(obj, val) diff --git a/+types/+core/IntracellularRecordingsTable.m b/+types/+core/IntracellularRecordingsTable.m index 41be93db..1fc8d5c9 100644 --- a/+types/+core/IntracellularRecordingsTable.m +++ b/+types/+core/IntracellularRecordingsTable.m @@ -53,7 +53,7 @@ if isequal(val, 'A table to group together a stimulus and response from a single electrode and a single simultaneous recording and for storing metadata about the intracellular recording.') val = 'A table to group together a stimulus and response from a single electrode and a single simultaneous recording and for storing metadata about the intracellular recording.'; else - error('Unable to set the ''description'' property of class ''IntracellularRecordingsTable'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''description'' property of class ''IntracellularRecordingsTable'' because it is read-only.') end end function val = validate_electrodes(obj, val) diff --git a/+types/+core/IntracellularResponsesTable.m b/+types/+core/IntracellularResponsesTable.m index 2f98eee7..1b4979b7 100644 --- a/+types/+core/IntracellularResponsesTable.m +++ b/+types/+core/IntracellularResponsesTable.m @@ -41,7 +41,7 @@ if isequal(val, 'Table for storing intracellular response related metadata.') val = 'Table for storing intracellular response related metadata.'; else - error('Unable to set the ''description'' property of class ''IntracellularResponsesTable'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''description'' property of class ''IntracellularResponsesTable'' because it is read-only.') end end function val = validate_response(obj, val) diff --git a/+types/+core/IntracellularStimuliTable.m b/+types/+core/IntracellularStimuliTable.m index 8e933b92..b0cf39e9 100644 --- a/+types/+core/IntracellularStimuliTable.m +++ b/+types/+core/IntracellularStimuliTable.m @@ -50,7 +50,7 @@ if isequal(val, 'Table for storing intracellular stimulus related metadata.') val = 'Table for storing intracellular stimulus related metadata.'; else - error('Unable to set the ''description'' property of class ''IntracellularStimuliTable'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''description'' property of class ''IntracellularStimuliTable'' because it is read-only.') end end function val = validate_stimulus(obj, val) diff --git a/+types/+core/NWBFile.m b/+types/+core/NWBFile.m index 9b6a1417..2b05d58e 100644 --- a/+types/+core/NWBFile.m +++ b/+types/+core/NWBFile.m @@ -50,6 +50,7 @@ general_subject; % (Subject) Information about the animal or person from which the data was measured. general_surgery; % (char) Narrative description about surgery/surgeries, including date(s) and who performed surgery. general_virus; % (char) Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. + general_was_generated_by; % (char) Name and version of software package(s) used to generate data contained in this NWB File. For each software package or library, include the name of the software as the first value and the version as the second value. intervals; % (TimeIntervals) Optional additional table(s) for describing other experimental time intervals. intervals_epochs; % (TimeIntervals) Divisions in time marking experimental stages or sub-divisions of a single recording session. 
intervals_invalid_times; % (TimeIntervals) Time intervals that should be removed from analysis. @@ -64,7 +65,7 @@ methods function obj = NWBFile(varargin) % NWBFILE Constructor for NWBFile - varargin = [{'nwb_version' '2.7.0'} varargin]; + varargin = [{'nwb_version' '2.8.0'} varargin]; obj = obj@types.core.NWBContainer(varargin{:}); @@ -107,6 +108,7 @@ addParameter(p, 'general_subject',[]); addParameter(p, 'general_surgery',[]); addParameter(p, 'general_virus',[]); + addParameter(p, 'general_was_generated_by',[]); addParameter(p, 'identifier',[]); addParameter(p, 'intervals',types.untyped.Set()); addParameter(p, 'intervals_epochs',[]); @@ -157,6 +159,7 @@ obj.general_subject = p.Results.general_subject; obj.general_surgery = p.Results.general_surgery; obj.general_virus = p.Results.general_virus; + obj.general_was_generated_by = p.Results.general_was_generated_by; obj.identifier = p.Results.identifier; obj.intervals = p.Results.intervals; obj.intervals_epochs = p.Results.intervals_epochs; @@ -282,6 +285,9 @@ function set.general_virus(obj, val) obj.general_virus = obj.validate_general_virus(val); end + function set.general_was_generated_by(obj, val) + obj.general_was_generated_by = obj.validate_general_was_generated_by(val); + end function set.identifier(obj, val) obj.identifier = obj.validate_identifier(val); end @@ -727,6 +733,24 @@ validshapes = {[1]}; types.util.checkDims(valsz, validshapes); end + function val = validate_general_was_generated_by(obj, val) + val = types.util.checkDtype('general_was_generated_by', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[2,Inf]}; + types.util.checkDims(valsz, validshapes); + end function val = validate_identifier(obj, val) val = types.util.checkDtype('identifier', 'char', val); if isa(val, 'types.untyped.DataStub') @@ -1041,6 +1065,14 @@ io.writeDataset(fid, [fullpath '/general/virus'], obj.general_virus); end end + io.writeGroup(fid, [fullpath '/general']); + if ~isempty(obj.general_was_generated_by) + if startsWith(class(obj.general_was_generated_by), 'types.untyped.') + refs = obj.general_was_generated_by.export(fid, [fullpath '/general/was_generated_by'], refs); + elseif ~isempty(obj.general_was_generated_by) + io.writeDataset(fid, [fullpath '/general/was_generated_by'], obj.general_was_generated_by, 'forceArray'); + end + end if startsWith(class(obj.identifier), 'types.untyped.') refs = obj.identifier.export(fid, [fullpath '/identifier'], refs); elseif ~isempty(obj.identifier) diff --git a/+types/+core/OptogeneticSeries.m b/+types/+core/OptogeneticSeries.m index 5741153c..92176f50 100644 --- a/+types/+core/OptogeneticSeries.m +++ b/+types/+core/OptogeneticSeries.m @@ -58,7 +58,7 @@ if isequal(val, 'watts') val = 'watts'; else - error('Unable to set the ''data_unit'' property of class ''OptogeneticSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''OptogeneticSeries'' because it is read-only.') end end function val = validate_site(obj, val) diff --git a/+types/+core/SpikeEventSeries.m b/+types/+core/SpikeEventSeries.m index 80611ca7..95b53c18 100644 --- a/+types/+core/SpikeEventSeries.m +++ b/+types/+core/SpikeEventSeries.m @@ -1,5 +1,5 @@ classdef SpikeEventSeries < types.core.ElectricalSeries & types.untyped.GroupClass -% SPIKEEVENTSERIES 
Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode). +% SPIKEEVENTSERIES Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode). @@ -56,7 +56,7 @@ if isequal(val, 'volts') val = 'volts'; else - error('Unable to set the ''data_unit'' property of class ''SpikeEventSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''SpikeEventSeries'' because it is read-only.') end end function val = validate_timestamps(obj, val) @@ -81,14 +81,14 @@ if isequal(val, 1) val = 1; else - error('Unable to set the ''timestamps_interval'' property of class ''SpikeEventSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''timestamps_interval'' property of class ''SpikeEventSeries'' because it is read-only.') end end function val = validate_timestamps_unit(obj, val) if isequal(val, 'seconds') val = 'seconds'; else - error('Unable to set the ''timestamps_unit'' property of class ''SpikeEventSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''timestamps_unit'' property of class ''SpikeEventSeries'' because it is read-only.') end end %% EXPORT diff --git a/+types/+core/TimeSeries.m b/+types/+core/TimeSeries.m index 5245289c..2fad2f65 100644 --- a/+types/+core/TimeSeries.m +++ b/+types/+core/TimeSeries.m @@ -418,9 +418,11 @@ %% CUSTOM CONSTRAINTS function checkCustomConstraint(obj) assert(~isempty(obj.timestamps) || ~isempty(obj.starting_time), ... - "'timestamps' or 'starting_time' must be specified") + 'NWB:TimeSeries:TimeNotSpecified', ... + "'timestamps' or 'starting_time' must be specified") if ~isempty(obj.starting_time) assert(~isempty(obj.starting_time_rate), ... + 'NWB:TimeSeries:RateMissing', ... "'starting_time_rate' must be specified when 'starting_time' is specified") end end diff --git a/+types/+core/TimeSeriesReferenceVectorData.m b/+types/+core/TimeSeriesReferenceVectorData.m index 20f77944..719c12d2 100644 --- a/+types/+core/TimeSeriesReferenceVectorData.m +++ b/+types/+core/TimeSeriesReferenceVectorData.m @@ -30,7 +30,7 @@ return; end if ~istable(val) && ~isstruct(val) && ~isa(val, 'containers.Map') - error('Property `data` must be a table,struct, or containers.Map.'); + error('NWB:Type:InvalidPropertyType', 'Property `data` must be a table, struct, or containers.Map.'); end vprops = struct(); vprops.idx_start = 'int32'; diff --git a/+types/+core/Units.m b/+types/+core/Units.m index e84b982a..75f2f708 100644 --- a/+types/+core/Units.m +++ b/+types/+core/Units.m @@ -13,9 +13,9 @@ spike_times_index; % (VectorIndex) Index into the spike_times dataset. 
waveform_mean; % (VectorData) Spike waveform mean for each spike unit. waveform_sd; % (VectorData) Spike waveform standard deviation for each spike unit. - waveforms; % (VectorData) Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. - waveforms_index; % (VectorIndex) Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. - waveforms_index_index; % (VectorIndex) Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail. + waveforms; % (VectorData) Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. 
When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be the same as the order of the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. + waveforms_index; % (VectorIndex) Index into the 'waveforms' dataset. One value for every spike event. See 'waveforms' for more detail. + waveforms_index_index; % (VectorIndex) Index into the 'waveforms_index' dataset. One value for every unit (row in the table). See 'waveforms' for more detail. end methods diff --git a/+types/+core/VoltageClampSeries.m b/+types/+core/VoltageClampSeries.m index 7591fdd9..50984fc5 100644 --- a/+types/+core/VoltageClampSeries.m +++ b/+types/+core/VoltageClampSeries.m @@ -139,7 +139,7 @@ if isequal(val, 'amperes') val = 'amperes'; else - error('Unable to set the ''data_unit'' property of class ''VoltageClampSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''VoltageClampSeries'' because it is read-only.') end end function val = validate_resistance_comp_bandwidth(obj, val) diff --git a/+types/+core/VoltageClampStimulusSeries.m b/+types/+core/VoltageClampStimulusSeries.m index e81651bd..2df68b7e 100644 --- a/+types/+core/VoltageClampStimulusSeries.m +++ b/+types/+core/VoltageClampStimulusSeries.m @@ -35,7 +35,7 @@ if isequal(val, 'volts') val = 'volts'; else - error('Unable to set the ''data_unit'' property of class ''VoltageClampStimulusSeries'' because it is read-only.') + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''VoltageClampStimulusSeries'' because it is read-only.') end end %% EXPORT diff --git a/+types/+hdmf_common/DynamicTableRegion.m b/+types/+hdmf_common/DynamicTableRegion.m index e24dc59e..506b6b02 100644 --- a/+types/+hdmf_common/DynamicTableRegion.m +++ b/+types/+hdmf_common/DynamicTableRegion.m @@ -4,7 +4,7 @@ % OPTIONAL PROPERTIES properties - table; % (Object Reference to DynamicTable) Reference to the DynamicTable object that this region applies to. + table; % (Object reference to DynamicTable) Reference to the DynamicTable object that this region applies to. end methods diff --git a/+types/+hdmf_common/VectorIndex.m b/+types/+hdmf_common/VectorIndex.m index 75a9e683..4a40a936 100644 --- a/+types/+hdmf_common/VectorIndex.m +++ b/+types/+hdmf_common/VectorIndex.m @@ -4,7 +4,7 @@ % OPTIONAL PROPERTIES properties - target; % (Object Reference to VectorData) Reference to the target dataset that this index applies to. + target; % (Object reference to VectorData) Reference to the target dataset that this index applies to. 
end methods diff --git a/+types/+hdmf_experimental/EnumData.m b/+types/+hdmf_experimental/EnumData.m index 1608dea0..e22843e1 100644 --- a/+types/+hdmf_experimental/EnumData.m +++ b/+types/+hdmf_experimental/EnumData.m @@ -4,7 +4,7 @@ % OPTIONAL PROPERTIES properties - elements; % (Object Reference to VectorData) Reference to the VectorData object that contains the enumerable elements + elements; % (Object reference to VectorData) Reference to the VectorData object that contains the enumerable elements end methods diff --git a/+types/+untyped/+datapipe/+properties/DynamicFilter.m b/+types/+untyped/+datapipe/+properties/DynamicFilter.m index 4579ec27..15174644 100644 --- a/+types/+untyped/+datapipe/+properties/DynamicFilter.m +++ b/+types/+untyped/+datapipe/+properties/DynamicFilter.m @@ -10,14 +10,15 @@ methods function obj = DynamicFilter(filter, parameters) - validateattributes(filter, ... - {'types.untyped.datapipe.dynamic.Filter'}, ... - {'scalar'}, ... - 'DynamicFilter', 'filter'); + arguments + filter (1,1) types.untyped.datapipe.dynamic.Filter + parameters = [] + end + assert(~verLessThan('matlab', '9.12'), ... ['Your MATLAB version `%s` does not support writing with ' ... 'dynamically loaded filters. Please upgrade to version R2022a ' ... - 'or higher in order to use this feature.'], version); + 'or higher in order to use this feature.'], version); %#ok assert(H5Z.filter_avail(uint32(filter)), ... ['Filter `%s` does not appear to be installed on this system. ' ... 'Please doublecheck `%s` for more information about writing ' ... @@ -26,15 +27,10 @@ 'https://www.mathworks.com/help/matlab/import_export/read-and-write-hdf5-datasets-using-dynamically-loaded-filters.html'); obj.dynamicFilter = filter; - - if (1 < nargin) - obj.parameters = parameters; - else - obj.parameters = []; - end + obj.parameters = parameters; end - function tf = isInDcpl(dcpl) + function tf = isInDcpl(obj, dcpl) tf = false; for i = 0:(H5P.get_nfilters(dcpl) - 1) diff --git a/+types/+untyped/+datapipe/BoundPipe.m b/+types/+untyped/+datapipe/BoundPipe.m index 4539b692..d9e63d53 100644 --- a/+types/+untyped/+datapipe/BoundPipe.m +++ b/+types/+untyped/+datapipe/BoundPipe.m @@ -242,13 +242,13 @@ function setPipeProperty(~, ~) end function tf = hasPipeProperty(obj, name) + tf = false; for i = 1:length(obj.pipeProperties) if isa(obj.pipeProperties{i}, name) tf = true; return; end end - tf = false; end function removePipeProperty(~, ~) diff --git a/+types/+untyped/@DataStub/load_mat_style.m b/+types/+untyped/@DataStub/load_mat_style.m index a6fa1f16..f32312c0 100644 --- a/+types/+untyped/@DataStub/load_mat_style.m +++ b/+types/+untyped/@DataStub/load_mat_style.m @@ -46,6 +46,18 @@ end points = cell(length(dataDimensions), 1); + + if isscalar(dataDimensions) + % Starting in MATLAB R2024b, the input argument for the size + % of an array in ind2sub must be a vector of positive integers + % with two or more elements. This fix replicates the behavior of + % older MATLAB versions, where it was assumed that the a scalar + % size referred to the row dimension. For scalar dimensions + % (i.e., row or column vectors), we can still assume this + % to be true in matnwb. 
+ dataDimensions = [dataDimensions, 1]; + end + [points{:}] = ind2sub(dataDimensions, orderedSelection); readSpaceId = H5S.copy(spaceId); H5S.select_none(readSpaceId); @@ -187,4 +199,4 @@ indexKeyIndex(indexKeyIndexNextIndex) = indexKeyIndex(indexKeyIndexNextIndex) + 1; indexKeyIndex((indexKeyIndexNextIndex+1):end) = 1; end -end \ No newline at end of file +end diff --git a/+types/+untyped/ExternalLink.m b/+types/+untyped/ExternalLink.m index e5d566ad..23f0af16 100644 --- a/+types/+untyped/ExternalLink.m +++ b/+types/+untyped/ExternalLink.m @@ -28,7 +28,7 @@ % if path is valid hdf5 path, then returns either a Nwb Object, DataStub, or Link Object % otherwise errors, probably. assert(ischar(Link.filename), 'expecting filename to be a char array.'); - assert(2 == exist(Link.filename, 'file'), '%s does not exist.', Link.filename); + assert(isfile(Link.filename), '%s does not exist.', Link.filename); fid = H5F.open(Link.filename, 'H5F_ACC_RDONLY', 'H5P_DEFAULT'); LinkedInfo = h5info(Link.filename, Link.path); diff --git a/+types/+untyped/Set.m b/+types/+untyped/Set.m index 0fb36837..4790f762 100644 --- a/+types/+untyped/Set.m +++ b/+types/+untyped/Set.m @@ -220,19 +220,4 @@ function displayNonScalarObject(obj) disp([hdr newline body newline footer]); end end - - methods(Access=private) - %converts to cell string. Does not do type checking. - function cellval = merge_stringtypes(obj, val) - if isstring(val) - val = convertStringsToChars(val); - end - - if ischar(val) - cellval = {val}; - else - cellval = val; - end - end - end end \ No newline at end of file diff --git a/+types/+util/+dynamictable/addRow.m b/+types/+util/+dynamictable/addRow.m index 3afceb70..328223c1 100644 --- a/+types/+util/+dynamictable/addRow.m +++ b/+types/+util/+dynamictable/addRow.m @@ -40,7 +40,7 @@ function addRow(DynamicTable, varargin) 'If this was produced with pynwb, please enable chunking for this table.']); if istable(varargin{1}) - error("NWB:DynamicTable", ... + error('NWB:DynamicTable', ... ['Using MATLAB tables as input to the addRow DynamicTable method has '... 'been deprecated. Please, use key-value pairs instead']); else diff --git a/+types/+util/+dynamictable/addTableColumn.m b/+types/+util/+dynamictable/addTableColumn.m index 38503809..dfdba534 100644 --- a/+types/+util/+dynamictable/addTableColumn.m +++ b/+types/+util/+dynamictable/addTableColumn.m @@ -1,4 +1,4 @@ function addTableColumn(DynamicTable, subTable) -error("NWB:DynamicTable", ... +error('NWB:DynamicTable', ... ['Using MATLAB tables as input to the addColumn DynamicTable method has '... 'been deprecated. Please, use key-value pairs instead']) \ No newline at end of file diff --git a/+types/+util/+dynamictable/checkConfig.m b/+types/+util/+dynamictable/checkConfig.m index 2725c6df..66061a8e 100644 --- a/+types/+util/+dynamictable/checkConfig.m +++ b/+types/+util/+dynamictable/checkConfig.m @@ -1,4 +1,4 @@ -function checkConfig(DynamicTable, varargin) +function checkConfig(DynamicTable, ignoreList) % CHECKCONFIG Given a DynamicTable object, this functions checks for proper % DynamicTable configuration % @@ -13,10 +13,9 @@ function checkConfig(DynamicTable, varargin) % 1) The length of all columns in the dynamic table is the same. % 2) All rows have a corresponding id. If none exist, this function creates them. % 3) No index loops exist. 
- if nargin<2 - ignoreList = {}; - else - ignoreList = varargin{1}; + arguments + DynamicTable + ignoreList (1,:) cell = {}; end if isempty(DynamicTable.colnames) diff --git a/+types/+util/+dynamictable/clear.m b/+types/+util/+dynamictable/clear.m index 83056b87..cff60981 100644 --- a/+types/+util/+dynamictable/clear.m +++ b/+types/+util/+dynamictable/clear.m @@ -1,13 +1,23 @@ function clear(DynamicTable) %CLEAR Given a valid DynamicTable object, clears all rows and type % information in the table. -validateattributes(DynamicTable, {'types.hdmf_common.DynamicTable'}, {'scalar'}); -DynamicTable.id = types.hdmf_common.ElementIdentifiers(); +validateattributes(DynamicTable, {'types.hdmf_common.DynamicTable', 'types.core.DynamicTable'}, {'scalar'}); +if isa(DynamicTable, 'types.core.DynamicTable') % Schema version <2.2.0 + elementIdentifierClass = @types.core.ElementIdentifiers; + vectorDataClassName = 'types.core.VectorData'; + vectorIndexClassName = 'types.core.VectorIndex'; +else + elementIdentifierClass = @types.hdmf_common.ElementIdentifiers; + vectorDataClassName = 'types.hdmf_common.VectorData'; + vectorIndexClassName = 'types.hdmf_common.VectorIndex'; +end + +DynamicTable.id = elementIdentifierClass(); DynamicTable.vectordata = types.untyped.Set(@(nm, val)types.util.checkConstraint(... - 'vectordata', nm, struct(), {'types.hdmf_common.VectorData'}, val)); + 'vectordata', nm, struct(), {vectorDataClassName}, val)); if isprop(DynamicTable, 'vectorindex') % Schema version <2.3.0 DynamicTable.vectorindex = types.untyped.Set(@(nm, val)types.util.checkConstraint(... - 'vectorindex', nm, struct(), {'types.hdmf_common.VectorIndex'}, val)); + 'vectorindex', nm, struct(), {vectorIndexClassName}, val)); +end end -end \ No newline at end of file diff --git a/+types/+util/+dynamictable/getRow.m b/+types/+util/+dynamictable/getRow.m index bc5c00a6..97850f21 100644 --- a/+types/+util/+dynamictable/getRow.m +++ b/+types/+util/+dynamictable/getRow.m @@ -63,7 +63,7 @@ if is_row_dim(1) && is_row_dim(end) % Last dimension takes precedence is_row_dim(1:end-1) = false; - warning(... + warning('NWB:DynamicTable:VectorDataAmbiguousSize', ... ['The length of the first and last dimensions of ', ... 'VectorData for column "%s" match the number of ', ... 'rows in the dynamic table. Data is rearranged based on ', ... diff --git a/+types/+util/checkDependent.m b/+types/+util/checkDependent.m deleted file mode 100644 index 8de45586..00000000 --- a/+types/+util/checkDependent.m +++ /dev/null @@ -1,10 +0,0 @@ -function checkDependent(parent, children, unconstructed) - if ~any(strcmp(parent, unconstructed)) - for i=1:length(children) - child = children{i}; - if any(strcmp(child, unconstructed)) - error('Dependent type `%s` is required for parent property `%s`', child, parent); - end - end - end -end \ No newline at end of file diff --git a/+util/loadTimeSeriesData.m b/+util/loadTimeSeriesData.m index 4fc58d5b..6c6ce212 100644 --- a/+util/loadTimeSeriesData.m +++ b/+util/loadTimeSeriesData.m @@ -30,7 +30,8 @@ if isfinite(interval(2)) data = NaN(ceil(diff(interval) * fs), length(electrodes)); else - error('must specify time interval'); + error('NWB:LoadTimeSeries:UnspecifiedTimeInterval', ... + 'must specify time interval'); end for i = 1:length(electrodes) data(:,i) = util.loadTimeSeriesData(timeseries, interval, ... 
@@ -47,7 +48,8 @@ fs = timeseries.starting_time_rate; t0 = timeseries.starting_time; if interval(1) < t0 - error('interval bounds outside of time range'); + error('NWB:LoadTimeSeries:InvalidTimeInterval', ... + 'interval bounds outside of time range'); end start_ind = (interval(1) - t0) * fs; end @@ -63,7 +65,8 @@ fs = timeseries.starting_time_rate; t0 = timeseries.starting_time; if interval(2) > (dims(end) * fs + t0) - error('interval bounds outside of time range'); + error('NWB:LoadTimeSeries:InvalidTimeInterval', ... + 'interval bounds outside of time range'); end end_ind = (interval(2) - t0) * fs; end diff --git a/+util/loadTimeSeriesTimestamps.m b/+util/loadTimeSeriesTimestamps.m index 1d5b5efc..c2579c2f 100644 --- a/+util/loadTimeSeriesTimestamps.m +++ b/+util/loadTimeSeriesTimestamps.m @@ -28,8 +28,9 @@ else if downsample_factor ~= 1 - warning(['Downsampling a timestamps of a timeseries that may'... - 'not be uniformly sampled. This may have unintended behavior']) + warning('NWB:LoadTimeStamps:DownsampleNonUniformTimestamps', ... + [ 'Downsampling timestamps of a timeseries that may' ... + 'not be uniformly sampled. This may have unintended behavior']) end start_ind = fastsearch(timeseries.timestamps, interval(1), 1); if isinf(interval(2)) diff --git a/+util/read_indexed_column.m b/+util/read_indexed_column.m index 90af97ec..34e0c9c6 100644 --- a/+util/read_indexed_column.m +++ b/+util/read_indexed_column.m @@ -1,5 +1,5 @@ function data = read_indexed_column(vector_index, vector_data, row) -error("NWB:read_indexed_column", ... +error('NWB:read_indexed_column', ... ['The utility function read_indexed_column has been reprecated. Please,' ... ' use the getRow method of DynamicTable objects instead'] ... ) \ No newline at end of file diff --git a/+util/table2nwb.m b/+util/table2nwb.m index b2513b94..98553cb3 100644 --- a/+util/table2nwb.m +++ b/+util/table2nwb.m @@ -28,8 +28,8 @@ for col = T if ~strcmp(col.Properties.VariableNames{1},'id') - - if ~isempty(col.Properties.VariableDescriptions{1}) + if ~isempty(col.Properties.VariableDescriptions) ... 
+ && ~isempty(col.Properties.VariableDescriptions{1}) description = col.Properties.VariableDescriptions{1}; else description = 'no description provided'; diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 0d6230c5..89877a0f 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -28,7 +28,9 @@ jobs: run: | python -m pip install -U pip pip install -r +tests/requirements.txt + python -m pip list echo "HDF5_PLUGIN_PATH=$(python -c "import hdf5plugin; print(hdf5plugin.PLUGINS_PATH)")" >> "$GITHUB_ENV" + echo $( python -m pip show nwbinspector | grep ^Location: | awk '{print $2}' ) - name: Install MATLAB uses: matlab-actions/setup-matlab@v2 with: @@ -36,7 +38,10 @@ jobs: - name: Run tests uses: matlab-actions/run-command@v2 with: - command: results = assertSuccess(nwbtest); assert(~isempty(results), 'No tests ran'); + command: | + pyenv("ExecutionMode", "OutOfProcess"); + results = assertSuccess(nwbtest); + assert(~isempty(results), 'No tests ran'); - name: Upload JUnit results if: always() uses: actions/upload-artifact@v4 diff --git a/NwbFile.m b/NwbFile.m index cc88b1dd..e7dcddbc 100644 --- a/NwbFile.m +++ b/NwbFile.m @@ -12,17 +12,29 @@ % See also NWBREAD, GENERATECORE, GENERATEEXTENSION methods - function obj = NwbFile(varargin) - obj = obj@types.core.NWBFile(varargin{:}); - if strcmp(class(obj), 'NwbFile') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + function obj = NwbFile(propValues) + arguments + propValues.?types.core.NWBFile + propValues.nwb_version + end + nameValuePairs = namedargs2cell(propValues); + obj = obj@types.core.NWBFile(nameValuePairs{:}); + if strcmp(class(obj), 'NwbFile') %#ok + cellStringArguments = convertContainedStringsToChars(nameValuePairs(1:2:end)); types.util.checkUnset(obj, unique(cellStringArguments)); end end - function export(obj, filename) - %add to file create date - + function export(obj, filename, mode) + % export - Export NWB file object + + arguments + obj (1,1) NwbFile + filename (1,1) string + mode (1,1) string {mustBeMember(mode, ["edit", "overwrite"])} = "edit" + end + + % add to file create date if isa(obj.file_create_date, 'types.untyped.DataStub') obj.file_create_date = obj.file_create_date.load(); end @@ -43,26 +55,22 @@ function export(obj, filename) obj.timestamps_reference_time = obj.session_start_time; end - try - output_file_id = H5F.create(filename); - isEditingFile = false; - catch ME % if file exists, open and edit - if verLessThan('matlab', '9.9') % < 2020b - isEditingFile = strcmp(ME.identifier, 'MATLAB:imagesci:hdf5lib:libraryError')... 
- && contains(ME.message, '''File exists'''); - else - isEditingFile = strcmp(ME.identifier, 'MATLAB:imagesci:hdf5io:resourceAlreadyExists'); - end + isEditingFile = false; - if isEditingFile + if isfile(filename) + if mode == "edit" output_file_id = H5F.open(filename, 'H5F_ACC_RDWR', 'H5P_DEFAULT'); - else - rethrow(ME); + isEditingFile = true; + elseif mode == "overwrite" + output_file_id = H5F.create(filename, 'H5F_ACC_TRUNC', 'H5P_DEFAULT', 'H5P_DEFAULT'); end + else + output_file_id = H5F.create(filename); end try - obj.embedSpecifications(output_file_id); + jsonSpecs = schemes.exportJson(); + io.spec.writeEmbeddedSpecifications(output_file_id, jsonSpecs); refs = export@types.core.NWBFile(obj, output_file_id, '/', {}); obj.resolveReferences(output_file_id, refs); H5F.close(output_file_id); @@ -116,72 +124,26 @@ function resolveReferences(obj, fid, references) resolved(iRef) = exportSuccess; end - if ~any(resolved) - errorFormat = ['object(s) could not be created:\n%s\n\nThe '... - 'listed object(s) above contain an ObjectView, '... - 'RegionView , or SoftLink object that has failed to resolve itself. '... - 'Please check for any references that were not assigned to the root '... - ' NwbFile or if any of the above paths are incorrect.']; - unresolvedRefs = strjoin(references, newline); - error('NWB:NwbFile:UnresolvedReferences',... - errorFormat, file.addSpaces(unresolvedRefs, 4)); - end - - references(resolved) = []; - end - end + errorMessage = sprintf(... + ['Object(s) could not be created:\n%s\n\nThe listed '... + 'object(s) above contain an ObjectView, RegionView, or ' ... + 'SoftLink object that has failed to resolve itself. '... + 'Please check for any references that were not assigned ' ... + 'to the root NwbFile or if any of the above paths are ' ... + 'incorrect.'], file.addSpaces(strjoin(references, newline), 4)); - function embedSpecifications(~, fid) - try - attrId = H5A.open(fid, '/.specloc'); - specLocation = H5R.get_name(fid, 'H5R_OBJECT', H5A.read(attrId)); - H5A.close(attrId); - catch - specLocation = '/specifications'; - io.writeGroup(fid, specLocation); - specView = types.untyped.ObjectView(specLocation); - io.writeAttribute(fid, '/.specloc', specView); - end + assert( ... + all(resolved), ... + 'NWB:NwbFile:UnresolvedReferences', ... + errorMessage ... + ) - JsonData = schemes.exportJson(); - for iJson = 1:length(JsonData) - JsonDatum = JsonData(iJson); - schemaNamespaceLocation = strjoin({specLocation, JsonDatum.name}, '/'); - namespaceExists = io.writeGroup(fid, schemaNamespaceLocation); - if namespaceExists - namespaceGroupId = H5G.open(fid, schemaNamespaceLocation); - names = getVersionNames(namespaceGroupId); - H5G.close(namespaceGroupId); - for iNames = 1:length(names) - H5L.delete(fid, [schemaNamespaceLocation '/' names{iNames}],... - 'H5P_DEFAULT'); - end - end - schemaLocation =... - strjoin({schemaNamespaceLocation, JsonDatum.version}, '/'); - io.writeGroup(fid, schemaLocation); - Json = JsonDatum.json; - schemeNames = keys(Json); - for iScheme = 1:length(schemeNames) - name = schemeNames{iScheme}; - path = [schemaLocation '/' name]; - io.writeDataset(fid, path, Json(name)); - end + references(resolved) = []; end end end end -function versionNames = getVersionNames(namespaceGroupId) - [~, ~, versionNames] = H5L.iterate(namespaceGroupId,... - 'H5_INDEX_NAME', 'H5_ITER_NATIVE',... 
- 0, @removeGroups, {}); - function [status, versionNames] = removeGroups(~, name, versionNames) - versionNames{end+1} = name; - status = 0; - end -end - function tf = metaHasType(mc, typeSuffix) assert(isa(mc, 'meta.class')); tf = false; diff --git a/functionSignatures.json b/functionSignatures.json deleted file mode 100644 index 727d7537..00000000 --- a/functionSignatures.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "generateCore": - { - "inputs": - [ - { - "name":"core namespace", - "kind":"optional", - "type":"filepath=*.yaml,*.yml" - }, - { - "name":"extensions namespace(s)", - "kind":"optional", - "type":"filepath=*.yaml,*.yml", - "multiplicity":"append" - } - ] - }, - "generateExtension": - { - "inputs": - [ - { - "name":"namespace", - "kind":"required", - "type":"filepath=*.yaml,*.yml" - } - ] - }, - "nwbRead": - { - "inputs": - [ - {"name":"NWB File", "kind":"required", "type":"filepath=*.nwb,*.h5"} - ], - "outputs": - [ - {"name":"NwbFile Object", "type":"nwbfile"} - ] - }, - "nwbExport": - { - "inputs": - [ - {"name":"NwbFile Object", "kind":"required", "type":"NwbFile"}, - {"name":"path to output file", "kind":"required", "type":"filepath=*.nwb"} - ] - } -} diff --git a/generateCore.m b/generateCore.m index 65ed9a27..7349b8e1 100644 --- a/generateCore.m +++ b/generateCore.m @@ -1,4 +1,4 @@ -function generateCore(varargin) +function generateCore(version, options) % GENERATECORE Generate Matlab classes from NWB core schema files % GENERATECORE() Generate classes (Matlab m-files) from the % NWB core namespace file. By default, generates off of the most recent nwb-schema @@ -22,34 +22,31 @@ function generateCore(varargin) % generateCore('2.2.3'); % % See also GENERATEEXTENSION - - latestVersion = '2.7.0'; - - if nargin == 0 || strcmp(varargin{1}, 'savedir') - version = latestVersion; - else - version = varargin{1}; - validateattributes(version, {'char', 'string'}, {'scalartext'}, 'generateCore', 'version', 1); - version = char(version); - varargin = varargin(2:end); + + arguments + version (1,1) string {matnwb.common.mustBeValidSchemaVersion} = "latest" + options.savedir (1,1) string = misc.getMatnwbDir() end - - if strcmp(version, 'latest') - version = latestVersion; + + if version == "latest" + version = matnwb.common.findLatestSchemaVersion(); end - - schemaPath = fullfile(misc.getMatnwbDir(), 'nwb-schema', version); - corePath = fullfile(schemaPath, 'core', 'nwb.namespace.yaml'); - commonPath = fullfile(schemaPath,... - 'hdmf-common-schema', ... - 'common',... - 'namespace.yaml'); - assert(2 == exist(corePath, 'file'),... - 'NWB:GenerateCore:MissingCoreSchema',... - 'Cannot find suitable core namespace for schema version `%s`',... + + schemaPath = fullfile(misc.getMatnwbDir(), "nwb-schema", version); + corePath = fullfile(schemaPath, "core", "nwb.namespace.yaml"); + commonPath = fullfile(schemaPath, ... + "hdmf-common-schema", ... + "common", ... + "namespace.yaml"); + assert(isfile(corePath), ... + 'NWB:GenerateCore:MissingCoreSchema', ... + 'Cannot find suitable core namespace for schema version `%s`', ... 
version); - if 2 == exist(commonPath, 'file') - generateExtension(commonPath, varargin{:}); + + namespaceFiles = corePath; + if isfile(commonPath) + % Important: generate common before core if common is available + namespaceFiles = [commonPath, namespaceFiles]; end - generateExtension(corePath, varargin{:}); + generateExtension(namespaceFiles{:}, 'savedir', options.savedir); end diff --git a/generateExtension.m b/generateExtension.m index 67085df3..f4184350 100644 --- a/generateExtension.m +++ b/generateExtension.m @@ -1,4 +1,4 @@ -function generateExtension(varargin) +function generateExtension(namespaceFilePath, options) % GENERATEEXTENSION Generate Matlab classes from NWB extension schema file % GENERATEEXTENSION(extension_path...) Generate classes % (Matlab m-files) from one or more NWB schema extension namespace @@ -17,46 +17,42 @@ function generateExtension(varargin) % % See also GENERATECORE - for iOption = 1:length(varargin) - option = varargin{iOption}; - validateattributes(option, {'char', 'string'}, {'scalartext'} ... - , 'generateExtension', 'extension name', iOption); - if isstring(option) - varargin{iOption} = char(option); - end + arguments (Repeating) + namespaceFilePath (1,1) string {mustBeYamlFile} end - - saveDirMask = strcmp(varargin, 'savedir'); - if any(saveDirMask) - assert(~saveDirMask(end),... - 'NWB:GenerateExtenion:InvalidParameter',... - 'savedir must be paired with the desired save directory.'); - saveDir = varargin{find(saveDirMask, 1, 'last') + 1}; - saveDirParametersMask = saveDirMask | circshift(saveDirMask, 1); - sourceList = varargin(~saveDirParametersMask); - else - saveDir = misc.getMatnwbDir(); - sourceList = varargin; + arguments + options.savedir (1,1) string = misc.getMatnwbDir() end - - for iNamespaceFiles = 1:length(sourceList) - source = sourceList{iNamespaceFiles}; - validateattributes(source, {'char', 'string'}, {'scalartext'}); - - [localpath, ~, ~] = fileparts(source); - assert(2 == exist(source, 'file'),... - 'NWB:GenerateExtension:FileNotFound', 'Path to file `%s` could not be found.', source); - fid = fopen(source); - namespaceText = fread(fid, '*char') .'; - fclose(fid); + + assert( ... + ~isempty(namespaceFilePath), ... + 'NWB:GenerateExtension:NamespaceMissing', ... + 'Please provide the file path to at least one namespace specification file.' ... + ) + + for iNamespaceFiles = 1:length(namespaceFilePath) + + source = namespaceFilePath{iNamespaceFiles}; + namespaceText = fileread(source); + + [namespaceRootFolder, ~, ~] = fileparts(source); + parsedNamespaceList = spec.generate(namespaceText, namespaceRootFolder); - Namespaces = spec.generate(namespaceText, localpath); - - for iNamespace = 1:length(Namespaces) - Namespace = Namespaces(iNamespace); - spec.saveCache(Namespace, saveDir); - file.writeNamespace(Namespace.name, saveDir); - rehash(); + for iNamespace = 1:length(parsedNamespaceList) + parsedNamespace = parsedNamespaceList(iNamespace); + spec.saveCache(parsedNamespace, options.savedir); + file.writeNamespace(parsedNamespace.name, options.savedir); end end + rehash() +end + +function mustBeYamlFile(filePath) + arguments + filePath (1,1) string {mustBeFile} + end + + assert(endsWith(filePath, [".yaml", ".yml"], "IgnoreCase", true), ... + 'NWB:GenerateExtension:MustBeYaml', ... 
+ 'Expected file to point to a yaml file', filePath) end diff --git a/nwb-schema/2.8.0/core/nwb.base.yaml b/nwb-schema/2.8.0/core/nwb.base.yaml new file mode 100644 index 00000000..859e9045 --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.base.yaml @@ -0,0 +1,264 @@ +datasets: +- neurodata_type_def: NWBData + neurodata_type_inc: Data + doc: An abstract data type for a dataset. + +- neurodata_type_def: TimeSeriesReferenceVectorData + neurodata_type_inc: VectorData + default_name: timeseries + dtype: + - name: idx_start + dtype: int32 + doc: Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced + TimeSeries. The first dimension of those arrays is always time. + - name: count + dtype: int32 + doc: Number of data samples available in this time series, during this epoch + - name: timeseries + dtype: + target_type: TimeSeries + reftype: object + doc: The TimeSeries that this index applies to + doc: Column storing references to a TimeSeries (rows). For each TimeSeries this + VectorData column stores the start_index and count to indicate the range in time + to be selected as well as an object reference to the TimeSeries. + +- neurodata_type_def: Image + neurodata_type_inc: NWBData + dtype: numeric + dims: + - - x + - y + - - x + - y + - r, g, b + - - x + - y + - r, g, b, a + shape: + - - null + - null + - - null + - null + - 3 + - - null + - null + - 4 + doc: An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the + third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or + (x, y, (r, g, b, a)). + attributes: + - name: resolution + dtype: float32 + doc: Pixel resolution of the image, in pixels per centimeter. + required: false + - name: description + dtype: text + doc: Description of the image. + required: false + +- neurodata_type_def: ImageReferences + neurodata_type_inc: NWBData + dtype: + target_type: Image + reftype: object + dims: + - num_images + shape: + - null + doc: Ordered dataset of references to Image objects. + +groups: +- neurodata_type_def: NWBContainer + neurodata_type_inc: Container + doc: An abstract data type for a generic container storing collections of data and + metadata. Base type for all data and metadata containers. + +- neurodata_type_def: NWBDataInterface + neurodata_type_inc: NWBContainer + doc: An abstract data type for a generic container storing collections of data, + as opposed to metadata. + +- neurodata_type_def: TimeSeries + neurodata_type_inc: NWBDataInterface + doc: General purpose time series. + attributes: + - name: description + dtype: text + default_value: no description + doc: Description of the time series. + required: false + - name: comments + dtype: text + default_value: no comments + doc: Human-readable comments about the TimeSeries. This second descriptive field + can be used to store additional information, or descriptive information if the + primary description field is populated with a computer-readable string. + required: false + datasets: + - name: data + dims: + - - num_times + - - num_times + - num_DIM2 + - - num_times + - num_DIM2 + - num_DIM3 + - - num_times + - num_DIM2 + - num_DIM3 + - num_DIM4 + shape: + - - null + - - null + - null + - - null + - null + - null + - - null + - null + - null + - null + doc: Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension + should always represent time. This can also be used to store binary data + (e.g., image frames). This can also be a link to data stored in an external file. 
+ attributes: + - name: conversion + dtype: float32 + default_value: 1.0 + doc: Scalar to multiply each element in data to convert it to the specified 'unit'. + If the data are stored in acquisition system units or other units + that require a conversion to be interpretable, multiply the data by 'conversion' + to convert the data to the specified 'unit'. e.g. if the data acquisition system + stores values in this object as signed 16-bit integers (int16 range + -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data + acquisition system gain is 8000X, then the 'conversion' multiplier to get from + raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + required: false + - name: offset + dtype: float32 + default_value: 0.0 + doc: Scalar to add to the data after scaling by 'conversion' to finalize its coercion + to the specified 'unit'. Two common examples of this include (a) data stored in an + unsigned type that requires a shift after scaling to re-center the data, + and (b) specialized recording devices that naturally cause a scalar offset with + respect to the true units. + required: false + - name: resolution + dtype: float32 + default_value: -1.0 + doc: Smallest meaningful difference between values in data, stored in the specified + by unit, e.g., the change in value of the least significant bit, or a larger + number if signal noise is known to be present. If unknown, use -1.0. + required: false + - name: unit + dtype: text + doc: Base unit of measurement for working with the data. Actual stored values are + not necessarily stored in these units. To access the data in these units, + multiply 'data' by 'conversion' and add 'offset'. + - name: continuity + dtype: text + doc: Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or + "step". For example, a voltage trace would be "continuous", because samples + are recorded from a continuous process. An array of lick times would be "instantaneous", + because the data represents distinct moments in time. Times of image presentations would be + "step" because the picture remains the same until the next timepoint. This field is optional, + but is useful in providing information about the underlying data. It may inform the way this + data is interpreted, the way it is visualized, and what analysis methods are applicable. + required: false + - name: starting_time + dtype: float64 + doc: Timestamp of the first sample in seconds. When timestamps are uniformly + spaced, the timestamp of the first sample can be specified and all subsequent + ones calculated from the sampling rate attribute. + quantity: '?' + attributes: + - name: rate + dtype: float32 + doc: Sampling rate, in Hz. + - name: unit + dtype: text + value: seconds + doc: Unit of measurement for time, which is fixed to 'seconds'. + - name: timestamps + dtype: float64 + dims: + - num_times + shape: + - null + doc: Timestamps for samples stored in data, in seconds, relative to the + common experiment master-clock stored in NWBFile.timestamps_reference_time. + quantity: '?' + attributes: + - name: interval + dtype: int32 + value: 1 + doc: Value is '1' + - name: unit + dtype: text + value: seconds + doc: Unit of measurement for timestamps, which is fixed to 'seconds'. + - name: control + dtype: uint8 + dims: + - num_times + shape: + - null + doc: Numerical labels that apply to each time point in data for the purpose of + querying and slicing data by these values. 
If present, the length of this + array should be the same size as the first dimension of data. + quantity: '?' + - name: control_description + dtype: text + dims: + - num_control_values + shape: + - null + doc: Description of each control value. Must be present if control is present. + If present, control_description[0] should describe time points where control == 0. + quantity: '?' + groups: + - name: sync + doc: Lab-specific time and sync information as provided directly from hardware + devices and that is necessary for aligning all acquired time information to + a common timebase. The timestamp array stores time in the common timebase. + This group will usually only be populated in TimeSeries that are + stored external to the NWB file, in files storing raw data. Once timestamp + data is calculated, the contents of 'sync' are mostly for archival purposes. + quantity: '?' + +- neurodata_type_def: ProcessingModule + neurodata_type_inc: NWBContainer + doc: A collection of processed data. + attributes: + - name: description + dtype: text + doc: Description of this collection of processed data. + groups: + - neurodata_type_inc: NWBDataInterface + doc: Data objects stored in this collection. + quantity: '*' + - neurodata_type_inc: DynamicTable + doc: Tables stored in this collection. + quantity: '*' + +- neurodata_type_def: Images + neurodata_type_inc: NWBDataInterface + default_name: Images + doc: A collection of images with an optional way to specify the order of the images + using the "order_of_images" dataset. An order must be specified if the images are + referenced by index, e.g., from an IndexSeries. + attributes: + - name: description + dtype: text + doc: Description of this collection of images. + datasets: + - neurodata_type_inc: Image + doc: Images stored in this collection. + quantity: '+' + - name: order_of_images + neurodata_type_inc: ImageReferences + doc: Ordered dataset of references to Image objects stored in the parent group. + Each Image object in the Images group should be stored once and only once, so + the dataset should have the same length as the number of images. + quantity: '?' diff --git a/nwb-schema/2.8.0/core/nwb.behavior.yaml b/nwb-schema/2.8.0/core/nwb.behavior.yaml new file mode 100644 index 00000000..1a951b2f --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.behavior.yaml @@ -0,0 +1,124 @@ +groups: +- neurodata_type_def: SpatialSeries + neurodata_type_inc: TimeSeries + doc: "Direction, e.g., of gaze or travel, or position. The TimeSeries::data field\ + \ is a 2D array storing position or direction relative to some reference frame.\ + \ Array structure: [num measurements] [num dimensions]. Each SpatialSeries has\ + \ a text dataset reference_frame that indicates the zero-position, or the zero-axes\ + \ for direction. For example, if representing gaze direction, 'straight-ahead'\ + \ might be a specific pixel on the monitor, or some other point in space. For\ + \ position data, the 0,0 point might be the top-left corner of an enclosure, as\ + \ viewed from the tracking camera. The unit of data will indicate how to interpret\ + \ SpatialSeries values." + datasets: + - name: data + dtype: numeric + dims: + - - num_times + - - num_times + - x + - - num_times + - x,y + - - num_times + - x,y,z + shape: + - - null + - - null + - 1 + - - null + - 2 + - - null + - 3 + doc: 1-D or 2-D array storing position or direction relative to some reference frame. 
+ attributes: + - name: unit + dtype: text + default_value: meters + doc: Base unit of measurement for working with the data. The default value + is 'meters'. Actual stored values are not necessarily stored in these units. + To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + required: false + - name: reference_frame + dtype: text + doc: Description defining what exactly 'straight-ahead' means. + quantity: '?' + +- neurodata_type_def: BehavioralEpochs + neurodata_type_inc: NWBDataInterface + default_name: BehavioralEpochs + doc: TimeSeries for storing behavioral epochs. The objective of this and the other + two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries) is + to provide generic hooks for software tools/scripts. This allows a tool/script + to take the output one specific interface (e.g., UnitTimes) and plot that data + relative to another data modality (e.g., behavioral events) without having to + define all possible modalities in advance. Declaring one of these interfaces means + that one or more TimeSeries of the specified type is published. These TimeSeries + should reside in a group having the same name as the interface. For example, if + a BehavioralTimeSeries interface is declared, the module will have one or more + TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. BehavioralEpochs + should use IntervalSeries. BehavioralEvents is used for irregular events. BehavioralTimeSeries + is for continuous data. + groups: + - neurodata_type_inc: IntervalSeries + doc: IntervalSeries object containing start and stop times of epochs. + quantity: '*' + +- neurodata_type_def: BehavioralEvents + neurodata_type_inc: NWBDataInterface + default_name: BehavioralEvents + doc: TimeSeries for storing behavioral events. See description of BehavioralEpochs + for more details. + groups: + - neurodata_type_inc: TimeSeries + doc: TimeSeries object containing behavioral events. + quantity: '*' + +- neurodata_type_def: BehavioralTimeSeries + neurodata_type_inc: NWBDataInterface + default_name: BehavioralTimeSeries + doc: TimeSeries for storing Behavoioral time series data. See description of BehavioralEpochs + for more details. + groups: + - neurodata_type_inc: TimeSeries + doc: TimeSeries object containing continuous behavioral data. + quantity: '*' + +- neurodata_type_def: PupilTracking + neurodata_type_inc: NWBDataInterface + default_name: PupilTracking + doc: Eye-tracking data, representing pupil size. + groups: + - neurodata_type_inc: TimeSeries + doc: TimeSeries object containing time series data on pupil size. + quantity: '+' + +- neurodata_type_def: EyeTracking + neurodata_type_inc: NWBDataInterface + default_name: EyeTracking + doc: Eye-tracking data, representing direction of gaze. + groups: + - neurodata_type_inc: SpatialSeries + doc: SpatialSeries object containing data measuring direction of gaze. + quantity: '*' + +- neurodata_type_def: CompassDirection + neurodata_type_inc: NWBDataInterface + default_name: CompassDirection + doc: With a CompassDirection interface, a module publishes a SpatialSeries object + representing a floating point value for theta. The SpatialSeries::reference_frame + field should indicate what direction corresponds to 0 and which is the direction + of rotation (this should be clockwise). The si_unit for the SpatialSeries should + be radians or degrees. + groups: + - neurodata_type_inc: SpatialSeries + doc: SpatialSeries object containing direction of gaze travel. 
+ quantity: '*' + +- neurodata_type_def: Position + neurodata_type_inc: NWBDataInterface + default_name: Position + doc: Position data, whether along the x, x/y or x/y/z axis. + groups: + - neurodata_type_inc: SpatialSeries + doc: SpatialSeries object containing position data. + quantity: '+' diff --git a/nwb-schema/2.8.0/core/nwb.device.yaml b/nwb-schema/2.8.0/core/nwb.device.yaml new file mode 100644 index 00000000..5f1a993d --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.device.yaml @@ -0,0 +1,26 @@ +groups: +- neurodata_type_def: Device + neurodata_type_inc: NWBContainer + doc: Metadata about a data acquisition device, e.g., recording system, electrode, microscope. + attributes: + - name: description + dtype: text + doc: Description of the device as free-form text. If there is any software/firmware associated + with the device, the names and versions of those can be added to NWBFile.was_generated_by. + required: false + - name: manufacturer + dtype: text + doc: The name of the manufacturer of the device, e.g., Imec, Plexon, Thorlabs. + required: false + - name: model_number + dtype: text + doc: The model number (or part/product number) of the device, e.g., PRB_1_4_0480_1, PLX-VP-32-15SE(75)-(260-80)(460-10)-300-(1)CON/32m-V, BERGAMO. + required: false + - name: model_name + dtype: text + doc: The model name of the device, e.g., Neuropixels 1.0, V-Probe, Bergamo III. + required: false + - name: serial_number + dtype: text + doc: The serial number of the device. + required: false diff --git a/nwb-schema/2.8.0/core/nwb.ecephys.yaml b/nwb-schema/2.8.0/core/nwb.ecephys.yaml new file mode 100644 index 00000000..69a66639 --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.ecephys.yaml @@ -0,0 +1,332 @@ +groups: +- neurodata_type_def: ElectricalSeries + neurodata_type_inc: TimeSeries + doc: A time series of acquired voltage data from extracellular recordings. + The data field is an int or float array storing data in volts. The first + dimension should always represent time. The second dimension, if present, + should represent channels. + attributes: + - name: filtering + dtype: text + doc: Filtering applied to all channels of the data. For example, if this ElectricalSeries represents + high-pass-filtered data (also known as AP Band), then this value could be "High-pass 4-pole Bessel filter + at 500 Hz". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, + then this value could be "Low-pass filter at 300 Hz". If a non-standard filter type is used, provide as much + detail about the filter properties as possible. + required: false + datasets: + - name: data + dtype: numeric + dims: + - - num_times + - - num_times + - num_channels + - - num_times + - num_channels + - num_samples + shape: + - - null + - - null + - null + - - null + - null + - null + doc: Recorded voltage data. + attributes: + - name: unit + dtype: text + value: volts + doc: Base unit of measurement for working with the data. This value is fixed to + 'volts'. Actual stored values are not necessarily stored in these units. To + access the data in these units, multiply 'data' by 'conversion', followed by + 'channel_conversion' (if present), and then add 'offset'. + - name: electrodes + neurodata_type_inc: DynamicTableRegion + doc: DynamicTableRegion pointer to the electrodes that this time series was generated from. + - name: channel_conversion + dtype: float32 + dims: + - num_channels + shape: + - null + doc: Channel-specific conversion factor. 
Multiply the data in the 'data' dataset by these + values along the channel axis (as indicated by axis attribute) AND by the global + conversion factor in the 'conversion' attribute of 'data' to get the data values in + Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This + approach allows for both global and per-channel data conversion factors needed + to support the storage of electrical recordings as native values generated by data + acquisition systems. If this dataset is not present, then there is no channel-specific + conversion factor, i.e. it is 1 for all channels. + quantity: '?' + attributes: + - name: axis + dtype: int32 + value: 1 + doc: The zero-indexed axis of the 'data' dataset that the channel-specific conversion + factor corresponds to. This value is fixed to 1. + +- neurodata_type_def: SpikeEventSeries + neurodata_type_inc: ElectricalSeries + doc: "Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This + may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description + field should describe how events were detected. All events span the same recording + channels and store + snapshots of equal duration. TimeSeries::data array structure: [num events] + [num channels] [num samples] (or [num events] [num samples] for single electrode)." + datasets: + - name: data + dtype: numeric + dims: + - - num_events + - num_samples + - - num_events + - num_channels + - num_samples + shape: + - - null + - null + - - null + - null + - null + doc: Spike waveforms. + attributes: + - name: unit + dtype: text + value: volts + doc: Unit of measurement for waveforms, which is fixed to 'volts'. + - name: timestamps + dtype: float64 + dims: + - num_times + shape: + - null + doc: Timestamps for samples stored in data, in seconds, relative to the + common experiment master-clock stored in NWBFile.timestamps_reference_time. + Timestamps are required for the events. Unlike for TimeSeries, timestamps are + required for SpikeEventSeries and are thus re-specified here. + attributes: + - name: interval + dtype: int32 + value: 1 + doc: Value is '1' + - name: unit + dtype: text + value: seconds + doc: Unit of measurement for timestamps, which is fixed to 'seconds'. + +- neurodata_type_def: FeatureExtraction + neurodata_type_inc: NWBDataInterface + default_name: FeatureExtraction + doc: Features, such as PC1 and PC2, that are extracted from signals stored in a + SpikeEventSeries or other source. + datasets: + - name: description + dtype: text + dims: + - num_features + shape: + - null + doc: Description of features (eg, ''PC1'') for each of the extracted features. + - name: features + dtype: float32 + dims: + - num_events + - num_channels + - num_features + shape: + - null + - null + - null + doc: Multi-dimensional array of features extracted from each event. + - name: times + dtype: float64 + dims: + - num_events + shape: + - null + doc: Times of events that features correspond to (can be a link). + - name: electrodes + neurodata_type_inc: DynamicTableRegion + doc: DynamicTableRegion pointer to the electrodes that this time series was generated from. + +- neurodata_type_def: EventDetection + neurodata_type_inc: NWBDataInterface + default_name: EventDetection + doc: Detected spike events from voltage trace(s). + datasets: + - name: detection_method + dtype: text + doc: Description of how events were detected, such as voltage threshold, or dV/dT + threshold, as well as relevant values. 
+ - name: source_idx + dtype: int32 + dims: + - num_events + shape: + - null + doc: Indices (zero-based) into source ElectricalSeries::data array corresponding + to time of event. ''description'' should define what is meant by time of + event (e.g., .25 ms before action potential peak, zero-crossing time, etc). + The index points to each event from the raw data. + - name: times + dtype: float64 + dims: + - num_events + shape: + - null + doc: Timestamps of events, in seconds. + attributes: + - name: unit + dtype: text + value: seconds + doc: Unit of measurement for event times, which is fixed to 'seconds'. + links: + - name: source_electricalseries + target_type: ElectricalSeries + doc: Link to the ElectricalSeries that this data was calculated from. Metadata + about electrodes and their position can be read from that ElectricalSeries so + it's not necessary to include that information here. + +- neurodata_type_def: EventWaveform + neurodata_type_inc: NWBDataInterface + default_name: EventWaveform + doc: DEPRECATED. Represents either the waveforms of detected events, as extracted from a raw + data trace in /acquisition, or the event waveforms that were stored during experiment + acquisition. + groups: + - neurodata_type_inc: SpikeEventSeries + doc: SpikeEventSeries object(s) containing detected spike event waveforms. + quantity: '*' + +- neurodata_type_def: FilteredEphys + neurodata_type_inc: NWBDataInterface + default_name: FilteredEphys + doc: Electrophysiology data from one or more channels that has been subjected to filtering. + Examples of filtered data include Theta and Gamma (LFP has its own interface). + FilteredEphys modules publish an ElectricalSeries for each filtered channel or + set of channels. The name of each ElectricalSeries is arbitrary but should be + informative. The source of the filtered data, whether this is from analysis of + another time series or as acquired by hardware, should be noted in each's TimeSeries::description + field. There is no assumed 1::1 correspondence between filtered ephys signals + and electrodes, as a single signal can apply to many nearby electrodes, and one + electrode may have different filtered (e.g., theta and/or gamma) signals represented. + Filter properties should be noted in the ElectricalSeries 'filtering' attribute. + groups: + - neurodata_type_inc: ElectricalSeries + doc: ElectricalSeries object(s) containing filtered electrophysiology data. + quantity: '+' + +- neurodata_type_def: LFP + neurodata_type_inc: NWBDataInterface + default_name: LFP + doc: LFP data from one or more channels. The electrode map in each published ElectricalSeries + will identify which channels are providing LFP data. Filter properties should + be noted in the ElectricalSeries 'filtering' attribute. + groups: + - neurodata_type_inc: ElectricalSeries + doc: ElectricalSeries object(s) containing LFP data for one or more channels. + quantity: '+' + +- neurodata_type_def: ElectrodeGroup + neurodata_type_inc: NWBContainer + doc: A physical grouping of electrodes, e.g. a shank of an array. + attributes: + - name: description + dtype: text + doc: Description of this electrode group. + - name: location + dtype: text + doc: Location of electrode group. Specify the area, layer, comments on estimation + of area/layer, etc. Use standard atlas names for anatomical regions when possible. 
+ datasets: + - name: position + dtype: + - name: x + dtype: float32 + doc: x coordinate + - name: y + dtype: float32 + doc: y coordinate + - name: z + dtype: float32 + doc: z coordinate + doc: stereotaxic or common framework coordinates + quantity: '?' + links: + - name: device + target_type: Device + doc: Link to the device that was used to record from this electrode group. + +# The types below have been deprecated +- neurodata_type_def: ClusterWaveforms + neurodata_type_inc: NWBDataInterface + default_name: ClusterWaveforms + doc: DEPRECATED The mean waveform shape, including standard deviation, of the different + clusters. Ideally, the waveform analysis should be performed on data that is only + high-pass filtered. This is a separate module because it is expected to require + updating. For example, IMEC probes may require different storage requirements + to store/display mean waveforms, requiring a new interface or an extension of + this one. + datasets: + - name: waveform_filtering + dtype: text + doc: Filtering applied to data before generating mean/sd + - name: waveform_mean + dtype: float32 + dims: + - num_clusters + - num_samples + shape: + - null + - null + doc: The mean waveform for each cluster, using the same indices for each wave + as cluster numbers in the associated Clustering module (i.e, cluster 3 is in + array slot [3]). Waveforms corresponding to gaps in cluster sequence should + be empty (e.g., zero- filled) + - name: waveform_sd + dtype: float32 + dims: + - num_clusters + - num_samples + shape: + - null + - null + doc: Stdev of waveforms for each cluster, using the same indices as in mean + links: + - name: clustering_interface + target_type: Clustering + doc: Link to Clustering interface that was the source of the clustered data + +- neurodata_type_def: Clustering + neurodata_type_inc: NWBDataInterface + default_name: Clustering + doc: DEPRECATED Clustered spike data, whether from automatic clustering tools (e.g., + klustakwik) or as a result of manual sorting. + datasets: + - name: description + dtype: text + doc: Description of clusters or clustering, (e.g. cluster 0 is noise, clusters + curated using Klusters, etc) + - name: num + dtype: int32 + dims: + - num_events + shape: + - null + doc: Cluster number of each event + - name: peak_over_rms + dtype: float32 + dims: + - num_clusters + shape: + - null + doc: Maximum ratio of waveform peak to RMS on any channel in the cluster (provides + a basic clustering metric). + - name: times + dtype: float64 + dims: + - num_events + shape: + - null + doc: Times of clustered events, in seconds. This may be a link to times field + in associated FeatureExtraction module. diff --git a/nwb-schema/2.8.0/core/nwb.epoch.yaml b/nwb-schema/2.8.0/core/nwb.epoch.yaml new file mode 100644 index 00000000..a4c06a90 --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.epoch.yaml @@ -0,0 +1,31 @@ +groups: +- neurodata_type_def: TimeIntervals + neurodata_type_inc: DynamicTable + doc: A container for aggregating epoch data and the TimeSeries that each epoch applies + to. + datasets: + - name: start_time + neurodata_type_inc: VectorData + dtype: float32 + doc: Start time of epoch, in seconds. + - name: stop_time + neurodata_type_inc: VectorData + dtype: float32 + doc: Stop time of epoch, in seconds. + - name: tags + neurodata_type_inc: VectorData + dtype: text + doc: User-defined tags that identify or categorize events. + quantity: '?' + - name: tags_index + neurodata_type_inc: VectorIndex + doc: Index for tags. + quantity: '?' 
+ - name: timeseries + neurodata_type_inc: TimeSeriesReferenceVectorData + doc: An index into a TimeSeries object. + quantity: '?' + - name: timeseries_index + neurodata_type_inc: VectorIndex + doc: Index for timeseries. + quantity: '?' diff --git a/nwb-schema/2.8.0/core/nwb.file.yaml b/nwb-schema/2.8.0/core/nwb.file.yaml new file mode 100644 index 00000000..e9d146e7 --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.file.yaml @@ -0,0 +1,524 @@ +groups: +- neurodata_type_def: NWBFile + neurodata_type_inc: NWBContainer + name: root + doc: An NWB file storing cellular-based neurophysiology data from a single + experimental session. + attributes: + - name: nwb_version + dtype: text + value: "2.8.0" + doc: File version string. Use semantic versioning, e.g. 1.2.1. This will be the + name of the format with trailing major, minor and patch numbers. + datasets: + - name: file_create_date + dtype: isodatetime + dims: + - num_modifications + shape: + - null + doc: 'A record of the date the file was created and of subsequent modifications. + The date is stored in UTC with local timezone offset as ISO 8601 + extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in + UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds. + The file can be created after the experiment was run, so this may differ from + the experiment start time. Each modification to the nwb file adds a new entry + to the array.' + - name: identifier + dtype: text + doc: A unique text identifier for the file. For example, concatenated lab name, + file creation date/time and experimentalist, or a hash of these and/or other + values. The goal is that the string should be unique to all other files. + - name: session_description + dtype: text + doc: A description of the experimental session and data in the file. + - name: session_start_time + dtype: isodatetime + doc: 'Date and time of the experiment/session start. The date is stored + in UTC with local timezone offset as ISO 8601 extended formatted string: + 2018-09-28T14:43:54.123+02:00. + Dates stored in UTC end in "Z" with no timezone offset. Date accuracy is + up to milliseconds.' + - name: timestamps_reference_time + dtype: isodatetime + doc: 'Date and time corresponding to time zero of all timestamps. The + date is stored in UTC with local timezone offset as ISO 8601 extended formatted + string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with + no timezone offset. Date accuracy is up to milliseconds. All times stored + in the file use this time as reference (i.e., time zero).' + groups: + - name: acquisition + doc: Data streams recorded from the system, including ephys, ophys, tracking, + etc. This group should be read-only after the experiment is completed and + timestamps are corrected to a common timebase. The data stored here may be links + to raw data stored in external NWB files. This will allow keeping bulky raw + data out of the file while preserving the option of keeping some/all in the + file. Acquired data includes tracking and experimental data streams + (i.e., everything measured from the system). If bulky data is stored in the /acquisition + group, the data can exist in a separate NWB file that is linked to by the file + being used for processing and analysis. + groups: + - neurodata_type_inc: NWBDataInterface + doc: Acquired, raw data. 
+ quantity: '*' + - neurodata_type_inc: DynamicTable + doc: Tabular data that is relevant to acquisition + quantity: '*' + - name: analysis + doc: Lab-specific and custom scientific analysis of data. There is no defined + format for the content of this group - the format is up to the individual user/lab. + To facilitate sharing analysis data between labs, the contents here + should be stored in standard types (e.g., neurodata_types) and appropriately documented. + The file can store lab-specific and custom data analysis without + restriction on its form or schema, reducing data formatting restrictions on + end users. Such data should be placed in the analysis group. The analysis data + should be documented so that it could be shared with other labs. + groups: + - neurodata_type_inc: NWBContainer + doc: Custom analysis results. + quantity: '*' + - neurodata_type_inc: DynamicTable + doc: Tabular data that is relevant to data stored in analysis + quantity: '*' + - name: scratch + doc: 'A place to store one-off analysis results. Data placed here is not intended for + sharing. By placing data here, users acknowledge that there is no guarantee that + their data meets any standard.' + quantity: '?' + groups: + - neurodata_type_inc: NWBContainer + doc: Any one-off containers + quantity: '*' + - neurodata_type_inc: DynamicTable + doc: Any one-off tables + quantity: '*' + datasets: + - neurodata_type_inc: ScratchData + doc: Any one-off datasets + quantity: '*' + - name: processing + doc: "The home for ProcessingModules. These modules perform intermediate analysis\ + \ of data that is necessary to perform before scientific analysis. Examples\ + \ include spike clustering, extracting position from tracking data, stitching\ + \ together image slices. ProcessingModules can be large\ + \ and express many data sets from relatively complex analysis (e.g., spike detection\ + \ and clustering) or small, representing extraction of position information\ + \ from tracking video, or even binary lick/no-lick decisions. Common software\ + \ tools (e.g., klustakwik, MClust) are expected to read/write data here. \ + \ 'Processing' refers to intermediate analysis of the acquired data to make\ + \ it more amenable to scientific analysis." + groups: + - neurodata_type_inc: ProcessingModule + doc: Intermediate analysis of acquired data. + quantity: '*' + - name: stimulus + doc: 'Data pushed into the system (eg, video stimulus, sound, voltage, etc) and + secondary representations of that data (eg, measurements of something used as + a stimulus). This group should be made read-only after experiment complete and timestamps + are corrected to common timebase. Stores both presented stimuli and stimulus + templates, the latter in case the same stimulus is presented multiple times, + or is pulled from an external stimulus library. Stimuli are here + defined as any signal that is pushed into the system as part of the experiment + (eg, sound, video, voltage, etc). Many different experiments can use the same + stimuli, and stimuli can be re-used during an experiment. The stimulus group + is organized so that one version of template stimuli can be stored and these + be used multiple times. These templates can exist in the present file or can + be linked to a remote library file.' + groups: + - name: presentation + doc: Stimuli presented during the experiment. + groups: + - neurodata_type_inc: TimeSeries + doc: TimeSeries objects containing data of presented stimuli. 
+ quantity: '*' + - neurodata_type_inc: NWBDataInterface + doc: 'Generic NWB data interfaces, usually from an extension, + containing data of presented stimuli.' + quantity: '*' + - neurodata_type_inc: DynamicTable + doc: DynamicTable objects containing data of presented stimuli. + quantity: '*' + # even though TimeSeries is a child type of NWBDataInterface, we do not remove TimeSeries + # in order to maintain backwards compatibility in the APIs that + # use the neurodata_type_inc from the schema to set the variable name + - name: templates + doc: 'Template stimuli. Timestamps in templates are based on stimulus + design and are relative to the beginning of the stimulus. When templates are + used, the stimulus instances must convert presentation times to the experiment`s + time reference frame.' + groups: + - neurodata_type_inc: TimeSeries + doc: TimeSeries objects containing template data of presented stimuli. + quantity: '*' + - neurodata_type_inc: Images + doc: Images objects containing images of presented stimuli. + quantity: '*' + - name: general + doc: "Experimental metadata, including protocol, notes and description of hardware\ + \ device(s). The metadata stored in this section should be used to\ + \ describe the experiment. Metadata necessary for interpreting the data is stored\ + \ with the data. General experimental metadata, including animal\ + \ strain, experimental protocols, experimenter, devices, etc, are stored under\ + \ 'general'. Core metadata (e.g., that required to interpret data fields) is\ + \ stored with the data itself, and implicitly defined by the file specification\ + \ (e.g., time is in seconds). The strategy used here for storing non-core metadata\ + \ is to use free-form text fields, such as would appear in sentences or paragraphs\ + \ from a Methods section. Metadata fields are text to enable them to be more\ + \ general, for example to represent ranges instead of numerical values. Machine-readable\ + \ metadata is stored as attributes to these free-form datasets. All entries\ + \ in the below table are to be included when data is present. Unused groups\ + \ (e.g., intracellular_ephys in an optophysiology experiment) should not be\ + \ created unless there is data to store within them." + datasets: + - name: data_collection + dtype: text + doc: Notes about data collection and analysis. + quantity: '?' + - name: experiment_description + dtype: text + doc: General description of the experiment. + quantity: '?' + - name: experimenter + dtype: text + doc: Name of person(s) who performed the experiment. Can also specify roles + of different people involved. + quantity: '?' + dims: + - num_experimenters + shape: + - null + - name: institution + dtype: text + doc: Institution(s) where experiment was performed. + quantity: '?' + - name: keywords + dtype: text + dims: + - num_keywords + shape: + - null + doc: Terms to search over. + quantity: '?' + - name: lab + dtype: text + doc: Laboratory where experiment was performed. + quantity: '?' + - name: notes + dtype: text + doc: Notes about the experiment. + quantity: '?' + - name: pharmacology + dtype: text + doc: Description of drugs used, including how and when they were administered. + Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc. + quantity: '?' + - name: protocol + dtype: text + doc: Experimental protocol, if applicable. e.g., include IACUC protocol number. + quantity: '?' + - name: related_publications + dtype: text + doc: Publication information. PMID, DOI, URL, etc. 
+ dims: + - num_publications + shape: + - null + quantity: '?' + - name: session_id + dtype: text + doc: Lab-specific ID for the session. + quantity: '?' + - name: slices + dtype: text + doc: Description of slices, including information about preparation thickness, + orientation, temperature, and bath solution. + quantity: '?' + - name: source_script + dtype: text + doc: Script file or link to public source code used to create this NWB file. + quantity: '?' + attributes: + - name: file_name + dtype: text + doc: Name of script file. + - name: was_generated_by + dtype: text + doc: Name and version of software package(s) used to generate data contained in + this NWB File. For each software package or library, include the name of the + software as the first value and the version as the second value. + dims: + - num_sources + - name, version + shape: + - null + - 2 + quantity: '?' + - name: stimulus + dtype: text + doc: Notes about stimuli, such as how and where they were presented. + quantity: '?' + - name: surgery + dtype: text + doc: Narrative description about surgery/surgeries, including date(s) and who + performed surgery. + quantity: '?' + - name: virus + dtype: text + doc: Information about virus(es) used in experiments, including virus ID, source, + date made, injection location, volume, etc. + quantity: '?' + groups: + - neurodata_type_inc: LabMetaData + doc: Place-holder that can be extended so that lab-specific meta-data can be + placed in /general. + quantity: '*' + - name: devices + doc: Description of hardware devices used during experiment, e.g., monitors, + ADC boards, microscopes, etc. + quantity: '?' + groups: + - neurodata_type_inc: Device + doc: Data acquisition devices. + quantity: '*' + - name: subject + neurodata_type_inc: Subject + doc: Information about the animal or person from which the data was measured. + quantity: '?' + - name: extracellular_ephys + doc: Metadata related to extracellular electrophysiology. + quantity: '?' + groups: + - neurodata_type_inc: ElectrodeGroup + doc: Physical group of electrodes. + quantity: '*' + - name: electrodes + neurodata_type_inc: DynamicTable + doc: A table of all electrodes (i.e., channels) used for recording. + quantity: '?' + datasets: + - name: x + neurodata_type_inc: VectorData + dtype: float32 + doc: x coordinate of the channel location in the brain (+x is posterior). + quantity: '?' + - name: y + neurodata_type_inc: VectorData + dtype: float32 + doc: y coordinate of the channel location in the brain (+y is inferior). + quantity: '?' + - name: z + neurodata_type_inc: VectorData + dtype: float32 + doc: z coordinate of the channel location in the brain (+z is right). + quantity: '?' + - name: imp + neurodata_type_inc: VectorData + dtype: float32 + doc: Impedance of the channel, in ohms. + quantity: '?' + - name: location + neurodata_type_inc: VectorData + dtype: text + doc: Location of the electrode (channel). Specify the area, layer, comments + on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use + standard atlas names for anatomical regions when possible. + - name: filtering + neurodata_type_inc: VectorData + dtype: text + doc: Description of hardware filtering, including the filter name and frequency cutoffs. + quantity: '?' + - name: group + neurodata_type_inc: VectorData + dtype: + target_type: ElectrodeGroup + reftype: object + doc: Reference to the ElectrodeGroup this electrode is a part of.
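Since the electrodes table defined in this hunk is a DynamicTable, an API can build it column by column or from a tabular object. A rough MatNWB sketch, untested and with illustrative names ('probe0', 'shank0'); it assumes util.table2nwb and the object forms of types.untyped.SoftLink/ObjectView available in recent MatNWB releases (older releases take HDF5 path strings instead):

    % Sketch: populate /general/extracellular_ephys/electrodes (assumed API).
    nwb.general_devices.set('probe0', types.core.Device());
    group = types.core.ElectrodeGroup( ...
        'description', 'example shank', ...
        'location', 'V1', ...
        'device', types.untyped.SoftLink(nwb.general_devices.get('probe0')));
    nwb.general_extracellular_ephys.set('shank0', group);

    % location, group, and group_name are required columns; x/y/z, imp,
    % filtering, rel_*, and reference are optional ('?') per the spec.
    T = table( ...
        [10.0; 20.0], ...                          % x coordinate, float32
        {'V1'; 'V1'}, ...                          % location, text
        [types.untyped.ObjectView(group); types.untyped.ObjectView(group)], ...
        {'shank0'; 'shank0'}, ...                  % group_name, text
        'VariableNames', {'x', 'location', 'group', 'group_name'});
    nwb.general_extracellular_ephys_electrodes = util.table2nwb(T, 'all electrodes');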
+ - name: group_name + neurodata_type_inc: VectorData + dtype: text + doc: Name of the ElectrodeGroup this electrode is a part of. + - name: rel_x + neurodata_type_inc: VectorData + dtype: float32 + doc: x coordinate in electrode group + quantity: '?' + - name: rel_y + neurodata_type_inc: VectorData + dtype: float32 + doc: y coordinate in electrode group + quantity: '?' + - name: rel_z + neurodata_type_inc: VectorData + dtype: float32 + doc: z coordinate in electrode group + quantity: '?' + - name: reference + neurodata_type_inc: VectorData + dtype: text + doc: Description of the reference electrode and/or reference scheme used for this electrode, e.g., + "stainless steel skull screw" or "online common average referencing". + quantity: '?' + - name: intracellular_ephys + doc: Metadata related to intracellular electrophysiology. + quantity: '?' + datasets: + - name: filtering + dtype: text + doc: '[DEPRECATED] Use IntracellularElectrode.filtering instead. Description + of filtering used. Includes filtering type and parameters, frequency fall-off, + etc. If this changes between TimeSeries, filter description should be stored + as a text attribute for each TimeSeries.' + quantity: '?' + groups: + - neurodata_type_inc: IntracellularElectrode + doc: An intracellular electrode. + quantity: '*' + - name: sweep_table + neurodata_type_inc: SweepTable + doc: '[DEPRECATED] Table used to group different PatchClampSeries. SweepTable + is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable + tables. Additional SequentialRecordingsTable, RepetitionsTable and + ExperimentalConditions tables provide enhanced support for experiment metadata.' + quantity: '?' + - name: intracellular_recordings + neurodata_type_inc: IntracellularRecordingsTable + doc: A table to group together a stimulus and response from a single electrode + and a single simultaneous recording. Each row in the table represents a + single recording consisting typically of a stimulus and a corresponding + response. In some cases, however, only a stimulus or a response is recorded + as part of an experiment. In this case, both the stimulus and response + will point to the same TimeSeries while the idx_start and count of the invalid + column will be set to -1, thus, indicating that no values have been recorded + for the stimulus or response, respectively. Note, a recording MUST contain + at least a stimulus or a response. Typically the stimulus and response are + PatchClampSeries. However, the use of AD/DA channels that are not associated + to an electrode is also common in intracellular electrophysiology, in which + case other TimeSeries may be used. + quantity: '?' + - name: simultaneous_recordings + neurodata_type_inc: SimultaneousRecordingsTable + doc: A table for grouping different intracellular recordings from the IntracellularRecordingsTable + table together that were recorded simultaneously from different electrodes. + quantity: '?' + - name: sequential_recordings + neurodata_type_inc: SequentialRecordingsTable + doc: A table for grouping different sequential recordings from the SimultaneousRecordingsTable + table together. This is typically used to group together sequential recordings + where a sequence of stimuli of the same type with varying parameters + have been presented in a sequence. + quantity: '?' + - name: repetitions + neurodata_type_inc: RepetitionsTable + doc: A table for grouping different sequential intracellular recordings together.
+ With each SequentialRecording typically representing a particular type of + stimulus, the RepetitionsTable table is typically used to group sets of + stimuli applied in sequence. + quantity: '?' + - name: experimental_conditions + neurodata_type_inc: ExperimentalConditionsTable + doc: A table for grouping different intracellular recording repetitions together + that belong to the same experimental condition. + quantity: '?' + - name: optogenetics + doc: Metadata describing optogenetic stimulation. + quantity: '?' + groups: + - neurodata_type_inc: OptogeneticStimulusSite + doc: An optogenetic stimulation site. + quantity: '*' + - name: optophysiology + doc: Metadata related to optophysiology. + quantity: '?' + groups: + - neurodata_type_inc: ImagingPlane + doc: An imaging plane. + quantity: '*' + - name: intervals + doc: Experimental intervals, whether that be logically distinct sub-experiments + having a particular scientific goal, trials (see trials subgroup) during an + experiment, or epochs (see epochs subgroup) deriving from analysis of data. + quantity: '?' + groups: + - name: epochs + neurodata_type_inc: TimeIntervals + doc: Divisions in time marking experimental stages or sub-divisions of a single + recording session. + quantity: '?' + - name: trials + neurodata_type_inc: TimeIntervals + doc: Repeated experimental events that have a logical grouping. + quantity: '?' + - name: invalid_times + neurodata_type_inc: TimeIntervals + doc: Time intervals that should be removed from analysis. + quantity: '?' + - neurodata_type_inc: TimeIntervals + doc: Optional additional table(s) for describing other experimental time intervals. + quantity: '*' + - name: units + neurodata_type_inc: Units + doc: Data about sorted spike units. + quantity: '?' + +- neurodata_type_def: LabMetaData + neurodata_type_inc: NWBContainer + doc: Lab-specific meta-data. + +- neurodata_type_def: Subject + neurodata_type_inc: NWBContainer + doc: Information about the animal or person from which the data was measured. + datasets: + - name: age + dtype: text + doc: Age of subject. Can be supplied instead of 'date_of_birth'. + quantity: '?' + attributes: + - name: reference + doc: "Age is with reference to this event. Can be 'birth' or + 'gestational'. If reference is omitted, 'birth' is implied." + dtype: text + required: false + default_value: birth + - name: date_of_birth + dtype: isodatetime + doc: Date of birth of subject. Can be supplied instead of 'age'. + quantity: '?' + - name: description + dtype: text + doc: Description of subject and where subject came from (e.g., breeder, if + animal). + quantity: '?' + - name: genotype + dtype: text + doc: Genetic strain. If absent, assume Wild Type (WT). + quantity: '?' + - name: sex + dtype: text + doc: Gender of subject. + quantity: '?' + - name: species + dtype: text + doc: Species of subject. + quantity: '?' + - name: strain + dtype: text + doc: Strain of subject. + quantity: '?' + - name: subject_id + dtype: text + doc: ID of animal/person used/participating in experiment (lab convention). + quantity: '?' + - name: weight + dtype: text + doc: Weight at time of experiment, at time of surgery and at other important + times. + quantity: '?'
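In MatNWB, the Subject type above maps onto types.core.Subject, attached at /general/subject. A minimal sketch with illustrative values (note the age reference attribute defaults to 'birth' per the spec):

    % Sketch: create a Subject and attach it to an NwbFile (assumed values).
    nwb = NwbFile( ...
        'identifier', 'EXAMPLE-0001', ...
        'session_description', 'example session', ...
        'session_start_time', datetime(2024, 1, 1, 'TimeZone', 'local'));
    nwb.general_subject = types.core.Subject( ...
        'subject_id', 'mouse-001', ...
        'age', 'P90D', ...            % ISO 8601 duration; reference is 'birth' by default
        'species', 'Mus musculus', ...
        'sex', 'M', ...
        'description', 'example subject');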
+ +datasets: +- neurodata_type_def: ScratchData + neurodata_type_inc: NWBData + doc: Any one-off datasets + attributes: + - name: notes + doc: 'Any notes the user has about the dataset being stored' + dtype: text diff --git a/nwb-schema/2.8.0/core/nwb.icephys.yaml b/nwb-schema/2.8.0/core/nwb.icephys.yaml new file mode 100644 index 00000000..62c1e46b --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.icephys.yaml @@ -0,0 +1,430 @@ +groups: +- neurodata_type_def: PatchClampSeries + neurodata_type_inc: TimeSeries + doc: An abstract base class for patch-clamp data - stimulus or response, + current or voltage. + attributes: + - name: stimulus_description + dtype: text + doc: Protocol/stimulus name for this patch-clamp dataset. + - name: sweep_number + dtype: uint32 + doc: Sweep number, allows to group different PatchClampSeries together. + required: false + datasets: + - name: data + dtype: numeric + dims: + - num_times + shape: + - null + doc: Recorded voltage or current. + attributes: + - name: unit + dtype: text + doc: Base unit of measurement for working with the data. Actual stored values are + not necessarily stored in these units. To access the data in these units, + multiply 'data' by 'conversion' and add 'offset'. + - name: gain + dtype: float32 + doc: Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). + quantity: '?' + links: + - name: electrode + target_type: IntracellularElectrode + doc: Link to IntracellularElectrode object that describes the electrode that was + used to apply or record this data. + +- neurodata_type_def: CurrentClampSeries + neurodata_type_inc: PatchClampSeries + doc: Voltage data from an intracellular current-clamp recording. A + corresponding CurrentClampStimulusSeries (stored separately as a stimulus) is + used to store the current injected. + datasets: + - name: data + doc: Recorded voltage. + attributes: + - name: unit + dtype: text + value: volts + doc: Base unit of measurement for working with the data, which is fixed to 'volts'. + Actual stored values are not necessarily stored in these units. To access the data in these units, + multiply 'data' by 'conversion' and add 'offset'. + - name: bias_current + dtype: float32 + doc: Bias current, in amps. + quantity: '?' + - name: bridge_balance + dtype: float32 + doc: Bridge balance, in ohms. + quantity: '?' + - name: capacitance_compensation + dtype: float32 + doc: Capacitance compensation, in farads. + quantity: '?' + +- neurodata_type_def: IZeroClampSeries + neurodata_type_inc: CurrentClampSeries + doc: Voltage data from an intracellular recording when all current + and amplifier settings are off (i.e., CurrentClampSeries fields will be zero). + There is no CurrentClampStimulusSeries associated with an IZero series because + the amplifier is disconnected and no stimulus can reach the cell. + attributes: + - name: stimulus_description + dtype: text + doc: An IZeroClampSeries has no stimulus, so this attribute is automatically set to "N/A". + value: N/A + datasets: + - name: bias_current + dtype: float32 + value: 0.0 + doc: Bias current, in amps, fixed to 0.0. + - name: bridge_balance + dtype: float32 + value: 0.0 + doc: Bridge balance, in ohms, fixed to 0.0. + - name: capacitance_compensation + dtype: float32 + value: 0.0 + doc: Capacitance compensation, in farads, fixed to 0.0. + +- neurodata_type_def: CurrentClampStimulusSeries + neurodata_type_inc: PatchClampSeries + doc: Stimulus current applied during current clamp recording. + datasets: + - name: data + doc: Stimulus current applied.
+ attributes: + - name: unit + dtype: text + value: amperes + doc: Base unit of measurement for working with the data, which is fixed to 'amperes'. + Actual stored values are not necessarily stored in these units. To access the data in these units, + multiply 'data' by 'conversion' and add 'offset'. + +- neurodata_type_def: VoltageClampSeries + neurodata_type_inc: PatchClampSeries + doc: Current data from an intracellular voltage-clamp recording. A + corresponding VoltageClampStimulusSeries (stored separately as a stimulus) is + used to store the voltage injected. + datasets: + - name: data + doc: Recorded current. + attributes: + - name: unit + dtype: text + value: amperes + doc: Base unit of measurement for working with the data, which is fixed to 'amperes'. + Actual stored values are not necessarily stored in these units. To access the data in these units, + multiply 'data' by 'conversion' and add 'offset'. + - name: capacitance_fast + dtype: float32 + doc: Fast capacitance, in farads. + quantity: '?' + attributes: + - name: unit + dtype: text + value: farads + doc: Unit of measurement for capacitance_fast, which is fixed to 'farads'. + - name: capacitance_slow + dtype: float32 + doc: Slow capacitance, in farads. + quantity: '?' + attributes: + - name: unit + dtype: text + value: farads + doc: Unit of measurement for capacitance_slow, which is fixed to 'farads'. + - name: resistance_comp_bandwidth + dtype: float32 + doc: Resistance compensation bandwidth, in hertz. + quantity: '?' + attributes: + - name: unit + dtype: text + value: hertz + doc: Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. + - name: resistance_comp_correction + dtype: float32 + doc: Resistance compensation correction, in percent. + quantity: '?' + attributes: + - name: unit + dtype: text + value: percent + doc: Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. + - name: resistance_comp_prediction + dtype: float32 + doc: Resistance compensation prediction, in percent. + quantity: '?' + attributes: + - name: unit + dtype: text + value: percent + doc: Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. + - name: whole_cell_capacitance_comp + dtype: float32 + doc: Whole cell capacitance compensation, in farads. + quantity: '?' + attributes: + - name: unit + dtype: text + value: farads + doc: Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. + - name: whole_cell_series_resistance_comp + dtype: float32 + doc: Whole cell series resistance compensation, in ohms. + quantity: '?' + attributes: + - name: unit + dtype: text + value: ohms + doc: Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. + +- neurodata_type_def: VoltageClampStimulusSeries + neurodata_type_inc: PatchClampSeries + doc: Stimulus voltage applied during a voltage clamp recording. + datasets: + - name: data + doc: Stimulus voltage applied. + attributes: + - name: unit + dtype: text + value: volts + doc: Base unit of measurement for working with the data, which is fixed to 'volts'. + Actual stored values are not necessarily stored in these units. To access the data in these units, + multiply 'data' by 'conversion' and add 'offset'. + +- neurodata_type_def: IntracellularElectrode + neurodata_type_inc: NWBContainer + doc: An intracellular electrode and its metadata. + datasets: + - name: cell_id + dtype: text + doc: Unique ID of the cell. + quantity: '?'
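The IntracellularElectrode container whose definition begins here (its remaining fields, including the required description and device link, follow below) pairs free-text metadata with a link to the recording Device. A rough MatNWB sketch, with illustrative names and assuming the object form of types.untyped.SoftLink:

    % Sketch: an IntracellularElectrode with its required device link.
    device = types.core.Device();
    nwb.general_devices.set('amplifier', device);
    electrode = types.core.IntracellularElectrode( ...
        'description', 'whole-cell patch electrode', ...
        'cell_id', 'cell-001', ...                 % optional ('?') per the spec
        'location', 'CA1, stratum pyramidale', ...
        'device', types.untyped.SoftLink(device));
    nwb.general_intracellular_ephys.set('elec0', electrode);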
+ - name: description + dtype: text + doc: Description of electrode (e.g., whole-cell, sharp, etc.). + - name: filtering + dtype: text + doc: Electrode specific filtering. + quantity: '?' + - name: initial_access_resistance + dtype: text + doc: Initial access resistance. + quantity: '?' + - name: location + dtype: text + doc: Location of the electrode. Specify the area, layer, comments on estimation + of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas + names for anatomical regions when possible. + quantity: '?' + - name: resistance + dtype: text + doc: Electrode resistance, in ohms. + quantity: '?' + - name: seal + dtype: text + doc: Information about seal used for recording. + quantity: '?' + - name: slice + dtype: text + doc: Information about slice used for recording. + quantity: '?' + links: + - name: device + target_type: Device + doc: Device that was used to record from this electrode. + +- neurodata_type_def: SweepTable + neurodata_type_inc: DynamicTable + doc: '[DEPRECATED] Table used to group different PatchClampSeries. SweepTable + is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable + tables. Additional SequentialRecordingsTable, RepetitionsTable, and + ExperimentalConditions tables provide enhanced support for experiment metadata.' + datasets: + - name: sweep_number + neurodata_type_inc: VectorData + dtype: uint32 + doc: Sweep number of the PatchClampSeries in that row. + - name: series + neurodata_type_inc: VectorData + dtype: + target_type: PatchClampSeries + reftype: object + doc: The PatchClampSeries with the sweep number in that row. + - name: series_index + neurodata_type_inc: VectorIndex + doc: Index for series. + +- neurodata_type_def: IntracellularElectrodesTable + neurodata_type_inc: DynamicTable + doc: Table for storing intracellular electrode related metadata. + attributes: + - name: description + dtype: text + value: Table for storing intracellular electrode related metadata. + doc: Description of what is in this dynamic table. + datasets: + - name: electrode + neurodata_type_inc: VectorData + dtype: + target_type: IntracellularElectrode + reftype: object + doc: Column for storing the reference to the intracellular electrode. + +- neurodata_type_def: IntracellularStimuliTable + neurodata_type_inc: DynamicTable + doc: Table for storing intracellular stimulus related metadata. + attributes: + - name: description + dtype: text + value: Table for storing intracellular stimulus related metadata. + doc: Description of what is in this dynamic table. + datasets: + - name: stimulus + neurodata_type_inc: TimeSeriesReferenceVectorData + doc: Column storing the reference to the recorded stimulus for the recording (rows). + - name: stimulus_template + neurodata_type_inc: TimeSeriesReferenceVectorData + doc: Column storing the reference to the stimulus template for the recording (rows). + quantity: '?' + +- neurodata_type_def: IntracellularResponsesTable + neurodata_type_inc: DynamicTable + doc: Table for storing intracellular response related metadata. + attributes: + - name: description + dtype: text + value: Table for storing intracellular response related metadata. + doc: Description of what is in this dynamic table. 
+ datasets: + - name: response + neurodata_type_inc: TimeSeriesReferenceVectorData + doc: Column storing the reference to the recorded response for the recording (rows) + +- neurodata_type_def: IntracellularRecordingsTable + neurodata_type_inc: AlignedDynamicTable + name: intracellular_recordings + doc: A table to group together a stimulus and response from a single electrode and + a single simultaneous recording. Each row in the table represents a single recording + consisting typically of a stimulus and a corresponding response. In some cases, + however, only a stimulus or a response is recorded as part of an experiment. + In this case, both the stimulus and response will point to the same TimeSeries + while the idx_start and count of the invalid column will be set to -1, thus, indicating + that no values have been recorded for the stimulus or response, respectively. + Note, a recording MUST contain at least a stimulus or a response. Typically the + stimulus and response are PatchClampSeries. However, the use of AD/DA channels + that are not associated to an electrode is also common in intracellular electrophysiology, + in which case other TimeSeries may be used. + attributes: + - name: description + dtype: text + value: A table to group together a stimulus and response from a single electrode + and a single simultaneous recording and for storing metadata about the intracellular + recording. + doc: Description of the contents of this table. Inherited from AlignedDynamicTable + and overwritten here to fix the value of the attribute. + groups: + - name: electrodes + neurodata_type_inc: IntracellularElectrodesTable + doc: Table for storing intracellular electrode related metadata. + - name: stimuli + neurodata_type_inc: IntracellularStimuliTable + doc: Table for storing intracellular stimulus related metadata. + - name: responses + neurodata_type_inc: IntracellularResponsesTable + doc: Table for storing intracellular response related metadata. + +- neurodata_type_def: SimultaneousRecordingsTable + neurodata_type_inc: DynamicTable + name: simultaneous_recordings + doc: A table for grouping different intracellular recordings from the IntracellularRecordingsTable + table together that were recorded simultaneously from different electrodes. + datasets: + - name: recordings + neurodata_type_inc: DynamicTableRegion + doc: A reference to one or more rows in the IntracellularRecordingsTable table. + attributes: + - name: table + dtype: + target_type: IntracellularRecordingsTable + reftype: object + doc: Reference to the IntracellularRecordingsTable table that this table region + applies to. This specializes the attribute inherited from DynamicTableRegion + to fix the type of table that can be referenced here. + - name: recordings_index + neurodata_type_inc: VectorIndex + doc: Index dataset for the recordings column. + +- neurodata_type_def: SequentialRecordingsTable + neurodata_type_inc: DynamicTable + name: sequential_recordings + doc: A table for grouping different sequential recordings from the SimultaneousRecordingsTable + table together. This is typically used to group together sequential recordings + where a sequence of stimuli of the same type with varying parameters have + been presented in a sequence. + datasets: + - name: simultaneous_recordings + neurodata_type_inc: DynamicTableRegion + doc: A reference to one or more rows in the SimultaneousRecordingsTable table. 
+ attributes: + - name: table + dtype: + target_type: SimultaneousRecordingsTable + reftype: object + doc: Reference to the SimultaneousRecordingsTable table that this table region + applies to. This specializes the attribute inherited from DynamicTableRegion + to fix the type of table that can be referenced here. + - name: simultaneous_recordings_index + neurodata_type_inc: VectorIndex + doc: Index dataset for the simultaneous_recordings column. + - name: stimulus_type + neurodata_type_inc: VectorData + dtype: text + doc: The type of stimulus used for the sequential recording. + +- neurodata_type_def: RepetitionsTable + neurodata_type_inc: DynamicTable + name: repetitions + doc: A table for grouping different sequential intracellular recordings together. + With each SequentialRecording typically representing a particular type of stimulus, + the RepetitionsTable table is typically used to group sets of stimuli applied + in sequence. + datasets: + - name: sequential_recordings + neurodata_type_inc: DynamicTableRegion + doc: A reference to one or more rows in the SequentialRecordingsTable table. + attributes: + - name: table + dtype: + target_type: SequentialRecordingsTable + reftype: object + doc: Reference to the SequentialRecordingsTable table that this table region + applies to. This specializes the attribute inherited from DynamicTableRegion + to fix the type of table that can be referenced here. + - name: sequential_recordings_index + neurodata_type_inc: VectorIndex + doc: Index dataset for the sequential_recordings column. + +- neurodata_type_def: ExperimentalConditionsTable + neurodata_type_inc: DynamicTable + name: experimental_conditions + doc: A table for grouping different intracellular recording repetitions together + that belong to the same experimental condition. + datasets: + - name: repetitions + neurodata_type_inc: DynamicTableRegion + doc: A reference to one or more rows in the RepetitionsTable table. + attributes: + - name: table + dtype: + target_type: RepetitionsTable + reftype: object + doc: Reference to the RepetitionsTable table that this table region applies + to. This specializes the attribute inherited from DynamicTableRegion to fix + the type of table that can be referenced here. + - name: repetitions_index + neurodata_type_inc: VectorIndex + doc: Index dataset for the repetitions column. diff --git a/nwb-schema/2.8.0/core/nwb.image.yaml b/nwb-schema/2.8.0/core/nwb.image.yaml new file mode 100644 index 00000000..3f782a63 --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.image.yaml @@ -0,0 +1,222 @@ +datasets: +- neurodata_type_def: GrayscaleImage + neurodata_type_inc: Image + dims: + - x + - y + shape: + - null + - null + doc: A grayscale image. + dtype: numeric + +- neurodata_type_def: RGBImage + neurodata_type_inc: Image + dims: + - x + - y + - r, g, b + shape: + - null + - null + - 3 + doc: A color image. + dtype: numeric + +- neurodata_type_def: RGBAImage + neurodata_type_inc: Image + dims: + - x + - y + - r, g, b, a + shape: + - null + - null + - 4 + doc: A color image with transparency. + dtype: numeric + +groups: +- neurodata_type_def: ImageSeries + neurodata_type_inc: TimeSeries + doc: General image data that is common between acquisition and stimulus time series. + Sometimes the image data is stored in the file in a raw format while other + times it will be stored as a series of external image files in the host file system. 
+ The data field will either be binary data, if the data is stored in the NWB file, or + empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z]. + datasets: + - name: data + dtype: numeric + dims: + - - frame + - x + - y + - - frame + - x + - y + - z + shape: + - - null + - null + - null + - - null + - null + - null + - null + doc: Binary data representing images across frames. If data are stored in an external + file, this should be an empty 3D array. + - name: dimension + dtype: int32 + dims: + - rank + shape: + - null + doc: Number of pixels on x, y, (and z) axes. + quantity: '?' + - name: external_file + dtype: text + dims: + - num_files + shape: + - null + doc: Paths to one or more external file(s). The field is only present if format='external'. + This is only relevant if the image series is stored in the file system as one + or more image file(s). This field should NOT be used if the image is stored + in another NWB file and that file is linked to this file. + quantity: '?' + attributes: + - name: starting_frame + dtype: int32 + dims: + - num_files + shape: + - null + doc: Each external image may contain one or more consecutive frames of the full + ImageSeries. This attribute serves as an index to indicate which frames each file + contains, to facilitate random access. The 'starting_frame' attribute, hence, + contains a list of frame numbers within the full ImageSeries of the first frame + of each file listed in the parent 'external_file' dataset. Zero-based indexing is + used (hence, the first element will always be zero). For example, if the + 'external_file' dataset has three paths to files and the first file has 5 frames, + the second file has 10 frames, and the third file has 20 frames, then this + attribute will have values [0, 5, 15]. If there is a single external file that + holds all of the frames of the ImageSeries (and so there is a single element in + the 'external_file' dataset), then this attribute should have value [0]. + - name: format + dtype: text + default_value: raw + doc: Format of image. If this is 'external', then the attribute 'external_file' + contains the path information to the image files. If this is 'raw', then the raw + (single-channel) binary data is stored in the 'data' dataset. If this attribute + is not present, then the default format='raw' case is assumed. + quantity: '?' + links: + - name: device + target_type: Device + doc: Link to the Device object that was used to capture these images. + quantity: '?' + +- neurodata_type_def: ImageMaskSeries + neurodata_type_inc: ImageSeries + doc: DEPRECATED. An alpha mask that is applied to a presented visual stimulus. The 'data' array + contains an array of mask values that are applied to the displayed image. Mask + values are stored as RGBA. Mask can vary with time. The timestamps array indicates + the starting time of a mask, and that mask pattern continues until it's explicitly + changed. + links: + - name: masked_imageseries + target_type: ImageSeries + doc: Link to ImageSeries object that this image mask is applied to. + +- neurodata_type_def: OpticalSeries + neurodata_type_inc: ImageSeries + doc: Image data that is presented or recorded. A stimulus template movie will be + stored only as an image. When the image is presented as stimulus, additional data + is required, such as field of view (e.g., how much of the visual field the image + covers, or what area of the target is being imaged).
If the OpticalSeries + represents acquired imaging data, orientation is also important. + datasets: + - name: distance + dtype: float32 + doc: Distance from camera/monitor to target/eye. + quantity: '?' + - name: field_of_view + dtype: float32 + dims: + - - width, height + - - width, height, depth + shape: + - - 2 + - - 3 + doc: Width, height and depth of image, or imaged area, in meters. + quantity: '?' + - name: data + dtype: numeric + dims: + - - frame + - x + - y + - - frame + - x + - y + - r, g, b + shape: + - - null + - null + - null + - - null + - null + - null + - 3 + doc: Images presented to subject, either grayscale or RGB. + - name: orientation + dtype: text + doc: Description of image relative to some reference frame (e.g., which way is + up). Must also specify frame of reference. + quantity: '?' + +- neurodata_type_def: IndexSeries + neurodata_type_inc: TimeSeries + doc: Stores indices to image frames stored in an ImageSeries. The purpose of the + IndexSeries is to allow a static image stack to be stored in an Images + object, and the images in the stack to be referenced out-of-order. This can be for + the display of individual images, or of movie segments (as a movie is simply a + series of images). The data field stores the index of the frame in the referenced + Images object, and the timestamps array indicates when that image + was displayed. + datasets: + - name: data + dtype: uint32 + dims: + - num_times + shape: + - null + doc: Index of the image (using zero-indexing) in the linked Images object. + attributes: + - name: conversion + dtype: float32 + doc: This field is unused by IndexSeries. + required: false + - name: resolution + dtype: float32 + doc: This field is unused by IndexSeries. + required: false + - name: offset + dtype: float32 + doc: This field is unused by IndexSeries. + required: false + - name: unit + dtype: text + value: N/A + doc: This field is unused by IndexSeries and has the value N/A. + links: + - name: indexed_timeseries + target_type: ImageSeries + doc: Link to ImageSeries object containing images that are indexed. Use of this link + is discouraged and will be deprecated. Link to an Images type instead. + quantity: '?' + - name: indexed_images + target_type: Images + doc: Link to Images object containing an ordered set of images that are indexed. The Images object + must contain an 'ordered_images' dataset specifying the order of the images in the Images type. + quantity: '?' diff --git a/nwb-schema/2.8.0/core/nwb.misc.yaml b/nwb-schema/2.8.0/core/nwb.misc.yaml new file mode 100644 index 00000000..2e4d3f78 --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.misc.yaml @@ -0,0 +1,331 @@ +groups: +- neurodata_type_def: AbstractFeatureSeries + neurodata_type_inc: TimeSeries + doc: Abstract features, such as quantitative descriptions of sensory stimuli. The + TimeSeries::data field is a 2D array, storing those features (e.g., for visual + grating stimulus this might be orientation, spatial frequency and contrast). Null + stimuli (eg, uniform gray) can be marked as being an independent feature (eg, + 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, + or through use of the TimeSeries::control fields. A set of features is considered + to persist until the next set of features is defined. The final set of features + stored should be the null set. This is useful when storing the raw stimulus + is impractical.
+ datasets: + - name: data + dtype: numeric + dims: + - - num_times + - - num_times + - num_features + shape: + - - null + - - null + - null + doc: Values of each feature at each time. + attributes: + - name: unit + dtype: text + default_value: see 'feature_units' + doc: Since there can be different units for different features, store the units + in 'feature_units'. The default value for this attribute is "see 'feature_units'". + required: false + - name: feature_units + dtype: text + dims: + - num_features + shape: + - null + doc: Units of each feature. + quantity: '?' + - name: features + dtype: text + dims: + - num_features + shape: + - null + doc: Description of the features represented in TimeSeries::data. + +- neurodata_type_def: AnnotationSeries + neurodata_type_inc: TimeSeries + doc: Stores user annotations made during an experiment. The data[] + field stores a text array, and timestamps are stored for each annotation (ie, + interval=1). This is largely an alias to a standard TimeSeries storing a text + array but that is identifiable as storing annotations in a machine-readable way. + datasets: + - name: data + dtype: text + dims: + - num_times + shape: + - null + doc: Annotations made during an experiment. + attributes: + - name: resolution + dtype: float32 + value: -1.0 + doc: Smallest meaningful difference between values in data. Annotations have + no units, so the value is fixed to -1.0. + - name: unit + dtype: text + value: n/a + doc: Base unit of measurement for working with the data. Annotations have + no units, so the value is fixed to 'n/a'. + +- neurodata_type_def: IntervalSeries + neurodata_type_inc: TimeSeries + doc: Stores intervals of data. The timestamps field stores the beginning and end + of intervals. The data field stores whether the interval just started (>0 value) + or ended (<0 value). Different interval types can be represented in the same series + by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature + C, etc). The field data stores an 8-bit integer. This is largely an alias of a + standard TimeSeries but that is identifiable as representing time intervals in + a machine-readable way. + datasets: + - name: data + dtype: int8 + dims: + - num_times + shape: + - null + doc: Use values >0 if interval started, <0 if interval ended. + attributes: + - name: resolution + dtype: float32 + value: -1.0 + doc: Smallest meaningful difference between values in data. Annotations have + no units, so the value is fixed to -1.0. + - name: unit + dtype: text + value: n/a + doc: Base unit of measurement for working with the data. Annotations have + no units, so the value is fixed to 'n/a'. + +- neurodata_type_def: DecompositionSeries + neurodata_type_inc: TimeSeries + doc: Spectral analysis of a time series, e.g. of an LFP or a speech signal. + datasets: + - name: data + dtype: numeric + dims: + - num_times + - num_channels + - num_bands + shape: + - null + - null + - null + doc: Data decomposed into frequency bands. + attributes: + - name: unit + dtype: text + default_value: no unit + doc: Base unit of measurement for working with the data. Actual stored values are + not necessarily stored in these units. To access the data in these units, + multiply 'data' by 'conversion'. + - name: metric + dtype: text + doc: The metric used, e.g. phase, amplitude, power. + - name: source_channels + neurodata_type_inc: DynamicTableRegion + doc: DynamicTableRegion pointer to the channels that this decomposition series was generated from. + quantity: '?' 
+ groups: + - name: bands + neurodata_type_inc: DynamicTable + doc: Table for describing the bands that this series was generated from. There + should be one row in this table for each band. + datasets: + - name: band_name + neurodata_type_inc: VectorData + dtype: text + doc: Name of the band, e.g. theta. + - name: band_limits + neurodata_type_inc: VectorData + dtype: float32 + dims: + - num_bands + - low, high + shape: + - null + - 2 + doc: Low and high limit of each band in Hz. If it is a Gaussian filter, use + 2 SD on either side of the center. + - name: band_mean + neurodata_type_inc: VectorData + dtype: float32 + dims: + - num_bands + shape: + - null + doc: The mean of the Gaussian filters, in Hz. + quantity: '?' + - name: band_stdev + neurodata_type_inc: VectorData + dtype: float32 + dims: + - num_bands + shape: + - null + doc: The standard deviation of the Gaussian filters, in Hz. + quantity: '?' + links: + - name: source_timeseries + target_type: TimeSeries + doc: Link to TimeSeries object that this data was calculated from. Metadata about + electrodes and their position can be read from that ElectricalSeries so it is + not necessary to store that information here. + quantity: '?' + +- neurodata_type_def: Units + neurodata_type_inc: DynamicTable + default_name: Units + doc: Data about spiking units. Event times of observed units (e.g. cell, synapse, + etc.) should be concatenated and stored in spike_times. + datasets: + - name: spike_times_index + neurodata_type_inc: VectorIndex + doc: Index into the spike_times dataset. + quantity: '?' + - name: spike_times + neurodata_type_inc: VectorData + dtype: float64 + doc: Spike times for each unit in seconds. + quantity: '?' + attributes: + - name: resolution + dtype: float64 + doc: The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate + from which spike times were extracted, but could be larger if the acquisition time series was downsampled or + smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be + between samples. + required: false + - name: obs_intervals_index + neurodata_type_inc: VectorIndex + doc: Index into the obs_intervals dataset. + quantity: '?' + - name: obs_intervals + neurodata_type_inc: VectorData + dtype: float64 + dims: + - num_intervals + - start|end + shape: + - null + - 2 + doc: Observation intervals for each unit. + quantity: '?' + - name: electrodes_index + neurodata_type_inc: VectorIndex + doc: Index into electrodes. + quantity: '?' + - name: electrodes + neurodata_type_inc: DynamicTableRegion + doc: Electrode that each spike unit came from, specified using a DynamicTableRegion. + quantity: '?' + - name: electrode_group + neurodata_type_inc: VectorData + dtype: + target_type: ElectrodeGroup + reftype: object + doc: Electrode group that each spike unit came from. + quantity: '?' + - name: waveform_mean + neurodata_type_inc: VectorData + dtype: float32 + dims: + - - num_units + - num_samples + - - num_units + - num_samples + - num_electrodes + shape: + - - null + - null + - - null + - null + - null + doc: Spike waveform mean for each spike unit. + quantity: '?' + attributes: + - name: sampling_rate + dtype: float32 + doc: Sampling rate, in hertz. + required: false + - name: unit + dtype: text + value: volts + doc: Unit of measurement. This value is fixed to 'volts'.
+ required: false + - name: waveform_sd + neurodata_type_inc: VectorData + dtype: float32 + dims: + - - num_units + - num_samples + - - num_units + - num_samples + - num_electrodes + shape: + - - null + - null + - - null + - null + - null + doc: Spike waveform standard deviation for each spike unit. + quantity: '?' + attributes: + - name: sampling_rate + dtype: float32 + doc: Sampling rate, in hertz. + required: false + - name: unit + dtype: text + value: volts + doc: Unit of measurement. This value is fixed to 'volts'. + required: false + - name: waveforms + neurodata_type_inc: VectorData + dtype: numeric + dims: + - num_waveforms + - num_samples + shape: + - null + - null + doc: "Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' + column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform + was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column + to indicate which spike events belong to a given unit. For example, if the + 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column + correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond + to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to + the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then + the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different + electrodes for the first spike time of the first unit. See + https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical + representation of this example. When there is only one electrode for each unit (i.e., each spike time is + associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is + the number of spike events. The number of electrodes for each spike event should be the same within a given unit. + The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order + of the waveforms within a given unit x spike event should be the same as the order of the electrodes referenced in + the 'electrodes' column of this table. The number of samples for each waveform must be the same." + quantity: '?' + attributes: + - name: sampling_rate + dtype: float32 + doc: Sampling rate, in hertz. + required: false + - name: unit + dtype: text + value: volts + doc: Unit of measurement. This value is fixed to 'volts'. + required: false + - name: waveforms_index + neurodata_type_inc: VectorIndex + doc: Index into the 'waveforms' dataset. One value for every spike event. See 'waveforms' for more detail. + quantity: '?' + - name: waveforms_index_index + neurodata_type_inc: VectorIndex + doc: Index into the 'waveforms_index' dataset. One value for every unit (row in the table). See 'waveforms' for more + detail. + quantity: '?' 
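The doubly-ragged layout described in the 'waveforms' doc above can be unpacked with plain cumulative-index arithmetic. A short MATLAB sketch using the example values quoted in the doc text:

    % Sketch: resolve the doubly-indexed 'waveforms' example from the doc.
    waveforms_index_index = [2 5 6];          % one entry per unit (table row)
    waveforms_index       = [3 6 8 10 12 13]; % one entry per spike event
    unitStarts  = [1, waveforms_index_index(1:end-1) + 1]; % first event of each unit
    unitStops   = waveforms_index_index;                   % last event of each unit
    eventStarts = [1, waveforms_index(1:end-1) + 1];       % first waveform row per event
    eventStops  = waveforms_index;                         % last waveform row per event
    % Waveform rows for the first spike event of unit 1 -> rows 1:3,
    % i.e., one waveform per electrode, matching the doc's worked example.
    rows = eventStarts(unitStarts(1)) : eventStops(unitStarts(1));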
diff --git a/nwb-schema/2.8.0/core/nwb.namespace.yaml b/nwb-schema/2.8.0/core/nwb.namespace.yaml new file mode 100644 index 00000000..dd967efe --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.namespace.yaml @@ -0,0 +1,60 @@ +namespaces: +- name: core + doc: NWB namespace + author: + - Andrew Tritt + - Oliver Ruebel + - Ryan Ly + - Ben Dichter + - Keith Godfrey + - Jeff Teeters + contact: + - ajtritt@lbl.gov + - oruebel@lbl.gov + - rly@lbl.gov + - bdichter@lbl.gov + - keithg@alleninstitute.org + - jteeters@berkeley.edu + full_name: NWB core + schema: + - namespace: hdmf-common + - doc: This source module contains base data types used throughout the NWB data + format. + source: nwb.base.yaml + title: Base data types + - doc: This source module contains neurodata_types for device data. + source: nwb.device.yaml + title: Devices + - doc: This source module contains neurodata_types for epoch data. + source: nwb.epoch.yaml + title: Epochs + - doc: This source module contains neurodata_types for image data. + source: nwb.image.yaml + title: Image data + - doc: Main NWB file specification. + source: nwb.file.yaml + title: NWB file + - doc: Miscellaneous types. + source: nwb.misc.yaml + title: Miscellaneous neurodata_types. + - doc: This source module contains neurodata_types for behavior data. + source: nwb.behavior.yaml + title: Behavior + - doc: This source module contains neurodata_types for extracellular electrophysiology + data. + source: nwb.ecephys.yaml + title: Extracellular electrophysiology + - doc: This source module contains neurodata_types for intracellular electrophysiology + data. + source: nwb.icephys.yaml + title: Intracellular electrophysiology + - doc: This source module contains neurodata_types for opto-genetics data. + source: nwb.ogen.yaml + title: Optogenetics + - doc: This source module contains neurodata_types for optical physiology data. + source: nwb.ophys.yaml + title: Optical physiology + - doc: This source module contains neurodata_type for retinotopy data. + source: nwb.retinotopy.yaml + title: Retinotopy + version: "2.8.0" diff --git a/nwb-schema/2.8.0/core/nwb.ogen.yaml b/nwb-schema/2.8.0/core/nwb.ogen.yaml new file mode 100644 index 00000000..419d6190 --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.ogen.yaml @@ -0,0 +1,48 @@ +groups: +- neurodata_type_def: OptogeneticSeries + neurodata_type_inc: TimeSeries + doc: An optogenetic stimulus. + datasets: + - name: data + dtype: numeric + dims: + - - num_times + - - num_times + - num_rois + shape: + - - null + - - null + - null + doc: Applied power for optogenetic stimulus, in watts. Shape can be 1D or 2D. + 2D data is meant to be used in an extension of OptogeneticSeries that + defines what the second dimension represents. + attributes: + - name: unit + dtype: text + value: watts + doc: Unit of measurement for data, which is fixed to 'watts'. + links: + - name: site + target_type: OptogeneticStimulusSite + doc: Link to OptogeneticStimulusSite object that describes the site to which this + stimulus was applied. + +- neurodata_type_def: OptogeneticStimulusSite + neurodata_type_inc: NWBContainer + doc: A site of optogenetic stimulation. + datasets: + - name: description + dtype: text + doc: Description of stimulation site. + - name: excitation_lambda + dtype: float32 + doc: Excitation wavelength, in nm. + - name: location + dtype: text + doc: Location of the stimulation site. Specify the area, layer, comments on estimation + of area/layer, stereotaxic coordinates if in vivo, etc. 
Use standard atlas + names for anatomical regions when possible. + links: + - name: device + target_type: Device + doc: Device that generated the stimulus. diff --git a/nwb-schema/2.8.0/core/nwb.ophys.yaml b/nwb-schema/2.8.0/core/nwb.ophys.yaml new file mode 100644 index 00000000..7fd4b217 --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.ophys.yaml @@ -0,0 +1,360 @@ +groups: +- neurodata_type_def: OnePhotonSeries + neurodata_type_inc: ImageSeries + doc: Image stack recorded over time from 1-photon microscope. + attributes: + - name: pmt_gain + dtype: float32 + doc: Photomultiplier gain. + required: false + - name: scan_line_rate + dtype: float32 + doc: Lines imaged per second. This is also stored in /general/optophysiology but + is kept here as it is useful information for analysis, and so good to be stored + w/ the actual data. + required: false + - name: exposure_time + dtype: float32 + doc: Exposure time of the sample; often the inverse of the frequency. + required: false + - name: binning + dtype: uint8 + doc: Number of pixels combined into 'bins'; could be 1, 2, 4, 8, etc. + required: false + - name: power + dtype: float32 + doc: Power of the excitation in mW, if known. + required: false + - name: intensity + dtype: float32 + doc: Intensity of the excitation in mW/mm^2, if known. + required: false + links: + - name: imaging_plane + target_type: ImagingPlane + doc: Link to ImagingPlane object from which this TimeSeries data was generated. + +- neurodata_type_def: TwoPhotonSeries + neurodata_type_inc: ImageSeries + doc: Image stack recorded over time from 2-photon microscope. + attributes: + - name: pmt_gain + dtype: float32 + doc: Photomultiplier gain. + required: false + - name: scan_line_rate + dtype: float32 + doc: Lines imaged per second. This is also stored in /general/optophysiology but + is kept here as it is useful information for analysis, and so good to be stored + w/ the actual data. + required: false + datasets: + - name: field_of_view + dtype: float32 + dims: + - - width|height + - - width|height|depth + shape: + - - 2 + - - 3 + doc: Width, height and depth of image, or imaged area, in meters. + quantity: '?' + links: + - name: imaging_plane + target_type: ImagingPlane + doc: Link to ImagingPlane object from which this TimeSeries data was generated. + +- neurodata_type_def: RoiResponseSeries + neurodata_type_inc: TimeSeries + doc: ROI responses over an imaging plane. The first dimension represents time. + The second dimension, if present, represents ROIs. + datasets: + - name: data + dtype: numeric + dims: + - - num_times + - - num_times + - num_ROIs + shape: + - - null + - - null + - null + doc: Signals from ROIs. + - name: rois + neurodata_type_inc: DynamicTableRegion + doc: DynamicTableRegion referencing into an ROITable containing information on the ROIs + stored in this timeseries. + +- neurodata_type_def: DfOverF + neurodata_type_inc: NWBDataInterface + default_name: DfOverF + doc: dF/F information about a region of interest (ROI). Storage hierarchy of dF/F + should be the same as for segmentation (i.e., same names for ROIs and for image + planes). + groups: + - neurodata_type_inc: RoiResponseSeries + doc: RoiResponseSeries object(s) containing dF/F for a ROI. + quantity: '+' + +- neurodata_type_def: Fluorescence + neurodata_type_inc: NWBDataInterface + default_name: Fluorescence + doc: Fluorescence information about a region of interest (ROI). Storage hierarchy + of fluorescence should be the same as for segmentation (ie, same names for ROIs + and for image planes).
+ groups: + - neurodata_type_inc: RoiResponseSeries + doc: RoiResponseSeries object(s) containing fluorescence data for a ROI. + quantity: '+' + +- neurodata_type_def: ImageSegmentation + neurodata_type_inc: NWBDataInterface + default_name: ImageSegmentation + doc: Stores pixels in an image that represent different regions of interest (ROIs) + or masks. All segmentation for a given imaging plane is stored together, with + storage for multiple imaging planes (masks) supported. Each ROI is stored in its + own subgroup, with the ROI group containing both a 2D mask and a list of pixels + that make up this mask. Segments can also be used for masking neuropil. If segmentation + is allowed to change with time, a new imaging plane (or module) is required and + ROI names should remain consistent between them. + groups: + - neurodata_type_inc: PlaneSegmentation + doc: Results from image segmentation of a specific imaging plane. + quantity: '+' + +- neurodata_type_def: PlaneSegmentation + neurodata_type_inc: DynamicTable + doc: Results from image segmentation of a specific imaging plane. + datasets: + - name: image_mask + neurodata_type_inc: VectorData + dims: + - - num_roi + - num_x + - num_y + - - num_roi + - num_x + - num_y + - num_z + shape: + - - null + - null + - null + - - null + - null + - null + - null + doc: ROI masks for each ROI. Each image mask is the size of the original imaging + plane (or volume) and members of the ROI are finite non-zero. + quantity: '?' + - name: pixel_mask_index + neurodata_type_inc: VectorIndex + doc: Index into pixel_mask. + quantity: '?' + - name: pixel_mask + neurodata_type_inc: VectorData + dtype: + - name: x + dtype: uint32 + doc: Pixel x-coordinate. + - name: y + dtype: uint32 + doc: Pixel y-coordinate. + - name: weight + dtype: float32 + doc: Weight of the pixel. + doc: 'Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel + masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation' + quantity: '?' + - name: voxel_mask_index + neurodata_type_inc: VectorIndex + doc: Index into voxel_mask. + quantity: '?' + - name: voxel_mask + neurodata_type_inc: VectorData + dtype: + - name: x + dtype: uint32 + doc: Voxel x-coordinate. + - name: y + dtype: uint32 + doc: Voxel y-coordinate. + - name: z + dtype: uint32 + doc: Voxel z-coordinate. + - name: weight + dtype: float32 + doc: Weight of the voxel. + doc: 'Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel + masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation' + quantity: '?' + groups: + - name: reference_images + doc: Image stacks that the segmentation masks apply to. + groups: + - neurodata_type_inc: ImageSeries + doc: One or more image stacks that the masks apply to (can be one-element + stack). + quantity: '*' + links: + - name: imaging_plane + target_type: ImagingPlane + doc: Link to ImagingPlane object from which this data was generated. + +- neurodata_type_def: ImagingPlane + neurodata_type_inc: NWBContainer + doc: An imaging plane and its metadata. + datasets: + - name: description + dtype: text + doc: Description of the imaging plane. + quantity: '?' + - name: excitation_lambda + dtype: float32 + doc: Excitation wavelength, in nm. + - name: imaging_rate + dtype: float32 + doc: Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored + there instead. + quantity: '?' + - name: indicator + dtype: text + doc: Calcium indicator. 
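The ImagingPlane definition beginning at the end of this hunk (its location, origin_coords, and grid_spacing fields follow below) is usually built together with an OpticalChannel and a Device link. A rough MatNWB sketch with illustrative values, assuming the object form of types.untyped.SoftLink:

    % Sketch: an ImagingPlane with one OpticalChannel (assumed values).
    device = types.core.Device();
    nwb.general_devices.set('two-photon-scope', device);
    channel = types.core.OpticalChannel( ...
        'description', 'green channel', ...
        'emission_lambda', 520.0);
    plane = types.core.ImagingPlane( ...
        'description', 'example plane', ...
        'excitation_lambda', 920.0, ...
        'imaging_rate', 30.0, ...
        'indicator', 'GCaMP6f', ...
        'location', 'V1, layer 2/3', ...
        'optical_channel', channel, ...
        'device', types.untyped.SoftLink(device));
    nwb.general_optophysiology.set('plane0', plane);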
+ - name: location + dtype: text + doc: Location of the imaging plane. Specify the area, layer, comments on estimation + of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas + names for anatomical regions when possible. + - name: manifold + dtype: float32 + dims: + - - height + - width + - x, y, z + - - height + - width + - depth + - x, y, z + shape: + - - null + - null + - 3 + - - null + - null + - null + - 3 + doc: "DEPRECATED Physical position of each pixel. 'xyz' represents the position\ + \ of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing." + quantity: '?' + attributes: + - name: conversion + dtype: float32 + default_value: 1.0 + doc: Scalar to multiply each element in data to convert it to the specified 'unit'. + If the data are stored in acquisition system units or other units + that require a conversion to be interpretable, multiply the data by 'conversion' + to convert the data to the specified 'unit'. e.g. if the data acquisition system + stores values in this object as pixels from x = -500 to 499, y = -500 to 499 + that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get + from raw data acquisition pixel units to meters is 2/1000. + required: false + - name: unit + dtype: text + default_value: meters + doc: Base unit of measurement for working with the data. The default value is 'meters'. + required: false + - name: origin_coords + dtype: float32 + dims: + - - x, y + - - x, y, z + shape: + - - 2 + - - 3 + doc: Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. + See also reference_frame for what the physical location is relative to (e.g., bregma). + quantity: '?' + attributes: + - name: unit + dtype: text + default_value: meters + doc: Measurement units for origin_coords. The default value is 'meters'. + - name: grid_spacing + dtype: float32 + dims: + - - x, y + - - x, y, z + shape: + - - 2 + - - 3 + doc: Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. + Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. + quantity: '?' + attributes: + - name: unit + dtype: text + default_value: meters + doc: Measurement units for grid_spacing. The default value is 'meters'. + - name: reference_frame + dtype: text + doc: Describes reference frame of origin_coords and grid_spacing. + For example, this can be a text description of the anatomical location and orientation of the grid + defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to + a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. + If origin_coords and grid_spacing are not present, then this field is not required. + For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix + (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in + x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more + rightward, and larger numbers in z means more ventral, then enter the following -- + origin_coords = (-1.2, -0.6, -2) + grid_spacing = (0.2, 0.2, 0.5) + reference_frame = "Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior + axis (larger index = more anterior). 
Second dimension corresponds to medial-lateral axis (larger index = more + rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral)." + quantity: '?' + groups: + - neurodata_type_inc: OpticalChannel + doc: An optical channel used to record from an imaging plane. + quantity: '+' + links: + - name: device + target_type: Device + doc: Link to the Device object that was used to record from this electrode. + +- neurodata_type_def: OpticalChannel + neurodata_type_inc: NWBContainer + doc: An optical channel used to record from an imaging plane. + datasets: + - name: description + dtype: text + doc: Description or other notes about the channel. + - name: emission_lambda + dtype: float32 + doc: Emission wavelength for channel, in nm. + +- neurodata_type_def: MotionCorrection + neurodata_type_inc: NWBDataInterface + default_name: MotionCorrection + doc: 'An image stack where all frames are shifted (registered) to a common coordinate + system, to account for movement and drift between frames. Note: each frame at + each point in time is assumed to be 2-D (has only x & y dimensions).' + groups: + - neurodata_type_inc: CorrectedImageStack + doc: Results from motion correction of an image stack. + quantity: '+' + +- neurodata_type_def: CorrectedImageStack + neurodata_type_inc: NWBDataInterface + doc: Results from motion correction of an image stack. + groups: + - name: corrected + neurodata_type_inc: ImageSeries + doc: Image stack with frames shifted to the common coordinates. + - name: xy_translation + neurodata_type_inc: TimeSeries + doc: Stores the x,y delta necessary to align each frame to the common coordinates, + for example, to align each frame to a reference image. + links: + - name: original + target_type: ImageSeries + doc: Link to ImageSeries object that is being registered. diff --git a/nwb-schema/2.8.0/core/nwb.retinotopy.yaml b/nwb-schema/2.8.0/core/nwb.retinotopy.yaml new file mode 100644 index 00000000..1cf98457 --- /dev/null +++ b/nwb-schema/2.8.0/core/nwb.retinotopy.yaml @@ -0,0 +1,234 @@ +groups: +- neurodata_type_def: ImagingRetinotopy + neurodata_type_inc: NWBDataInterface + default_name: ImagingRetinotopy + doc: 'DEPRECATED. Intrinsic signal optical imaging or widefield imaging for measuring + retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) of responses + to specific stimuli and a combined polarity map from which to identify visual areas. + This group does not store the raw responses imaged during retinotopic mapping or the + stimuli presented, but rather the resulting phase and power maps after applying a Fourier + transform on the averaged responses. + Note: for data consistency, all images and arrays are stored in the format [row][column] + and [row, col], which equates to [y][x]. Field of view and dimension arrays may + appear backward (i.e., y before x).' + datasets: + - name: axis_1_phase_map + dtype: float32 + dims: + - num_rows + - num_cols + shape: + - null + - null + doc: Phase response to stimulus on the first measured axis. + attributes: + - name: dimension + dtype: int32 + dims: + - num_rows, num_cols + shape: + - 2 + doc: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + - name: field_of_view + dtype: float32 + dims: + - height, width + shape: + - 2 + doc: Size of viewing area, in meters. + - name: unit + dtype: text + doc: Unit that axis data is stored in (e.g., degrees). 
+ - name: axis_1_power_map + dtype: float32 + dims: + - num_rows + - num_cols + shape: + - null + - null + doc: Power response on the first measured axis. Response is scaled so 0.0 is no + power in the response and 1.0 is maximum relative power. + quantity: '?' + attributes: + - name: dimension + dtype: int32 + dims: + - num_rows, num_cols + shape: + - 2 + doc: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + - name: field_of_view + dtype: float32 + dims: + - height, width + shape: + - 2 + doc: Size of viewing area, in meters. + - name: unit + dtype: text + doc: Unit that axis data is stored in (e.g., degrees). + - name: axis_2_phase_map + dtype: float32 + dims: + - num_rows + - num_cols + shape: + - null + - null + doc: Phase response to stimulus on the second measured axis. + attributes: + - name: dimension + dtype: int32 + dims: + - num_rows, num_cols + shape: + - 2 + doc: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + - name: field_of_view + dtype: float32 + dims: + - height, width + shape: + - 2 + doc: Size of viewing area, in meters. + - name: unit + dtype: text + doc: Unit that axis data is stored in (e.g., degrees). + - name: axis_2_power_map + dtype: float32 + dims: + - num_rows + - num_cols + shape: + - null + - null + doc: Power response on the second measured axis. Response is scaled so 0.0 is + no power in the response and 1.0 is maximum relative power. + quantity: '?' + attributes: + - name: dimension + dtype: int32 + dims: + - num_rows, num_cols + shape: + - 2 + doc: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + - name: field_of_view + dtype: float32 + dims: + - height, width + shape: + - 2 + doc: Size of viewing area, in meters. + - name: unit + dtype: text + doc: Unit that axis data is stored in (e.g., degrees). + - name: axis_descriptions + dtype: text + dims: + - axis_1, axis_2 + shape: + - 2 + doc: Two-element array describing the contents of the two response axis fields. + Description should be something like ['altitude', 'azimuth'] or '['radius', + 'theta']. + - name: focal_depth_image + dtype: uint16 + dims: + - num_rows + - num_cols + shape: + - null + - null + doc: 'Gray-scale image taken with same settings/parameters (e.g., focal depth, + wavelength) as data collection. Array format: [rows][columns].' + quantity: '?' + attributes: + - name: bits_per_pixel + dtype: int32 + doc: Number of bits used to represent each value. This is necessary to determine + maximum (white) pixel value. + - name: dimension + dtype: int32 + dims: + - num_rows, num_cols + shape: + - 2 + doc: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + - name: field_of_view + dtype: float32 + dims: + - height, width + shape: + - 2 + doc: Size of viewing area, in meters. + - name: focal_depth + dtype: float32 + doc: Focal depth offset, in meters. + - name: format + dtype: text + doc: Format of image. Right now only 'raw' is supported. + - name: sign_map + dtype: float32 + dims: + - num_rows + - num_cols + shape: + - null + - null + doc: Sine of the angle between the direction of the gradient in axis_1 and axis_2. + quantity: '?' + attributes: + - name: dimension + dtype: int32 + dims: + - num_rows, num_cols + shape: + - 2 + doc: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' 
+ - name: field_of_view + dtype: float32 + dims: + - height, width + shape: + - 2 + doc: Size of viewing area, in meters. + - name: vasculature_image + dtype: uint16 + dims: + - num_rows + - num_cols + shape: + - null + - null + doc: 'Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]' + attributes: + - name: bits_per_pixel + dtype: int32 + doc: Number of bits used to represent each value. This is necessary to determine + maximum (white) pixel value + - name: dimension + dtype: int32 + dims: + - num_rows, num_cols + shape: + - 2 + doc: 'Number of rows and columns in the image. NOTE: row, column representation + is equivalent to height, width.' + - name: field_of_view + dtype: float32 + dims: + - height, width + shape: + - 2 + doc: Size of viewing area, in meters. + - name: format + dtype: text + doc: Format of image. Right now only 'raw' is supported. diff --git a/nwb-schema/2.8.0/hdmf-common-schema/common/base.yaml b/nwb-schema/2.8.0/hdmf-common-schema/common/base.yaml new file mode 100644 index 00000000..1e8cbc00 --- /dev/null +++ b/nwb-schema/2.8.0/hdmf-common-schema/common/base.yaml @@ -0,0 +1,21 @@ +# hdmf-schema-language=2.0.2 +datasets: +- data_type_def: Data + doc: An abstract data type for a dataset. + +groups: +- data_type_def: Container + doc: An abstract data type for a group storing collections of data and + metadata. Base type for all data and metadata containers. + +- data_type_def: SimpleMultiContainer + data_type_inc: Container + doc: A simple Container for holding onto multiple containers. + datasets: + - data_type_inc: Data + quantity: '*' + doc: Data objects held within this SimpleMultiContainer. + groups: + - data_type_inc: Container + quantity: '*' + doc: Container objects held within this SimpleMultiContainer. diff --git a/nwb-schema/2.8.0/hdmf-common-schema/common/experimental.yaml b/nwb-schema/2.8.0/hdmf-common-schema/common/experimental.yaml new file mode 100644 index 00000000..0cb6ba45 --- /dev/null +++ b/nwb-schema/2.8.0/hdmf-common-schema/common/experimental.yaml @@ -0,0 +1,14 @@ +groups: [] +datasets: +- data_type_def: EnumData + data_type_inc: VectorData + dtype: uint8 + doc: Data that come from a fixed set of values. A data value of i corresponds + to the i-th value in the VectorData referenced by the 'elements' attribute. + attributes: + - name: elements + dtype: + target_type: VectorData + reftype: object + doc: Reference to the VectorData object that contains the enumerable elements + diff --git a/nwb-schema/2.8.0/hdmf-common-schema/common/namespace.yaml b/nwb-schema/2.8.0/hdmf-common-schema/common/namespace.yaml new file mode 100644 index 00000000..b174003f --- /dev/null +++ b/nwb-schema/2.8.0/hdmf-common-schema/common/namespace.yaml @@ -0,0 +1,51 @@ +# hdmf-schema-language=2.0.2 +namespaces: +- name: hdmf-common + doc: Common data structures provided by HDMF + author: + - Andrew Tritt + - Oliver Ruebel + - Ryan Ly + - Ben Dichter + contact: + - ajtritt@lbl.gov + - oruebel@lbl.gov + - rly@lbl.gov + - bdichter@lbl.gov + full_name: HDMF Common + schema: + - doc: base data types + source: base.yaml + title: Base data types + - doc: data types for a column-based table + source: table.yaml + title: Table data types + - doc: data types for different types of sparse matrices + source: sparse.yaml + title: Sparse data types + version: 1.8.0 + +- name: hdmf-experimental + doc: Experimental data structures provided by HDMF. These are not guaranteed to be available in the future. 
+ author: + - Andrew Tritt + - Oliver Ruebel + - Ryan Ly + - Ben Dichter + - Matthew Avaylon + contact: + - ajtritt@lbl.gov + - oruebel@lbl.gov + - rly@lbl.gov + - bdichter@lbl.gov + - mavaylon@lbl.gov + full_name: HDMF Experimental + schema: + - namespace: hdmf-common + - doc: Experimental data types + source: experimental.yaml + title: Experimental data types + - doc: data types for storing references to web accessible resources + source: resources.yaml + title: Resource reference data types + version: 0.5.0 diff --git a/nwb-schema/2.8.0/hdmf-common-schema/common/resources.yaml b/nwb-schema/2.8.0/hdmf-common-schema/common/resources.yaml new file mode 100644 index 00000000..5e091c77 --- /dev/null +++ b/nwb-schema/2.8.0/hdmf-common-schema/common/resources.yaml @@ -0,0 +1,105 @@ +# hdmf-schema-language=2.0.2 +groups: +- data_type_def: HERD + data_type_inc: Container + doc: "HDMF External Resources Data Structure. A set of six tables for tracking external resource references in a file or across multiple files." + datasets: + - data_type_inc: Data + name: keys + doc: A table for storing user terms that are used to refer to external resources. + dtype: + - name: key + dtype: text + doc: The user term that maps to one or more resources in the `resources` table, e.g., "human". + dims: + - num_rows + shape: + - null + + - data_type_inc: Data + name: files + doc: A table for storing object ids of files used in external resources. + dtype: + - name: file_object_id + dtype: text + doc: The object id (UUID) of a file that contains objects that refers to external resources. + dims: + - num_rows + shape: + - null + + - data_type_inc: Data + name: entities + doc: A table for mapping user terms (i.e., keys) to resource entities. + dtype: + - name: entity_id + dtype: text + doc: "The compact uniform resource identifier (CURIE) of the entity, in the form + [prefix]:[unique local identifier], e.g., 'NCBI_TAXON:9606'." + - name: entity_uri + dtype: text + doc: "The URI for the entity this reference applies to. This can be an empty string. + e.g., https://www.ncbi.nlm.nih.gov/Taxonomy/Browser/wwwtax.cgi?mode=info&id=9606" + dims: + - num_rows + shape: + - null + + - data_type_inc: Data + name: objects + doc: A table for identifying which objects in a file contain references to external resources. + dtype: + - name: files_idx + dtype: uint + doc: The row index to the file in the `files` table containing the object. + - name: object_id + dtype: text + doc: The object id (UUID) of the object. + - name: object_type + dtype: text + doc: The data type of the object. + - name: relative_path + dtype: text + doc: The relative path from the data object with the `object_id` to the dataset or attribute + with the value(s) that is associated with an external resource. This can be an empty + string if the object is a dataset that contains the value(s) that is associated + with an external resource. + - name: field + dtype: text + doc: The field within the compound data type using an external resource. This is used only if + the dataset or attribute is a compound data type; otherwise this should be an empty + string. + dims: + - num_rows + shape: + - null + + - data_type_inc: Data + name: object_keys + doc: A table for identifying which objects use which keys. + dtype: + - name: objects_idx + dtype: uint + doc: The row index to the object in the `objects` table that holds the key + - name: keys_idx + dtype: uint + doc: The row index to the key in the `keys` table. 
+ dims: + - num_rows + shape: + - null + + - data_type_inc: Data + name: entity_keys + doc: A table for identifying which keys use which entity. + dtype: + - name: entities_idx + dtype: uint + doc: The row index to the entity in the `entities` table. + - name: keys_idx + dtype: uint + doc: The row index to the key in the `keys` table. + dims: + - num_rows + shape: + - null diff --git a/nwb-schema/2.8.0/hdmf-common-schema/common/sparse.yaml b/nwb-schema/2.8.0/hdmf-common-schema/common/sparse.yaml new file mode 100644 index 00000000..09de8177 --- /dev/null +++ b/nwb-schema/2.8.0/hdmf-common-schema/common/sparse.yaml @@ -0,0 +1,35 @@ +# hdmf-schema-language=2.0.2 +groups: +- data_type_def: CSRMatrix + data_type_inc: Container + doc: 'A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are + stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].' + attributes: + - name: shape + dtype: uint + dims: + - number of rows, number of columns + shape: + - 2 + doc: The shape (number of rows, number of columns) of this sparse matrix. + datasets: + - name: indices + dtype: uint + dims: + - number of non-zero values + shape: + - null + doc: The column indices. + - name: indptr + dtype: uint + dims: + - number of rows in the matrix + 1 + shape: + - null + doc: The row index pointer. + - name: data + dims: + - number of non-zero values + shape: + - null + doc: The non-zero values in the matrix. diff --git a/nwb-schema/2.8.0/hdmf-common-schema/common/table.yaml b/nwb-schema/2.8.0/hdmf-common-schema/common/table.yaml new file mode 100644 index 00000000..2bacf8b3 --- /dev/null +++ b/nwb-schema/2.8.0/hdmf-common-schema/common/table.yaml @@ -0,0 +1,168 @@ +# hdmf-schema-language=2.0.2 +datasets: + +- data_type_def: VectorData + data_type_inc: Data + doc: An n-dimensional dataset representing a column of a DynamicTable. + If used without an accompanying VectorIndex, first dimension is + along the rows of the DynamicTable and each step along the first + dimension is a cell of the larger table. VectorData can also be + used to represent a ragged array if paired with a VectorIndex. + This allows for storing arrays of varying length in a single cell + of the DynamicTable by indexing into this VectorData. The first + vector is at VectorData[0:VectorIndex[0]]. The second vector is at + VectorData[VectorIndex[0]:VectorIndex[1]], and so on. + dims: + - - dim0 + - - dim0 + - dim1 + - - dim0 + - dim1 + - dim2 + - - dim0 + - dim1 + - dim2 + - dim3 + shape: + - - null + - - null + - null + - - null + - null + - null + - - null + - null + - null + - null + attributes: + - name: description + dtype: text + doc: Description of what these vectors represent. + +- data_type_def: VectorIndex + data_type_inc: VectorData + dtype: uint8 + doc: Used with VectorData to encode a ragged array. An array of indices + into the first dimension of the target VectorData, and forming a map + between the rows of a DynamicTable and the indices of the VectorData. + The name of the VectorIndex is expected to be the name of the target + VectorData object followed by "_index". + dims: + - num_rows + shape: + - null + attributes: + - name: target + dtype: + target_type: VectorData + reftype: object + doc: Reference to the target dataset that this index applies to. 
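To make the CSRMatrix layout above concrete, here is a minimal MATLAB sketch using hypothetical in-memory arrays (not MatNWB API calls); the schema describes 0-based, end-exclusive slices, converted here to MATLAB's 1-based indexing:
% Sparse matrix [10 0 0; 0 0 20; 30 0 40] in CSR form:
data    = [10 20 30 40];        % non-zero values
indices = uint32([0 2 0 2]);    % 0-based column index of each value
indptr  = uint32([0 1 2 4]);    % row boundaries into data/indices
% Non-zero entries of 0-based row iRow, i.e. indices[indptr[iRow]:indptr[iRow+1]]:
iRow    = 2;
sel     = indptr(iRow+1)+1 : indptr(iRow+2);  % shift to 1-based positions
rowCols = indices(sel);                       % -> [0 2]
rowVals = data(sel);                          % -> [30 40]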
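Similarly, a sketch of the ragged-array pattern that VectorData/VectorIndex describe above (again hypothetical arrays; each VectorIndex entry is the cumulative, end-exclusive stop position of one cell):
% Ragged column with three cells: [1 2], [3 4 5], [6]
vectorData  = [1 2 3 4 5 6];
vectorIndex = [2 5 6];          % stop position of each cell
bounds = [0, vectorIndex];      % implicit leading zero
k = 2;                          % retrieve the second cell
cellK = vectorData(bounds(k)+1 : bounds(k+1));   % -> [3 4 5]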
+ +- data_type_def: ElementIdentifiers + data_type_inc: Data + default_name: element_id + dtype: int + dims: + - num_elements + shape: + - null + doc: A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. + +- data_type_def: DynamicTableRegion + data_type_inc: VectorData + dtype: int + doc: DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a + link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) + (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without + data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. + `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell + of a `DynamicTable` can reference many rows of another `DynamicTable`. + dims: + - num_rows + shape: + - null + attributes: + - name: table + dtype: + target_type: DynamicTable + reftype: object + doc: Reference to the DynamicTable object that this region applies to. + - name: description + dtype: text + doc: Description of what this table region points to. + +groups: + +- data_type_def: DynamicTable + data_type_inc: Container + doc: A group containing multiple datasets that are aligned on the first dimension + (Currently, this requirement if left up to APIs to check and enforce). These datasets + represent different columns in the table. Apart from a column that contains unique + identifiers for each row, there are no other required datasets. Users are free to add + any number of custom VectorData objects (columns) here. DynamicTable also supports + ragged array columns, where each element can be of a different size. To add a ragged + array column, use a VectorIndex type to index the corresponding VectorData type. + See documentation for VectorData and VectorIndex for more details. + Unlike a compound data type, which is analogous to storing an + array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides + an alternative structure to choose from when optimizing storage for anticipated access + patterns. Additionally, this type provides a way of creating a table without having to + define a compound type up front. Although this convenience may be attractive, users + should think carefully about how data will be accessed. DynamicTable is more appropriate + for column-centric access, whereas a dataset with a compound type would be more + appropriate for row-centric access. Finally, data size should also be taken into account. + For small tables, performance loss may be an acceptable trade-off for the flexibility of + a DynamicTable. + attributes: + - name: colnames + dtype: text + dims: + - num_columns + shape: + - null + doc: The names of the columns in this table. This should be used to specify + an order to the columns. + - name: description + dtype: text + doc: Description of what is in this dynamic table. + datasets: + - name: id + data_type_inc: ElementIdentifiers + dtype: int + dims: + - num_rows + shape: + - null + doc: Array of unique identifiers for the rows of this dynamic table. + - data_type_inc: VectorData + doc: Vector columns, including index columns, of this dynamic table. + quantity: '*' + +- data_type_def: AlignedDynamicTable + data_type_inc: DynamicTable + doc: DynamicTable container that supports storing a collection of sub-tables. 
Each + sub-table is a DynamicTable itself that is aligned with the main table by row + index. I.e., all DynamicTables stored in this group MUST have the same number + of rows. This type effectively defines a 2-level table in which the main data + is stored in the main table implemented by this type and additional columns of + the table are grouped into categories, with each category being represented by + a separate DynamicTable stored within the group. + attributes: + - name: categories + dtype: text + dims: + - num_categories + shape: + - null + doc: The names of the categories in this AlignedDynamicTable. Each category is + represented by one DynamicTable stored in the parent group. This attribute should + be used to specify an order of categories and the category names must match + the names of the corresponding DynamicTable in the group. + groups: + - data_type_inc: DynamicTable + doc: A DynamicTable representing a particular category for columns in the AlignedDynamicTable + parent container. The table MUST be aligned with (i.e., have the same number + of rows) as all other DynamicTables stored in the AlignedDynamicTable parent + container. The name of the category is given by the name of the DynamicTable + and its description by the description attribute of the DynamicTable. + quantity: '*' diff --git a/nwbClearGenerated.m b/nwbClearGenerated.m index 0294c30b..8e0c6cfd 100644 --- a/nwbClearGenerated.m +++ b/nwbClearGenerated.m @@ -1,11 +1,29 @@ -function nwbClearGenerated() +function clearedNamespaceNames = nwbClearGenerated(targetFolder, options) %% NWBCLEARGENERATED clears generated class files. - nwbDir = misc.getMatnwbDir(); - typesPath = fullfile(nwbDir, '+types'); + arguments + targetFolder (1,1) string {mustBeFolder} = misc.getMatnwbDir() + options.ClearCache (1,1) logical = false + end + typesPath = fullfile(targetFolder, '+types'); listing = dir(typesPath); moduleNames = setdiff({listing.name}, {'+untyped', '+util', '.', '..'}); generatedPaths = fullfile(typesPath, moduleNames); for i=1:length(generatedPaths) rmdir(generatedPaths{i}, 's'); end + + if options.ClearCache + cachePath = fullfile(targetFolder, 'namespaces'); + listing = dir(fullfile(cachePath, '*.mat')); + generatedPaths = fullfile(cachePath, {listing.name}); + for i=1:length(generatedPaths) + delete(generatedPaths{i}); + end + end + + if nargout == 1 % Return names of cleared namespaces + [~, clearedNamespaceNames] = fileparts(generatedPaths); + clearedNamespaceNames = strrep(clearedNamespaceNames, '+', ''); + clearedNamespaceNames = string(clearedNamespaceNames); + end end \ No newline at end of file diff --git a/nwbExport.m b/nwbExport.m index 204fc040..655d891d 100644 --- a/nwbExport.m +++ b/nwbExport.m @@ -1,6 +1,6 @@ -function nwbExport(nwb, filenames) +function nwbExport(nwbFileObjects, filePaths, mode) %NWBEXPORT Writes an NWB file. - % nwbRead(nwb,filename) Writes the nwb object to a file at filename. + % nwbExport(nwb, filename) Writes the nwb object to a file at filename. % % Example: % % Generate Matlab code for the NWB objects from the core schema.
@@ -14,31 +14,19 @@ function nwbExport(nwb, filenames) % nwbExport(nwb, 'empty.nwb'); % % See also GENERATECORE, GENERATEEXTENSION, NWBFILE, NWBREAD - validateattributes(nwb, {'NwbFile'}, {'nonempty'}, 'nwbExport', 'nwb', 1); - validateattributes(filenames, {'cell', 'string', 'char'}, {'nonempty'}, 'nwbExport', 'filenames', 2); - if isstring(filenames) - filenames = convertStringsToChars(filenames); + + arguments + nwbFileObjects (1,:) NwbFile {mustBeNonempty} + filePaths (1,:) string {mustBeNonzeroLengthText} + mode (1,1) string {mustBeMember(mode, ["edit", "overwrite"])} = "edit" end - if iscell(filenames) - for iName = 1:length(filenames) - name = filenames{iName}; - validateattributes(name, {'string', 'char'}, {'scalartext', 'nonempty'} ... - , 'nwbExport', 'filenames', 2); - filenames{iName} = char(name); - end - end - if ~isscalar(nwb) - assert(~ischar(filenames) && length(filenames) == length(nwb), ... - 'NwbFile and filename array dimensions must match.'); - end - - for iFiles = 1:length(nwb) - if iscellstr(filenames) - filename = filenames{iFiles}; - else - filename = filenames; - end - - nwb(iFiles).export(filename); + + assert(length(nwbFileObjects) == length(filePaths), ... + 'NWB:Export:FilepathLengthMismatch', ... + 'Lists of NWB objects to export and list of file paths must be the same length.') + + for iFiles = 1:length(nwbFileObjects) + filePath = char(filePaths(iFiles)); + nwbFileObjects(iFiles).export(filePath, mode); end end diff --git a/nwbRead.m b/nwbRead.m index fbc85846..70ec5c6a 100644 --- a/nwbRead.m +++ b/nwbRead.m @@ -1,4 +1,4 @@ -function nwb = nwbRead(filename, varargin) +function nwb = nwbRead(filename, flags, options) %NWBREAD Reads an NWB file. % nwb = NWBREAD(filename) Reads the nwb file at filename and returns an % NWBFile object representing its contents. @@ -17,151 +17,98 @@ % % See also GENERATECORE, GENERATEEXTENSION, NWBFILE, NWBEXPORT - for iOption = 1:length(varargin) - option = varargin{iOption}; - if isstring(option) - option = char(option); - end - assert(ischar(option), 'NWB:Read:InvalidParameter' ... - , 'Invalid optional parameter in argument position %u', 1 + iOption); - varargin{iOption} = option; + arguments + filename (1,1) string {matnwb.common.mustBeNwbFile} end - - ignoreCache = any(strcmpi(varargin, 'ignorecache')); - - saveDirMask = strcmpi(varargin, 'savedir'); - assert(isempty(saveDirMask) || ~saveDirMask(end), 'NWB:NWBRead:InvalidSaveDir',... - '`savedir` is a key value pair requiring a directory string as a value.'); - if any(saveDirMask) - saveDir = varargin{find(saveDirMask, 1, 'last') + 1}; - else - saveDir = misc.getMatnwbDir(); + arguments (Repeating) + flags (1,1) string {mustBeMember(flags, "ignorecache")} end - - Blacklist = struct(... - 'attributes', {{'.specloc', 'object_id'}},... - 'groups', {{}}); - validateattributes(filename, {'char', 'string'}, {'scalartext', 'nonempty'} ... - , 'nwbRead', 'filename', 1); - - filename = char(filename); - specLocation = getEmbeddedSpec(filename); - schemaVersion = util.getSchemaVersion(filename); - if ~isempty(specLocation) - Blacklist.groups{end+1} = specLocation; + arguments + options.savedir (1,1) string = misc.getMatnwbDir(); % {mustBeFolder} ? end - % validate supported schema version - Schemas = dir(fullfile(misc.getMatnwbDir(), 'nwb-schema')); - supportedSchemas = setdiff({Schemas.name}, {'.', '..'}); - if ~any(strcmp(schemaVersion, supportedSchemas)) - warning('NWB:Read:UnsupportedSchema' ... - , ['NWB schema version %s is not support by this version of MatNWB. ' ... 
- 'This file is not guaranteed to be supported.'] ... - , schemaVersion); + regenerateSchemaClasses = not( any(strcmpi(string(flags), 'ignorecache')) ); + + schemaVersion = util.getSchemaVersion(filename); + try + matnwb.common.mustBeValidSchemaVersion(schemaVersion) + catch + warning('NWB:Read:UnsupportedSchema', ... + ['NWB schema version `%s` is not supported by this version of MatNWB. ' ... + 'This file is not guaranteed to be supported.'], schemaVersion ) end - - if ~ignoreCache + + specLocation = io.spec.getEmbeddedSpecLocation(filename); + + if regenerateSchemaClasses if isempty(specLocation) try - generateCore(schemaVersion, 'savedir', saveDir); + generateCore(schemaVersion, 'savedir', options.savedir); catch ME - if ~strcmp(ME.identifier, 'NWB:GenerateCore:MissingCoreSchema') + if ~strcmp(ME.identifier, 'NWB:VersionValidator:UnsupportedSchemaVersion') rethrow(ME); end end else - generateSpec(filename, h5info(filename, specLocation), 'savedir', saveDir); + generateEmbeddedSpec(filename, specLocation, 'savedir', options.savedir); end rehash(); end - - nwb = io.parseGroup(filename, h5info(filename), Blacklist); -end + blackList = struct(... + 'attributes', {{'.specloc', 'object_id'}},... + 'groups', {{}}); + if ~isempty(specLocation) + blackList.groups{end+1} = specLocation; end + nwb = io.parseGroup(filename, h5info(filename), blackList); end -function specLocation = getEmbeddedSpec(filename) - specLocation = ''; - fid = H5F.open(filename); - try - %check for .specloc - attributeId = H5A.open(fid, '.specloc'); - referenceRawData = H5A.read(attributeId); - specLocation = H5R.get_name(attributeId, 'H5R_OBJECT', referenceRawData); - H5A.close(attributeId); - catch ME - if ~strcmp(ME.identifier, 'MATLAB:imagesci:hdf5lib:libraryError') - rethrow(ME); - end % don't error if the attribute doesn't exist. - H5F.close(fid); -end -function generateSpec(filename, specinfo, varargin) - saveDirMask = strcmp(varargin, 'savedir'); - if any(saveDirMask) - assert(~saveDirMask(end),... - 'NWB:Read:InvalidParameter',... - 'savedir must be paired with the desired save directory.'); - saveDir = varargin{find(saveDirMask, 1, 'last') + 1}; - else - saveDir = misc.getMatnwbDir(); + +function generateEmbeddedSpec(filename, specLocation, options) +% generateEmbeddedSpec - Generate embedded specifications / namespaces + arguments + filename (1,1) string {mustBeFile} + specLocation (1,1) string + options.savedir (1,1) string = misc.getMatnwbDir(); % {mustBeFolder} ? end - - specNames = cell(size(specinfo.Groups)); - fid = H5F.open(filename); - for iGroup = 1:length(specinfo.Groups) - location = specinfo.Groups(iGroup).Groups(1); - - namespaceName = split(specinfo.Groups(iGroup).Name, '/'); - namespaceName = namespaceName{end}; - - filenames = {location.Datasets.Name}; - if ~any(strcmp('namespace', filenames)) - warning('NWB:Read:GenerateSpec:CacheInvalid',... - 'Couldn''t find a `namespace` in namespace `%s`. Skipping cache generation.',...
- namespaceName); - return; - end - sourceNames = {location.Datasets.Name}; - fileLocation = strcat(location.Name, '/', sourceNames); - schemaMap = containers.Map; - for iFileLocation = 1:length(fileLocation) - did = H5D.open(fid, fileLocation{iFileLocation}); - if strcmp('namespace', sourceNames{iFileLocation}) - namespaceText = H5D.read(did); - else - schemaMap(sourceNames{iFileLocation}) = H5D.read(did); - end - H5D.close(did); - end + + specs = io.spec.readEmbeddedSpecifications(filename, specLocation); + specNames = cell(size(specs)); + + for iSpec = 1:numel(specs) + namespaceName = specs{iSpec}.namespaceName; + namespaceDef = specs{iSpec}.namespaceText; + schemaMap = specs{iSpec}.schemaMap; + + parsedNamespace = spec.generate(namespaceDef, schemaMap); - Namespaces = spec.generate(namespaceText, schemaMap); - % Handle embedded namespaces. - Namespace = Namespaces(strcmp({Namespaces.name}, namespaceName)); - if isempty(Namespace) - % legacy checks in case namespaceName is using the old underscore - % conversion name. - namespaceName = strrep(namespaceName, '_', '-'); - Namespace = Namespaces(strcmp({Namespaces.name}, namespaceName)); + % Ensure the namespace name matches the name of the parsed namespace + isMatch = strcmp({parsedNamespace.name}, namespaceName); + if ~any(isMatch) % Legacy check + % Check if namespaceName is using the old underscore convention. + isMatch = strcmp({parsedNamespace.name}, strrep(namespaceName, '_', '-')); end - - assert(~isempty(Namespace), ... + + assert(any(isMatch), ... 'NWB:Namespace:NameNotFound', ... - 'Namespace %s not found in schema. Perhaps an extension should be generated?', ... + 'Namespace `%s` not found in specification. Perhaps an extension should be generated?', ... namespaceName); + + parsedNamespace = parsedNamespace(isMatch); - spec.saveCache(Namespace, saveDir); - specNames{iGroup} = Namespace.name; + spec.saveCache(parsedNamespace, options.savedir); + specNames{iSpec} = parsedNamespace.name; end - H5F.close(fid); missingNames = cell(size(specNames)); for iName = 1:length(specNames) name = specNames{iName}; try - file.writeNamespace(name, saveDir); + file.writeNamespace(name, options.savedir); catch ME + % Todo: Can this actually happen? if strcmp(ME.identifier, 'NWB:Namespace:CacheMissing') missingNames{iName} = name; else @@ -169,6 +116,7 @@ function generateSpec(filename, specinfo, varargin) end end end + missingNames(cellfun('isempty', missingNames)) = []; assert(isempty(missingNames), 'NWB:Namespace:DependencyMissing',... 'Missing generated caches and dependent caches for the following namespaces:\n%s',... diff --git a/nwbtest.m b/nwbtest.m index 18a5f86a..a1f2ed15 100644 --- a/nwbtest.m +++ b/nwbtest.m @@ -46,6 +46,7 @@ ws = pwd; nwbClearGenerated(); % Clear default files if any. 
+ cleanupObj = onCleanup(@() generateCore); cleaner = onCleanup(@generateCore); % Regenerate core when finished pvcell = struct2pvcell(parser.Unmatched); diff --git a/resources/functionSignatures.json b/resources/functionSignatures.json new file mode 100644 index 00000000..cf52fb2c --- /dev/null +++ b/resources/functionSignatures.json @@ -0,0 +1,81 @@ +{ + "_schemaVersion": "1.0.0", + "generateCore": + { + "inputs": + [ + { + "name":"version", + "kind":"required", + "type":"choices={'2.0.2','2.1.0','2.2.0','2.2.1','2.2.2','2.2.3','2.2.4','2.2.5','2.3.0','2.4.0','2.5.0','2.6.0','2.7.0'}", + "purpose": "Version number for NWB core schema specifications" + }, + { + "name":"savedir", + "kind":"namevalue", + "type":"folder", + "purpose": "Output folder for generated classes" + } + ] + }, + "generateCore": + { + "inputs": + [ + { + "name":"savedir", + "kind":"namevalue", + "type":"folder", + "purpose": "Output folder for generated classes" + } + ] + }, + "generateExtension": + { + "inputs": + [ + { + "name":"namespaceFilePath", + "repeating":true, + "kind":"required", + "type":"file=*.yaml,*.yml", + "purpose": "Path to a *namespace.yaml file." + }, + { + "name":"savedir", + "kind":"namevalue", + "type":"folder", + "purpose": "Output folder for generated classes" + } + ] + }, + "nwbRead": + { + "inputs": + [ + {"name":"filename", "kind":"required", "type":"file=*.nwb,*.h5"}, + {"mutuallyExclusiveGroup": + [ + [ + {"name":"flag", "kind":"ordered", "type":"choices='ignorecache'"} + ], + [ + {"name":"savedir", "kind":"namevalue", "type":"folder","purpose": "Output folder for generated classes"} + ] + ] + } + ], + "outputs": + [ + {"name":"NwbFile Object", "type":"nwbfile"} + ] + }, + "nwbExport": + { + "inputs": + [ + {"name":"nwbFileObjects", "kind":"required", "type":"NwbFile", "purpose":"An NWB file object or a list of NWB file objects"}, + {"name":"filePaths", "kind":"required", "type":"file=*.nwb", "purpose":"A filepath or a list of filepaths for exporting NWB file object(s)"} + ] + } +} diff --git a/tools/documentation/matnwb_exportModifiedTutorials.m b/tools/documentation/matnwb_exportModifiedTutorials.m new file mode 100644 index 00000000..ad1e2770 --- /dev/null +++ b/tools/documentation/matnwb_exportModifiedTutorials.m @@ -0,0 +1,33 @@ +function matnwb_exportModifiedTutorials() +% matnwb_exportModifiedTutorials - Export modified livescript tutorials to html +% +% See also matnwb_exportTutorials + + if exist("isMATLABReleaseOlderThan", "file") == 2 + hasGitRepo = ~isMATLABReleaseOlderThan("R2023b"); + else + hasGitRepo = false; + end + + if hasGitRepo + repo = gitrepo(misc.getMatnwbDir); + modifiedFiles = repo.ModifiedFiles; + else + modifiedFiles = matnwb_listModifiedFiles(); + end + + tutorialFolder = fullfile(misc.getMatnwbDir, 'tutorials'); + isInTutorialFolder = startsWith(modifiedFiles, tutorialFolder); + isLivescript = endsWith(modifiedFiles, ".mlx"); + + tutorialFiles = modifiedFiles(isInTutorialFolder & isLivescript); + + filesToIgnore = ["basicUsage", "read_demo", "remote_read"]; + isIgnored = endsWith(tutorialFiles, filesToIgnore + ".mlx"); + if any(isIgnored) + warning('Skipping export for the following files (see matnwb_exportTutorials):\n%s', ... 
+ strjoin(" - " + filesToIgnore(isIgnored) + ".mlx", newline)) + end + + matnwb_exportTutorials("FilePaths", tutorialFiles, "IgnoreFiles", filesToIgnore) +end diff --git a/tools/documentation/matnwb_exportTutorials.m b/tools/documentation/matnwb_exportTutorials.m new file mode 100644 index 00000000..b514a56f --- /dev/null +++ b/tools/documentation/matnwb_exportTutorials.m @@ -0,0 +1,93 @@ +function matnwb_exportTutorials(options) +% matnwb_exportTutorials - Export mlx tutorial files to the specified output format +% +% Note: This function will ignore the following live scripts: +% - basicUsage.mlx : depends on output from convertTrials.m +% - read_demo.mlx : depends on external data, potentially slow +% - remote_read.mlx : Uses nwbRead on s3 url, potentially very slow] +% +% To export all livescripts (assuming you have made sure the above-mentioned +% files will run) call the function with IgnoreFiles set to empty, i.e: +% matnwb_exportTutorials(..., "IgnoreFiles", string.empty) + + arguments + options.ExportFormat (1,:) string {mustStartWithDot} = [".m", ".html"] + options.Expression (1,1) string = "*" % Filter by expression + options.FileNames (1,:) string = string.empty % Filter by file names + options.FilePaths (1,:) string = string.empty % Export specified files + options.IgnoreFiles (1,:) string = ["basicUsage", "read_demo", "remote_read"]; + options.RunLivescript (1,1) logical = true + end + + [exportFormat, targetFolderNames] = deal(options.ExportFormat); + + targetFolderNames = extractAfter(targetFolderNames, "."); + targetFolderNames(strcmp(targetFolderNames, "m")) = fullfile("private", "mcode"); + + nwbTutorialDir = fullfile(misc.getMatnwbDir, "tutorials"); + targetFolderPaths = fullfile(nwbTutorialDir, targetFolderNames); + + for folderPath = targetFolderPaths + if ~isfolder(folderPath); mkdir(folderPath); end + end + + if isempty(options.FilePaths) + if endsWith(options.Expression, "*") + expression = options.Expression + ".mlx"; + else + expression = options.Expression + "*.mlx"; + end + + L = dir(fullfile(nwbTutorialDir, expression)); + filePaths = string( fullfile({L.folder}, {L.name}) ); + else + filePaths = options.FilePaths; + end + + [~, fileNames] = fileparts(filePaths); + if ~isempty(options.FileNames) + [fileNames, iA] = intersect(fileNames, options.FileNames, 'stable'); + filePaths = filePaths(iA); + end + + if ~isempty(options.IgnoreFiles) + [~, fileNames] = fileparts(filePaths); + [fileNames, iA] = setdiff(fileNames, options.IgnoreFiles, 'stable'); + filePaths = filePaths(iA); + end + + % Go to a temporary directory, so that tutorials are exported in a + % temporary folder which is cleaned up afterwards + currentDir = pwd(); + cleanupWorkdir = onCleanup(@(fp) cd(currentDir)); + + tempDir = fullfile(tempdir, 'nwbTutorials'); + if ~isfolder(tempDir); mkdir(tempDir); end + disp('Changing into temporary directory:') + cd(tempDir) + + cleanupDeleteTempFiles = onCleanup(@(fp) rmdir(tempDir, 's')); + disp(tempDir) + + for i = 1:numel(filePaths) + sourcePath = char( fullfile(filePaths(i)) ); + if options.RunLivescript + fprintf('Running livescript "%s"\n', fileNames(i)) + + matlab.internal.liveeditor.executeAndSave(sourcePath); + end + + for j = 1:numel(exportFormat) + targetPath = fullfile(targetFolderPaths(j), fileNames(i) + exportFormat(j)); + fprintf('Exporting livescript "%s" to "%s"\n', fileNames(i), exportFormat(j)) + export(sourcePath, strrep(targetPath, '.mlx', exportFormat(j))); + end + end +end + +function mustStartWithDot(value) + for i = 1:numel(value) + 
assert(startsWith(value(i), '.'), ... + 'Value must be a file extension starting with a period, e.g ".html"') + end +end diff --git a/+misc/generateDocs.m b/tools/documentation/matnwb_generateDocs.m similarity index 66% rename from +misc/generateDocs.m rename to tools/documentation/matnwb_generateDocs.m index f04614a5..d278df2e 100644 --- a/+misc/generateDocs.m +++ b/tools/documentation/matnwb_generateDocs.m @@ -1,25 +1,35 @@ -function generateDocs() -% GENERATEDOCS generates docs for MatNWB user API -% GENERATEDOCS() generate documentation for MATLAB files in the current working directory. +function matnwb_generateDocs() +% MATNWB_GENERATEDOCS generates html docs for MatNWB API functions % -% Requires m2html in your path. -rootFiles = dir('.'); +% matnwb_generateDocs() generates html documentation for MATLAB files in the +% current matnwb root directory. +% +% The following files are included: +% - generateCore.m +% - generateExtension.m +% - nwbRead.m +% - nwbExport.m +% +% Requires m2html in your path. + +rootDir = misc.getMatnwbDir(); +rootFiles = dir(rootDir); rootFiles = {rootFiles.name}; rootWhitelist = {'generateCore.m', 'generateExtension.m', 'nwbRead.m', 'nwbExport.m'}; isWhitelisted = ismember(rootFiles, rootWhitelist); rootFiles(~isWhitelisted) = []; -m2html('mfiles', rootFiles, 'htmldir', 'doc'); +docDir = fullfile(rootDir, 'doc'); +m2html('mfiles', rootFiles, 'htmldir', docDir); % correct html files in root directory as the stylesheets will be broken fprintf('Correcting files in root directory...\n'); -rootFiles = dir('doc'); +rootFiles = dir(docDir); rootFiles = {rootFiles.name}; htmlMatches = regexp(rootFiles, '\.html$', 'once'); isHtmlFile = ~cellfun('isempty', htmlMatches); rootFiles(~isHtmlFile) = []; -rootFiles = fullfile('doc', rootFiles); +rootFiles = fullfile(docDir, rootFiles); for iDoc=1:length(rootFiles) fileName = rootFiles{iDoc}; @@ -29,7 +39,7 @@ function generateDocs() % correct index.html so the header indicates MatNWB fprintf('Correcting index.html Header...\n'); -indexPath = fullfile('doc', 'index.html'); +indexPath = fullfile(docDir, 'index.html'); fileReplace(indexPath, 'Index for \.', 'Index for MatNWB'); % remove directories listing in index.html diff --git a/tools/githooks/pre-commit-linux b/tools/githooks/pre-commit-linux new file mode 100644 index 00000000..9f5befa7 --- /dev/null +++ b/tools/githooks/pre-commit-linux @@ -0,0 +1,70 @@ +#!/bin/bash +# NB: Not tested + +# Relative paths +TUTORIAL_FOLDER="tutorials" +PRIVATE_FOLDER="$TUTORIAL_FOLDER/private" + +# Define file mappings (script files and their corresponding documentation) +SCRIPT_FILES=("generateCore.m" "generateExtension.m" "nwbRead.m" "nwbExport.m") +DOC_FOLDER="doc" + +# Get modified files (staged + unstaged) +MODIFIED_FILES=$(git diff --cached --name-only) + +# Check for .mlx files in the tutorials folder +TUTORIAL_FILES=$(echo "$MODIFIED_FILES" | grep "^$TUTORIAL_FOLDER" | grep "\.mlx$") + +# If there are tutorial files, validate them +if [[ -n "$TUTORIAL_FILES" ]]; then + echo "Checking tutorial files..." 
+ + for TUTORIAL_FILE in $TUTORIAL_FILES; do + # Get the base name without extension + BASENAME=$(basename "$TUTORIAL_FILE" .mlx) + + # Find corresponding .html and .m files + HTML_FILE=$(find "$TUTORIAL_FOLDER" -name "$BASENAME.html" -print -quit) + MC_FILE=$(find "$PRIVATE_FOLDER" -name "$BASENAME.m" -print -quit) + + # Get modification dates (default to 0 if file doesn't exist) + TUTORIAL_FILE_DATE=$(stat -c "%Y" "$TUTORIAL_FILE") + HTML_FILE_DATE=$(stat -c "%Y" "$HTML_FILE" 2>/dev/null || echo 0) + MC_FILE_DATE=$(stat -c "%Y" "$MC_FILE" 2>/dev/null || echo 0) + + # Check if .html or .m file is outdated + if [[ "$TUTORIAL_FILE_DATE" -gt "$HTML_FILE_DATE" || "$TUTORIAL_FILE_DATE" -gt "$MC_FILE_DATE" ]]; then + echo "Error: Please re-export live script \"$BASENAME.mlx\"." >&2 + exit 1 + fi + done +fi + +# Flag to track if any files are outdated +OUTDATED_FOUND=0 + +# Loop through each script file +for SCRIPT_FILE in "${SCRIPT_FILES[@]}"; do + # Check if the script file has been modified + if echo "$MODIFIED_FILES" | grep -q "^$SCRIPT_FILE$"; then + # Get the corresponding HTML file in the doc folder + HTML_FILE="$DOC_FOLDER/${SCRIPT_FILE%.m}.html" + + # Get modification dates (default to 0 if file doesn't exist) + SCRIPT_FILE_DATE=$(stat -c "%Y" "$SCRIPT_FILE") + HTML_FILE_DATE=$(stat -c "%Y" "$HTML_FILE" 2>/dev/null || echo 0) + + # Check if the script is newer than the HTML file + if [[ "$SCRIPT_FILE_DATE" -gt "$HTML_FILE_DATE" ]]; then + echo "Error: Please re-export documentation for \"$SCRIPT_FILE\"." >&2 + OUTDATED_FOUND=1 + fi + fi +done + +# Exit with error if any files are outdated +if [[ $OUTDATED_FOUND -eq 1 ]]; then + exit 1 +fi + +exit 0 diff --git a/tools/githooks/pre-commit-mac b/tools/githooks/pre-commit-mac new file mode 100644 index 00000000..4e693d80 --- /dev/null +++ b/tools/githooks/pre-commit-mac @@ -0,0 +1,68 @@ +#!/bin/bash + +# Relative paths +TUTORIAL_FOLDER="tutorials" +PRIVATE_FOLDER="$TUTORIAL_FOLDER/private" + +# Define file mappings (script files and their corresponding documentation) +SCRIPT_FILES=("generateCore.m" "generateExtension.m" "nwbRead.m" "nwbExport.m") +DOC_FOLDER="doc" + +# Get modified files (staged) +MODIFIED_FILES=$(git diff --cached --name-only) + +# Check for .mlx files in the tutorials folder +TUTORIAL_FILES=$(echo "$MODIFIED_FILES" | grep "^$TUTORIAL_FOLDER" | grep "\.mlx$") + +# If there are tutorial files, check that they have been exported +if [[ -n "$TUTORIAL_FILES" ]]; then + + for TUTORIAL_FILE in $TUTORIAL_FILES; do + # Get the base name without extension + BASENAME=$(basename "$TUTORIAL_FILE" .mlx) + + # Find corresponding .html and .m files + HTML_FILE=$(find "$TUTORIAL_FOLDER" -name "$BASENAME.html" -print -quit) + MC_FILE=$(find "$PRIVATE_FOLDER" -name "$BASENAME.m" -print -quit) + + # Get modification dates (default to 0 if file doesn't exist) + TUTORIAL_FILE_DATE=$(stat -f "%m" "$TUTORIAL_FILE") + HTML_FILE_DATE=$(stat -f "%m" "$HTML_FILE" 2>/dev/null || echo 0) + MC_FILE_DATE=$(stat -f "%m" "$MC_FILE" 2>/dev/null || echo 0) + + # Check if .html or .m file is outdated + if [[ "$TUTORIAL_FILE_DATE" -gt "$HTML_FILE_DATE" || "$TUTORIAL_FILE_DATE" -gt "$MC_FILE_DATE" ]]; then + echo "Error: Please re-export html/m-files for live script \"$BASENAME.mlx\"." 
>&2 + exit 1 + fi + done +fi + +# Flag to track if any files are outdated +OUTDATED_FOUND=0 + +# Loop through each script file +for SCRIPT_FILE in "${SCRIPT_FILES[@]}"; do + # Check if the script file has been modified + if echo "$MODIFIED_FILES" | grep -q "^$SCRIPT_FILE$"; then + # Get the corresponding HTML file in the doc folder + HTML_FILE="$DOC_FOLDER/${SCRIPT_FILE%.m}.html" + + # Get modification dates (default to 0 if file doesn't exist) + SCRIPT_FILE_DATE=$(stat -f "%m" "$SCRIPT_FILE") + HTML_FILE_DATE=$(stat -f "%m" "$HTML_FILE" 2>/dev/null || echo 0) + + # Check if the script is newer than the HTML file + if [[ "$SCRIPT_FILE_DATE" -gt "$HTML_FILE_DATE" ]]; then + echo "Error: Please re-export documentation for \"$SCRIPT_FILE\"." >&2 + OUTDATED_FOUND=1 + fi + fi +done + +# Exit with error if any files are outdated +if [[ $OUTDATED_FOUND -eq 1 ]]; then + exit 1 +fi + +exit 0 diff --git a/tools/maintenance/matnwb_checkTutorials.m b/tools/maintenance/matnwb_checkTutorials.m new file mode 100644 index 00000000..cfc738f6 --- /dev/null +++ b/tools/maintenance/matnwb_checkTutorials.m @@ -0,0 +1,32 @@ +function matnwb_checkTutorials() +% matnwb_checkTutorials - Checks for modified MATLAB Live Script tutorial files +% in the repository and executes tests and html exports if found. +% +% This function determines whether any tutorial files in the `tutorials` +% directory have been modified in the matnwb repository. If such files exist, +% the function performs the following actions: +% 1. Runs unit tests matching the tutorial names. +% 2. Exports the modified tutorial files using the `matnwb_exportTutorials` +% function. +% +% Usage: +% matnwb_checkTutorials() +% +% See also matnwb_listModifiedFiles, matnwb_exportTutorials + + tutorialFolder = fullfile(misc.getMatnwbDir, 'tutorials'); + + modifiedFiles = matnwb_listModifiedFiles("all"); + + isInTutorialFolder = startsWith(modifiedFiles, tutorialFolder); + isLivescript = endsWith(modifiedFiles, ".mlx"); + + tutorialFiles = modifiedFiles(isInTutorialFolder & isLivescript); + + if ~isempty(tutorialFiles) + [~, fileNames] = fileparts(tutorialFiles); + fileNames = string(fileNames) + ".mlx"; + nwbtest('Name', 'tests.unit.Tutorial*', 'ParameterName', fileNames') + matnwb_exportTutorials("FilePaths", tutorialFiles) + end +end diff --git a/tools/maintenance/matnwb_listModifiedFiles.m b/tools/maintenance/matnwb_listModifiedFiles.m new file mode 100644 index 00000000..1eac87d4 --- /dev/null +++ b/tools/maintenance/matnwb_listModifiedFiles.m @@ -0,0 +1,60 @@ +function modifiedFiles = matnwb_listModifiedFiles(mode) +% matnwb_listModifiedFiles - Lists modified files in the repository using Git. +% +% This function identifies files modified in the current Git repository by +% executing a `git diff --name-only` command. The list of modified files is +% returned as a full file path relative to the repository root. +% +% Usage: +% modifiedFiles = matnwb_listModifiedFiles() +% +% Inputs: +% mode - (string) [optional] Which mode to use. Options: "all" or "staged" +% Whether to list all modified files or only files staged for +% commit. Default is "all". +% +% Outputs: +% modifiedFiles - (string array) A list of modified files in the repository, +% with absolute paths. If no modified files are detected, +% an empty string array is returned. +% +% Errors: +% - Raises an error if Git fails or is unavailable. 
+ + arguments + mode (1,1) string {mustBeMember(mode, ["staged", "all"])} = "all" + end + + currentDir = pwd; + cleanupObj = onCleanup(@(fp) cd(currentDir)); + + cd(misc.getMatnwbDir) + + switch mode + case "all" + [status, cmdout] = system([... + 'git --no-pager diff --cached --name-only ', ... + '&& git --no-pager diff --name-only | sort | uniq' ]); + case "staged" + [status, cmdout] = system('git --no-pager diff --cached --name-only'); + end + clear cleanupObj + + if status == 0 + modifiedFiles = splitlines(cmdout); + modifiedFiles = string(modifiedFiles); + modifiedFiles(modifiedFiles=="") = []; + modifiedFiles = removeHiddenFormatting(modifiedFiles); + modifiedFiles = fullfile(misc.getMatnwbDir, modifiedFiles); + else + error('Could not use git to detect modified files.') + end +end + +function cleanText = removeHiddenFormatting(inputText) + % Define the regex pattern for ANSI escape sequences + ansiPattern = '\x1B\[[0-9;]*[a-zA-Z]'; + + % Remove ANSI escape sequences using regexprep + cleanText = regexprep(inputText, ansiPattern, ''); +end diff --git a/tools/maintenance/validateCodecovSettings.m b/tools/maintenance/matnwb_validateCodecovSettings.m similarity index 81% rename from tools/maintenance/validateCodecovSettings.m rename to tools/maintenance/matnwb_validateCodecovSettings.m index 2bcd41bb..610c1ac0 100644 --- a/tools/maintenance/validateCodecovSettings.m +++ b/tools/maintenance/matnwb_validateCodecovSettings.m @@ -1,5 +1,5 @@ -function validateCodecovSettings() -% validateCodecovSettings Validate a codecov settings file. +function matnwb_validateCodecovSettings() +% matnwb_validateCodecovSettings Validate a codecov settings file. % % Note: This is a utility function developer's can use to check the % codecov settings file in .github/.codecov.yaml diff --git a/tools/matnwb_setup.m b/tools/matnwb_setup.m new file mode 100644 index 00000000..e6eef6cc --- /dev/null +++ b/tools/matnwb_setup.m @@ -0,0 +1,11 @@ +% This setup script is meant for developers of the matnwb project +% +% Install git hooks +% Download developer dependencies + +currentFolder = fileparts(mfilename('fullpath')); +addpath(genpath(currentFolder)) + +matnwb_installGitHooks() + +matnwb_installm2html(fileparts(currentFolder)) diff --git a/tools/setup/matnwb_installGitHooks.m b/tools/setup/matnwb_installGitHooks.m new file mode 100644 index 00000000..dcd2790b --- /dev/null +++ b/tools/setup/matnwb_installGitHooks.m @@ -0,0 +1,58 @@ +function matnwb_installGitHooks(projectDirectory) +% matnwb_installGitHooks - Install git hooks +% Installs git hooks from the specified folder to the git project. +% +% Arguments: +% projectDirectory (string): Root directory of the git project (Optional). +% Default assumes this function is located two +% subfolder levels down from the root, e.g., +% tools/githooks. + + arguments + % Project directory - root directory for git project. Assumes this + % function is three subfolder levels down from root directory, i.e + % tools/setup/ + projectDirectory (1,1) string {mustBeFolder} = ... + fileparts(fileparts(fileparts(mfilename('fullpath')))) + end + + gitHooksSourceFolder = fullfile(projectDirectory, 'tools', 'githooks'); + + % Define supported hook names + supportedHookNames = ["pre-commit", "pre-push"]; + + % Git hooks folder in the project directory + gitHooksTargetFolder = fullfile(projectDirectory, '.git', 'hooks'); + + % Ensure the git hooks folder exists + if ~isfolder(gitHooksTargetFolder) + error("installHooks:InvalidGitRepository", ... 
+ "The specified project directory does not contain a valid git repository."); + end + + for hookName = supportedHookNames + if ismac + postfix = "mac"; + elseif isunix + postfix = "linux"; + elseif ispc + postfix = "win"; + end + + listing = dir(fullfile(gitHooksSourceFolder, hookName+"-"+postfix)); + if ~isempty(listing) + targetPath = fullfile(gitHooksTargetFolder, hookName); + scriptContent = fileread(fullfile(gitHooksSourceFolder, listing.name)); + + fid = fopen(targetPath, "wt"); + fwrite(fid, scriptContent); + fclose(fid); + + if isunix + % Make the target executable + system(sprintf('chmod +x "%s"', targetPath)); + end + fprintf('Installed hook: %s -> %s\n', hookName, targetPath); + end + end +end diff --git a/tools/setup/matnwb_installm2html.m b/tools/setup/matnwb_installm2html.m new file mode 100644 index 00000000..7993da19 --- /dev/null +++ b/tools/setup/matnwb_installm2html.m @@ -0,0 +1,41 @@ +function matnwb_installm2html(projectDirectory) + + arguments + projectDirectory (1,1) string {mustBeFolder} + end + + % Define repository URL and target folder + repoURL = 'https://github.com/gllmflndn/m2html.git'; + targetFolder = fullfile(projectDirectory, 'tools', 'external', 'm2html'); + + % Step 1: Clone m2html into tools/external/ + if ~isfolder(targetFolder) + fprintf('Cloning m2html into %s...\n', targetFolder); + system(sprintf('git clone %s %s', repoURL, targetFolder)); + addpath(targetFolder); savepath() + fprintf('Clone complete.\n'); + else + fprintf('Target folder %s already exists. Skipping cloning step.\n', targetFolder); + end + + % Step 2: Add tools/external to .git/info/exclude + excludeFile = fullfile(projectDirectory, '.git', 'info', 'exclude'); + targetFolderRelative = fullfile('tools', 'external'); + if isfile(excludeFile) + % Read current contents of the exclude file + excludeContents = fileread(excludeFile); + + % Check if the path is already excluded + if ~contains(excludeContents, targetFolderRelative) + fprintf('Adding %s to .git/info/exclude...\n', targetFolderRelative); + fid = fopen(excludeFile, 'a'); % Open for appending + fprintf(fid, '\n%s\n', targetFolderRelative); % Add the path to the exclude file + fclose(fid); + fprintf('Path added to exclude file.\n'); + else + fprintf('Path %s is already in the exclude file. Skipping.\n', targetFolderRelative); + end + else + fprintf('Exclude file not found. 
Make sure you are in a Git repository.\n'); + end +end \ No newline at end of file diff --git a/tutorials/.gitattributes b/tutorials/.gitattributes new file mode 100644 index 00000000..6be91349 --- /dev/null +++ b/tutorials/.gitattributes @@ -0,0 +1 @@ +*.mlx binary diff --git a/tutorials/basicUsage.mlx b/tutorials/basicUsage.mlx index ec806102..1ffaae6f 100644 Binary files a/tutorials/basicUsage.mlx and b/tutorials/basicUsage.mlx differ diff --git a/tutorials/behavior.mlx b/tutorials/behavior.mlx index 33fdc5cd..3d118372 100644 Binary files a/tutorials/behavior.mlx and b/tutorials/behavior.mlx differ diff --git a/tutorials/dimensionMapNoDataPipes.mlx b/tutorials/dimensionMapNoDataPipes.mlx index 630ffdc5..fd616514 100644 Binary files a/tutorials/dimensionMapNoDataPipes.mlx and b/tutorials/dimensionMapNoDataPipes.mlx differ diff --git a/tutorials/dimensionMapWithDataPipes.mlx b/tutorials/dimensionMapWithDataPipes.mlx index f24f6505..dc0b0d15 100644 Binary files a/tutorials/dimensionMapWithDataPipes.mlx and b/tutorials/dimensionMapWithDataPipes.mlx differ diff --git a/tutorials/dynamic_tables.mlx b/tutorials/dynamic_tables.mlx index 35411bdf..df8f7e26 100644 Binary files a/tutorials/dynamic_tables.mlx and b/tutorials/dynamic_tables.mlx differ diff --git a/tutorials/dynamically_loaded_filters.mlx b/tutorials/dynamically_loaded_filters.mlx index a921b4ee..06a282de 100644 Binary files a/tutorials/dynamically_loaded_filters.mlx and b/tutorials/dynamically_loaded_filters.mlx differ diff --git a/tutorials/ecephys.mlx b/tutorials/ecephys.mlx index 5bffdbf2..f5b4a2ef 100644 Binary files a/tutorials/ecephys.mlx and b/tutorials/ecephys.mlx differ diff --git a/tutorials/html/behavior.html b/tutorials/html/behavior.html index e3b1f055..4f70f592 100644 --- a/tutorials/html/behavior.html +++ b/tutorials/html/behavior.html @@ -1,27 +1,125 @@ -Behavior Data

Behavior Data

This tutorial will guide you in writing behavioral data to NWB.

Creating an NWB File

Create an NWBFile object with the required fields (session_description, identifier, and session_start_time) and additional metadata.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3), ...
'general_experimenter', 'My Name', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', 'DOI:10.1016/j.neuron.2016.12.011'); % optional
nwb

SpatialSeries: Storing continuous spatial data

SpatialSeries is a subclass of TimeSeries that represents data in space, such as the spatial direction (e.g., of gaze or travel) or the position of an animal over time.
Create data that corresponds to x, y position over time.
position_data = [linspace(0, 10, 50); linspace(0, 8, 50)];
In SpatialSeries data, the first dimension is always time (in seconds); the second dimension represents the x, y position. SpatialSeries data should be stored as one continuous stream as it is acquired, not reshaped by trials as is often done for analysis. Data can be trial-aligned on-the-fly using the trials table. See the trials tutorial for further information.
For position data, reference_frame indicates the zero-position, e.g., the (0,0) point might be the bottom-left corner of an enclosure, as viewed from the tracking camera.
timestamps = linspace(0, 50)/ 200;
position_spatial_series = types.core.SpatialSeries( ...
'description', 'Position (x, y) in an open field.', ...
'data', position_data, ...
'timestamps', timestamps, ...
'reference_frame', '(0,0) is the bottom left corner.' ...
)

Position: Storing position measured over time

To help data analysis and visualization tools know that this SpatialSeries object represents the position of the subject, store the SpatialSeries object inside a Position object, which can hold one or more SpatialSeries objects.
position = types.core.Position();
position.spatialseries.set('SpatialSeries', position_spatial_series);

Create a Behavior Processing Module

Create a processing module called "behavior" for storing behavioral data in the NWBFile, then add the Position object to the processing module.
behavior_processing_module = types.core.ProcessingModule('description', 'stores behavioral data.');
behavior_processing_module.nwbdatainterface.set("Position", position);
nwb.processing.set("behavior", behavior_processing_module);

CompassDirection: Storing view angle measured over time

Analogous to how position can be stored, we can create a SpatialSeries object for representing the view angle of the subject.
For direction data, reference_frame indicates the zero direction; for instance, in this case "straight ahead" is 0 radians.
view_angle_data = linspace(0, 4, 50);
direction_spatial_series = types.core.SpatialSeries( ...
'description', 'View angle of the subject measured in radians.', ...
'data', view_angle_data, ...
'timestamps', timestamps, ...
'reference_frame', 'straight ahead', ...
'data_unit', 'radians' ...
);
direction = types.core.CompassDirection();
direction.spatialseries.set('spatial_series', direction_spatial_series);
We can add a CompassDirection object to the behavior processing module the same way we have added the position data.
%behavior_processing_module = types.core.ProcessingModule("stores behavioral data."); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('CompassDirection', direction);
%nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it

BehavioralTimeSeries: Storing continuous behavior data

BehavioralTimeSeries is an interface for storing continuous behavior data, such as the speed of a subject.
speed_data = linspace(0, 0.4, 50);
 
speed_time_series = types.core.TimeSeries( ...
'data', speed_data, ...
'starting_time_rate', 10.0, ... % Hz
'description', 'The speed of the subject measured over time.', ...
'data_unit', 'm/s' ...
);
 
behavioral_time_series = types.core.BehavioralTimeSeries();
behavioral_time_series.timeseries.set('speed', speed_time_series);
 
%behavior_processing_module = types.core.ProcessingModule("stores behavioral data."); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('BehavioralTimeSeries', behavioral_time_series);
%nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it

BehavioralEvents: Storing behavioral events

BehavioralEvents is an interface for storing behavioral events. We can use it for storing the timing and amount of rewards (e.g. water amount) or lever press times.
reward_amount = [1.0, 1.5, 1.0, 1.5];
event_timestamps = [1.0, 2.0, 5.0, 6.0];
 
time_series = types.core.TimeSeries( ...
'data', reward_amount, ...
'timestamps', event_timestamps, ...
'description', 'The water amount the subject received as a reward.', ...
'data_unit', 'ml' ...
);
 
behavioral_events = types.core.BehavioralEvents();
behavioral_events.timeseries.set('lever_presses', time_series);
 
%behavior_processing_module = types.core.ProcessingModule("stores behavioral data."); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('BehavioralEvents', behavioral_events);
%nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it
Storing only the timestamps of the events is possible with the ndx-events NWB extension. You can also add labels associated with the events with this extension. You can find information about installation and example usage here.

BehavioralEpochs: Storing intervals of behavior data

BehavioralEpochs is for storing intervals of behavior data. BehavioralEpochs uses IntervalSeries to represent the time intervals. Create an IntervalSeries object that represents the time intervals when the animal was running. IntervalSeries uses 1 to indicate the beginning of an interval and -1 to indicate the end.
run_intervals = types.core.IntervalSeries( ...
'description', 'Intervals when the animal was running.', ...
'data', [1, -1, 1, -1, 1, -1], ...
'timestamps', [0.5, 1.5, 3.5, 4.0, 7.0, 7.3] ...
);
 
behavioral_epochs = types.core.BehavioralEpochs();
behavioral_epochs.intervalseries.set('running', run_intervals);
You can add more than one IntervalSeries to a BehavioralEpochs object.
sleep_intervals = types.core.IntervalSeries( ...
'description', 'Intervals when the animal was sleeping', ...
'data', [1, -1, 1, -1], ...
'timestamps', [15.0, 30.0, 60.0, 95.0] ...
);
behavioral_epochs.intervalseries.set('sleeping', sleep_intervals);
 
% behavior_processing_module = types.core.ProcessingModule("stores behavioral data.");
% behavior_processing_module.nwbdatainterface.set('BehavioralEvents', behavioral_events);
% nwb.processing.set('behavior', behavior_processing_module);

Another approach: TimeIntervals

Using TimeIntervals to represent time intervals is often preferred over BehavioralEpochs and IntervalSeries. TimeIntervals is a subclass of DynamicTable, which offers flexibility for tabular data by allowing the addition of optional columns which are not defined in the standard.
sleep_intervals = types.core.TimeIntervals( ...
'description', 'Intervals when the animal was sleeping.', ...
'colnames', {'start_time', 'stop_time', 'stage'} ...
);
 
sleep_intervals.addRow('start_time', 0.3, 'stop_time', 0.35, 'stage', 1);
sleep_intervals.addRow('start_time', 0.7, 'stop_time', 0.9, 'stage', 2);
sleep_intervals.addRow('start_time', 1.3, 'stop_time', 3.0, 'stage', 3);
 
nwb.intervals.set('sleep_intervals', sleep_intervals);

EyeTracking: Storing continuous eye-tracking data of gaze direction

EyeTracking is for storing eye-tracking data which represents direction of gaze as measured by an eye tracking algorithm. An EyeTracking object holds one or more SpatialSeries objects that represent the gaze direction over time extracted from a video.
eye_position_data = [linspace(-20, 30, 50); linspace(30, -20, 50)];
 
right_eye_position = types.core.SpatialSeries( ...
'description', 'The position of the right eye measured in degrees.', ...
'data', eye_position_data, ...
'starting_time_rate', 50.0, ... % Hz
'reference_frame', '(0,0) is middle', ...
'data_unit', 'degrees' ...
);
 
left_eye_position = types.core.SpatialSeries( ...
'description', 'The position of the left eye measured in degrees.', ...
'data', eye_position_data, ...
'starting_time_rate', 50.0, ... % Hz
'reference_frame', '(0,0) is middle', ...
'data_unit', 'degrees' ...
);
 
eye_tracking = types.core.EyeTracking();
eye_tracking.spatialseries.set('right_eye_position', right_eye_position);
eye_tracking.spatialseries.set('left_eye_position', left_eye_position);
 
% behavior_processing_module = types.core.ProcessingModule("stores behavioral data.");
behavior_processing_module.nwbdatainterface.set('EyeTracking', eye_tracking);
% nwb.processing.set('behavior', behavior_processing_module);

PupilTracking: Storing continuous eye-tracking data of pupil size

PupilTracking is for storing eye-tracking data which represents pupil size. PupilTracking holds one or more TimeSeries objects that can represent different features, such as the dilation of the pupil measured over time by a pupil tracking algorithm.
pupil_diameter = types.core.TimeSeries( ...
'description', 'Pupil diameter extracted from the video of the right eye.', ...
'data', linspace(0.001, 0.002, 50), ...
'starting_time_rate', 20.0, ... % Hz
'data_unit', 'meters' ...
);
 
pupil_tracking = types.core.PupilTracking();
pupil_tracking.timeseries.set('pupil_diameter', pupil_diameter);
 
% behavior_processing_module = types.core.ProcessingModule("stores behavioral data.");
behavior_processing_module.nwbdatainterface.set('PupilTracking', pupil_tracking);
% nwb.processing.set('behavior', behavior_processing_module);

Writing the behavior data to an NWB file

All of the above commands build an NWBFile object in-memory. To write this file, use nwbExport.
nwbExport(nwb, 'test_behavior.nwb');
+.S3 { border-left: 1px solid rgb(217, 217, 217); border-right: 1px solid rgb(217, 217, 217); border-top: 1px solid rgb(217, 217, 217); border-bottom: 0px none rgb(33, 33, 33); border-radius: 4px 4px 0px 0px; padding: 6px 45px 0px 13px; line-height: 18.004px; min-height: 0px; white-space: nowrap; color: rgb(33, 33, 33); font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; } +.S4 { border-left: 1px solid rgb(217, 217, 217); border-right: 1px solid rgb(217, 217, 217); border-top: 0px none rgb(33, 33, 33); border-bottom: 0px none rgb(33, 33, 33); border-radius: 0px; padding: 0px 45px 0px 13px; line-height: 18.004px; min-height: 0px; white-space: nowrap; color: rgb(33, 33, 33); font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; } +.S5 { border-left: 1px solid rgb(217, 217, 217); border-right: 1px solid rgb(217, 217, 217); border-top: 0px none rgb(33, 33, 33); border-bottom: 1px solid rgb(217, 217, 217); border-radius: 0px; padding: 0px 45px 4px 13px; line-height: 18.004px; min-height: 0px; white-space: nowrap; color: rgb(33, 33, 33); font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; } +.S6 { color: rgb(33, 33, 33); padding: 10px 0px 6px 17px; background: rgb(255, 255, 255) none repeat scroll 0% 0% / auto padding-box border-box; font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; overflow-x: hidden; line-height: 17.234px; } +/* Styling that is common to warnings and errors is in diagnosticOutput.css */.embeddedOutputsErrorElement { min-height: 18px; max-height: 550px;} +.embeddedOutputsErrorElement .diagnosticMessage-errorType { overflow: auto;} +.embeddedOutputsErrorElement.inlineElement {} +.embeddedOutputsErrorElement.rightPaneElement {} +/* Styling that is common to warnings and errors is in diagnosticOutput.css */.embeddedOutputsWarningElement { min-height: 18px; max-height: 550px;} +.embeddedOutputsWarningElement .diagnosticMessage-warningType { overflow: auto;} +.embeddedOutputsWarningElement.inlineElement {} +.embeddedOutputsWarningElement.rightPaneElement {} +/* Copyright 2015-2023 The MathWorks, Inc. 
*//* In this file, styles are not scoped to rtcContainer since they could be in the Dojo Tooltip */.diagnosticMessage-wrapper { font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 12px;} +.diagnosticMessage-wrapper.diagnosticMessage-warningType { /*This fallback value will be used for appdesigner warnings*/ color: var(--rtc-warning-output-color, var(--mw-color-matlabWarning));} +.diagnosticMessage-wrapper.diagnosticMessage-warningType a { /*This fallback value will be used for appdesigner warnings*/ color: var(--rtc-warning-output-color, var(--mw-color-matlabWarning)); text-decoration: underline;} +.rtcThemeDefaultOverride .diagnosticMessage-wrapper.diagnosticMessage-warningType,.rtcThemeDefaultOverride .diagnosticMessage-wrapper.diagnosticMessage-warningType a { color: var(--mw-color-matlabWarning) !important;} +.diagnosticMessage-wrapper.diagnosticMessage-errorType { /*This fallback value will be used in appdesigner error tooltip text*/ color: var(--rtc-error-output-color, var(--mw-color-matlabErrors));} +.diagnosticMessage-wrapper.diagnosticMessage-errorType a { /*This fallback value will be used in appdesigner error tooltip text*/ color: var(--rtc-error-output-color, var(--mw-color-matlabErrors)); text-decoration: underline;} +.rtcThemeDefaultOverride .diagnosticMessage-wrapper.diagnosticMessage-errorType,.rtcThemeDefaultOverride .diagnosticMessage-wrapper.diagnosticMessage-errorType a { color: var(--mw-color-matlabErrors) !important;} +.diagnosticMessage-wrapper .diagnosticMessage-messagePart,.diagnosticMessage-wrapper .diagnosticMessage-causePart { white-space: pre-wrap;} +.diagnosticMessage-wrapper .diagnosticMessage-stackPart { white-space: pre;} +.embeddedOutputsTextElement,.embeddedOutputsVariableStringElement { white-space: pre; word-wrap: initial; min-height: 18px; max-height: 550px;} +.embeddedOutputsTextElement .textElement,.embeddedOutputsVariableStringElement .textElement { overflow: auto;} +.textElement,.rtcDataTipElement .textElement { padding-top: 2px;} +.embeddedOutputsTextElement.inlineElement,.embeddedOutputsVariableStringElement.inlineElement {} +.inlineElement .textElement {} +.embeddedOutputsTextElement.rightPaneElement,.embeddedOutputsVariableStringElement.rightPaneElement { min-height: 16px;} +.rightPaneElement .textElement { padding-top: 2px; padding-left: 9px;} +.S7 { border-left: 1px solid rgb(217, 217, 217); border-right: 1px solid rgb(217, 217, 217); border-top: 1px solid rgb(217, 217, 217); border-bottom: 1px solid rgb(217, 217, 217); border-radius: 4px; padding: 6px 45px 4px 13px; line-height: 18.004px; min-height: 0px; white-space: nowrap; color: rgb(33, 33, 33); font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; } +.S8 { margin: 10px 10px 9px 4px; padding: 0px; line-height: 21px; min-height: 0px; white-space: pre-wrap; color: rgb(33, 33, 33); font-family: Helvetica, Arial, sans-serif; font-style: normal; font-size: 14px; font-weight: 400; text-align: left; } +.S9 { border-left: 1px solid rgb(217, 217, 217); border-right: 1px solid rgb(217, 217, 217); border-top: 0px none rgb(33, 33, 33); border-bottom: 1px solid rgb(217, 217, 217); border-radius: 0px 0px 4px 4px; padding: 0px 45px 4px 13px; line-height: 18.004px; min-height: 0px; white-space: nowrap; color: rgb(33, 33, 33); font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; } +.S10 { margin: 15px 10px 5px 4px; padding: 0px; line-height: 18px; min-height: 0px; white-space: pre-wrap; color: rgb(33, 33, 33); font-family: 
Helvetica, Arial, sans-serif; font-style: normal; font-size: 17px; font-weight: 700; text-align: left; }

Behavior Data

This tutorial will guide you in writing behavioral data to NWB.

Creating an NWB File

Create an NWBFile object with the required fields (session_description, identifier, and session_start_time) and additional metadata.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'general_experimenter', 'My Name', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', 'DOI:10.1016/j.neuron.2016.12.011'); % optional
nwb
nwb =
NwbFile with properties:

  nwb_version: '2.7.0'
  file_create_date: []
  identifier: 'Mouse5_Day3'
  session_description: 'mouse in open exploration'
  session_start_time: {[2018-04-25T02:30:03.000000+02:00]}
  timestamps_reference_time: []
  acquisition: [0×1 types.untyped.Set]
  analysis: [0×1 types.untyped.Set]
  general: [0×1 types.untyped.Set]
  general_data_collection: ''
  general_devices: [0×1 types.untyped.Set]
  general_experiment_description: ''
  general_experimenter: 'My Name'
  general_extracellular_ephys: [0×1 types.untyped.Set]
  general_extracellular_ephys_electrodes: []
  general_institution: 'University of My Institution'
  general_intracellular_ephys: [0×1 types.untyped.Set]
  general_intracellular_ephys_experimental_conditions: []
  general_intracellular_ephys_filtering: ''
  general_intracellular_ephys_intracellular_recordings: []
  general_intracellular_ephys_repetitions: []
  general_intracellular_ephys_sequential_recordings: []
  general_intracellular_ephys_simultaneous_recordings: []
  general_intracellular_ephys_sweep_table: []
  general_keywords: ''
  general_lab: ''
  general_notes: ''
  general_optogenetics: [0×1 types.untyped.Set]
  general_optophysiology: [0×1 types.untyped.Set]
  general_pharmacology: ''
  general_protocol: ''
  general_related_publications: 'DOI:10.1016/j.neuron.2016.12.011'
  general_session_id: 'session_1234'
  general_slices: ''
  general_source_script: ''
  general_source_script_file_name: ''
  general_stimulus: ''
  general_subject: []
  general_surgery: ''
  general_virus: ''
  intervals: [0×1 types.untyped.Set]
  intervals_epochs: []
  intervals_invalid_times: []
  intervals_trials: []
  processing: [0×1 types.untyped.Set]
  scratch: [0×1 types.untyped.Set]
  stimulus_presentation: [0×1 types.untyped.Set]
  stimulus_templates: [0×1 types.untyped.Set]
  units: []

Warning: The following required properties are missing for instance for type "NwbFile":
timestamps_reference_time

SpatialSeries: Storing continuous spatial data

SpatialSeries is a subclass of TimeSeries that represents data in space, such as the spatial direction (e.g., of gaze or travel) or the position of an animal over time.
Create data that corresponds to x, y position over time.
position_data = [linspace(0, 10, 50); linspace(0, 8, 50)]; % 2 x nT array
In SpatialSeries data, the first dimension is always time (in seconds); the second dimension represents the x, y position. However, as described in the dimensionMapNoDataPipes tutorial, when a MATLAB array is exported to HDF5, the array is transposed. Therefore, in order to correctly export the data, in MATLAB the last dimension of an array should be time. SpatialSeries data should be stored as one continuous stream as it is acquired, not reshaped by trials as is often done for analysis. Data can be trial-aligned on-the-fly using the trials table. See the trials tutorial for further information.
For position data reference_frame indicates the zero-position, e.g. the 0,0 point might be the bottom-left corner of an enclosure, as viewed from the tracking camera.
timestamps = linspace(0, 50, 50)/ 200;
position_spatial_series = types.core.SpatialSeries( ...
'description', 'Position (x, y) in an open field.', ...
'data', position_data, ...
'timestamps', timestamps, ...
'reference_frame', '(0,0) is the bottom left corner.' ...
)
position_spatial_series =
SpatialSeries with properties:

  reference_frame: '(0,0) is the bottom left corner.'
  starting_time_unit: 'seconds'
  timestamps_interval: 1
  timestamps_unit: 'seconds'
  data: [2×50 double]
  comments: 'no comments'
  control: []
  control_description: ''
  data_continuity: ''
  data_conversion: 1
  data_offset: 0
  data_resolution: -1
  data_unit: 'meters'
  description: 'Position (x, y) in an open field.'
  starting_time: []
  starting_time_rate: []
  timestamps: [0 0.0051 0.0102 0.0153 0.0204 0.0255 0.0306 0.0357 0.0408 0.0459 0.0510 0.0561 0.0612 0.0663 0.0714 0.0765 0.0816 0.0867 0.0918 0.0969 0.1020 0.1071 0.1122 0.1173 0.1224 0.1276 0.1327 0.1378 0.1429 0.1480 0.1531 … ] (1×50 double)

Position: Storing position measured over time

To help data analysis and visualization tools know that this SpatialSeries object represents the position of the subject, store the SpatialSeries object inside a Position object, which can hold one or more SpatialSeries objects.
position = types.core.Position();
position.spatialseries.set('SpatialSeries', position_spatial_series);

Create a Behavior Processing Module

Create a processing module called "behavior" for storing behavioral data in the NWBFile, then add the Position object to the processing module.
behavior_processing_module = types.core.ProcessingModule('description', 'stores behavioral data.');
behavior_processing_module.nwbdatainterface.set("Position", position);
nwb.processing.set("behavior", behavior_processing_module);

CompassDirection: Storing view angle measured over time

Analogous to how position can be stored, we can create a SpatialSeries object for representing the view angle of the subject.
For direction data, reference_frame indicates the zero direction; for instance, in this case "straight ahead" is 0 radians.
view_angle_data = linspace(0, 4, 50);
direction_spatial_series = types.core.SpatialSeries( ...
'description', 'View angle of the subject measured in radians.', ...
'data', view_angle_data, ...
'timestamps', timestamps, ...
'reference_frame', 'straight ahead', ...
'data_unit', 'radians' ...
);
direction = types.core.CompassDirection();
direction.spatialseries.set('spatial_series', direction_spatial_series);
We can add a CompassDirection object to the behavior processing module the same way we have added the position data.
%behavior_processing_module = types.core.ProcessingModule("stores behavioral data."); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('CompassDirection', direction);
%nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it

BehavioralTimeSeries: Storing continuous behavior data

BehavioralTimeSeries is an interface for storing continuous behavior data, such as the speed of a subject.
speed_data = linspace(0, 0.4, 50);
 
speed_time_series = types.core.TimeSeries( ...
'data', speed_data, ...
'starting_time', 1.0, ... % NB: Important to set starting_time when using starting_time_rate
'starting_time_rate', 10.0, ... % Hz
'description', 'The speed of the subject measured over time.', ...
'data_unit', 'm/s' ...
);
 
behavioral_time_series = types.core.BehavioralTimeSeries();
behavioral_time_series.timeseries.set('speed', speed_time_series);
 
%behavior_processing_module = types.core.ProcessingModule("stores behavioral data."); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('BehavioralTimeSeries', behavioral_time_series);
%nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it

BehavioralEvents: Storing behavioral events

BehavioralEvents is an interface for storing behavioral events. We can use it for storing the timing and amount of rewards (e.g. water amount) or lever press times.
reward_amount = [1.0, 1.5, 1.0, 1.5];
event_timestamps = [1.0, 2.0, 5.0, 6.0];
 
time_series = types.core.TimeSeries( ...
'data', reward_amount, ...
'timestamps', event_timestamps, ...
'description', 'The water amount the subject received as a reward.', ...
'data_unit', 'ml' ...
);
 
behavioral_events = types.core.BehavioralEvents();
behavioral_events.timeseries.set('lever_presses', time_series);
 
%behavior_processing_module = types.core.ProcessingModule("stores behavioral data."); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('BehavioralEvents', behavioral_events);
%nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it
Storing only the timestamps of the events is possible with the ndx-events NWB extension. You can also add labels associated with the events with this extension. You can find information about installation and example usage here.

BehavioralEpochs: Storing intervals of behavior data

BehavioralEpochs is for storing intervals of behavior data. BehavioralEpochs uses IntervalSeries to represent the time intervals. Create an IntervalSeries object that represents the time intervals when the animal was running. IntervalSeries uses 1 to indicate the beginning of an interval and -1 to indicate the end.
run_intervals = types.core.IntervalSeries( ...
'description', 'Intervals when the animal was running.', ...
'data', [1, -1, 1, -1, 1, -1], ...
'timestamps', [0.5, 1.5, 3.5, 4.0, 7.0, 7.3] ...
);
 
behavioral_epochs = types.core.BehavioralEpochs();
behavioral_epochs.intervalseries.set('running', run_intervals);
You can add more than one IntervalSeries to a BehavioralEpochs object.
sleep_intervals = types.core.IntervalSeries( ...
'description', 'Intervals when the animal was sleeping', ...
'data', [1, -1, 1, -1], ...
'timestamps', [15.0, 30.0, 60.0, 95.0] ...
);
behavioral_epochs.intervalseries.set('sleeping', sleep_intervals);
 
% behavior_processing_module = types.core.ProcessingModule("stores behavioral data.");
% behavior_processing_module.nwbdatainterface.set('BehavioralEvents', behavioral_events);
% nwb.processing.set('behavior', behavior_processing_module);

Another approach: TimeIntervals

Using TimeIntervals to represent time intervals is often preferred over BehavioralEpochs and IntervalSeries. TimeIntervals is a subclass of DynamicTable, which offers flexibility for tabular data by allowing the addition of optional columns which are not defined in the standard DynamicTable class.
sleep_intervals = types.core.TimeIntervals( ...
'description', 'Intervals when the animal was sleeping.', ...
'colnames', {'start_time', 'stop_time', 'stage'} ...
);
 
sleep_intervals.addRow('start_time', 0.3, 'stop_time', 0.35, 'stage', 1);
sleep_intervals.addRow('start_time', 0.7, 'stop_time', 0.9, 'stage', 2);
sleep_intervals.addRow('start_time', 1.3, 'stop_time', 3.0, 'stage', 3);
 
nwb.intervals.set('sleep_intervals', sleep_intervals);
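Because TimeIntervals is a DynamicTable subclass, you can inspect the rows with its toTable method; a minimal sketch assuming the sleep_intervals object defined above.
% View the interval table as a regular MATLAB table
sleep_intervals.toTable()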

EyeTracking: Storing continuous eye-tracking data of gaze direction

EyeTracking is for storing eye-tracking data which represents direction of gaze as measured by an eye tracking algorithm. An EyeTracking object holds one or more SpatialSeries objects that represent the gaze direction over time extracted from a video.
eye_position_data = [linspace(-20, 30, 50); linspace(30, -20, 50)];
 
right_eye_position = types.core.SpatialSeries( ...
'description', 'The position of the right eye measured in degrees.', ...
'data', eye_position_data, ...
'starting_time', 1.0, ... % NB: Important to set starting_time when using starting_time_rate
'starting_time_rate', 50.0, ... % Hz
'reference_frame', '(0,0) is middle', ...
'data_unit', 'degrees' ...
);
 
left_eye_position = types.core.SpatialSeries( ...
'description', 'The position of the left eye measured in degrees.', ...
'data', eye_position_data, ...
'starting_time', 1.0, ... % NB: Important to set starting_time when using starting_time_rate
'starting_time_rate', 50.0, ... % Hz
'reference_frame', '(0,0) is middle', ...
'data_unit', 'degrees' ...
);
 
eye_tracking = types.core.EyeTracking();
eye_tracking.spatialseries.set('right_eye_position', right_eye_position);
eye_tracking.spatialseries.set('left_eye_position', left_eye_position);
 
% behavior_processing_module = types.core.ProcessingModule("stores behavioral data.");
behavior_processing_module.nwbdatainterface.set('EyeTracking', eye_tracking);
% nwb.processing.set('behavior', behavior_processing_module);

PupilTracking: Storing continuous eye-tracking data of pupil size

PupilTracking is for storing eye-tracking data which represents pupil size. PupilTracking holds one or more TimeSeries objects that can represent different features such as the dilation of the pupil measured over time by a pupil tracking algorithm.
pupil_diameter = types.core.TimeSeries( ...
'description', 'Pupil diameter extracted from the video of the right eye.', ...
'data', linspace(0.001, 0.002, 50), ...
'starting_time', 1.0, ... % NB: Important to set starting_time when using starting_time_rate
'starting_time_rate', 20.0, ... % Hz
'data_unit', 'meters' ...
);
 
pupil_tracking = types.core.PupilTracking();
pupil_tracking.timeseries.set('pupil_diameter', pupil_diameter);
 
% behavior_processing_module = types.core.ProcessingModule("stores behavioral data.");
behavior_processing_module.nwbdatainterface.set('PupilTracking', pupil_tracking);
% nwb.processing.set('behavior', behavior_processing_module);

Writing the behavior data to an NWB file

All of the above commands build an NWBFile object in-memory. To write this file, use nwbExport.
% Save to tutorials/tutorial_nwb_files folder
nwbFilePath = misc.getTutorialNwbFilePath('behavior_tutorial.nwb');
nwbExport(nwb, nwbFilePath);
fprintf('Exported NWB file to "%s"\n', 'behavior_tutorial.nwb')
Exported NWB file to "behavior_tutorial.nwb"
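To verify the export, one option is to read the file back in and inspect the behavior processing module; a sketch assuming the nwbFilePath variable from above ('ignorecache' skips regenerating cached type classes).
nwb_in = nwbRead(nwbFilePath, 'ignorecache');
nwb_in.processing.get('behavior')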

\ No newline at end of file diff --git a/tutorials/html/dynamic_tables.html b/tutorials/html/dynamic_tables.html index 1549783e..af3f6c88 100644 --- a/tutorials/html/dynamic_tables.html +++ b/tutorials/html/dynamic_tables.html @@ -91,19 +91,19 @@ Multidimensional ragged array columns Adding rows to multidimensional array columns Learn More! - Python Tutorial

MatNWB Setup

Start by setting up your MATLAB workspace. The code below adds the directory containing the MatNWB package to the MATLAB search path. MatNWB works by automatically creating API classes based on a defined schema.
%{
path_to_matnwb = '~/Repositories/matnwb'; % change to your own path location
addpath(genpath(pwd));
%}

Constructing a table with initialized columns

The DynamicTable class represents a column-based table to which you can add custom columns. It consists of a description, a list of columns, and a list of row IDs. You can create a DynamicTable by first defining the VectorData objects that will make up the columns of the table. Each VectorData object must contain the same number of rows. A list of row IDs may be passed to the DynamicTable using the id argument. Row IDs are a useful way to access row information independent of row location index. The list of row IDs must be cast as an ElementIdentifiers object before being passed to the DynamicTable object. If no value is passed to id, an ElementIdentifiers object with 0-indexed row IDs will be created for you automatically.
MATLAB Syntax Note: Using column vectors is crucial to properly build vectors and tables. When defining individual values, make sure to use a semicolon (;) instead of a comma (,) when defining the data fields of these objects.
col1 = types.hdmf_common.VectorData( ...
'description', 'column #1', ...
'data', [1;2] ...
);
 
col2 = types.hdmf_common.VectorData( ...
'description', 'column #2', ...
'data', {'a';'b'} ...
);
 
my_table = types.hdmf_common.DynamicTable( ...
'description', 'an example table', ...
'colnames', {'col1', 'col2'}, ...
'col1', col1, ...
'col2', col2, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0;1]) ... % 0-indexed, for compatibility with Python
);
my_table
my_table =
DynamicTable with properties:
Python Tutorial

MatNWB Setup

Start by setting up your MATLAB workspace. The code below adds the directory containing the MatNWB package to the MATLAB search path. MatNWB works by automatically creating API classes based on a defined schema.
%{
path_to_matnwb = '~/Repositories/matnwb'; % change to your own path location
addpath(genpath(pwd));
%}

Constructing a table with initialized columns

The DynamicTable class represents a column-based table to which you can add custom columns. It consists of a description, a list of columns, and a list of row IDs. You can create a DynamicTable by first defining the VectorData objects that will make up the columns of the table. Each VectorData object must contain the same number of rows. A list of row IDs may be passed to the DynamicTable using the id argument. Row IDs are a useful way to access row information independent of row location index. The list of row IDs must be cast as an ElementIdentifiers object before being passed to the DynamicTable object. If no value is passed to id, an ElementIdentifiers object with 0-indexed row IDs will be created for you automatically.
MATLAB Syntax Note: Using column vectors is crucial to properly build vectors and tables. When defining individual values, make sure to use a semicolon (;) instead of a comma (,) when defining the data fields of these objects.
col1 = types.hdmf_common.VectorData( ...
'description', 'column #1', ...
'data', [1;2] ...
);
 
col2 = types.hdmf_common.VectorData( ...
'description', 'column #2', ...
'data', {'a';'b'} ...
);
 
my_table = types.hdmf_common.DynamicTable( ...
'description', 'an example table', ...
'colnames', {'col1', 'col2'}, ...
'col1', col1, ...
'col2', col2, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0;1]) ... % 0-indexed, for compatibility with Python
);
my_table
my_table =
DynamicTable with properties:
  id: [1×1 types.hdmf_common.ElementIdentifiers]
  colnames: {'col1' 'col2'}
  description: 'an example table'
  vectordata: [2×1 types.untyped.Set]

Adding rows

You can add rows to an existing DynamicTable using the object's addRow method. One way of using this method is to pass in the names of columns as parameter names followed by the elements to append. The class of the elements of the column must match the elements to append.
my_table.addRow('col1', 3, 'col2', {'c'}, 'id', 2);

Adding columns

You can add new columns to an existing DynamicTable object using the addColumn method. One way of using this method is to pass in the names of each new column followed by the corresponding values for each new column. The height of the new columns must match the height of the table.
col3 = types.hdmf_common.VectorData('description', 'column #3', ...
'data', [100; 200; 300]);
col4 = types.hdmf_common.VectorData('description', 'column #4', ...
'data', {'a1'; 'b2'; 'c3'});
 
my_table.addColumn('col3', col3,'col4', col4);

Create MATLAB table and convert to dynamic table

As an alternative to building a dynamic table using the DynamicTable and VectorData data types, it is also possible to create a MATLAB table and convert it to a dynamic table. Let's create the same table as before, but using MATLAB's table class:
% Create a table with two variables (columns):
T = table([1;2], {'a';'b'}, 'VariableNames', {'col1', 'col2'});
T.Properties.VariableDescriptions = {'column #1', 'column #2'};

Adding rows

T(end+1, :) = {3, 'c'};

Adding variables (columns)

T = addvars(T, [100;200;300], 'NewVariableNames',{'col3'});
T.Properties.VariableDescriptions{3} = 'column #3';
 
% Alternatively, a new variable can be added directly using dot syntax.
T.col4 = {'a1'; 'b2'; 'c3'};
T.Properties.VariableDescriptions{4} = 'column #4';
T
T = 3×4 table
 col1col2col3col4
11'a'100'a1'
22'b'200'b2'
33'c'300'c3'

Convert to dynamic table

dynamic_table = util.table2nwb(T, 'A MATLAB table that was converted to a dynamic table')
dynamic_table =
DynamicTable with properties:

Adding rows

You can add rows to an existing DynamicTable using the object's addRow method. One way of using this method is to pass in the names of columns as parameter names followed by the elements to append. The class of the elements of the column must match the elements to append.
my_table.addRow('col1', 3, 'col2', {'c'}, 'id', 2);

Adding columns

You can add new columns to an existing DynamicTable object using the addColumn method. One way of using this method is to pass in the names of each new column followed by the corresponding values for each new column. The height of the new columns must match the height of the table.
col3 = types.hdmf_common.VectorData('description', 'column #3', ...
'data', [100; 200; 300]);
col4 = types.hdmf_common.VectorData('description', 'column #4', ...
'data', {'a1'; 'b2'; 'c3'});
 
my_table.addColumn('col3', col3,'col4', col4);

Create MATLAB table and convert to dynamic table

As an alternative to building a dynamic table using the DynamicTable and VectorData data types, it is also possible to create a MATLAB table and convert it to a dynamic table. Let's create the same table as before, but using MATLAB's table class:
% Create a table with two variables (columns):
T = table([1;2], {'a';'b'}, 'VariableNames', {'col1', 'col2'});
T.Properties.VariableDescriptions = {'column #1', 'column #2'};

Adding rows

T(end+1, :) = {3, 'c'};

Adding variables (columns)

T = addvars(T, [100;200;300], 'NewVariableNames',{'col3'});
T.Properties.VariableDescriptions{3} = 'column #3';
 
% Alternatively, a new variable can be added directly using dot syntax.
T.col4 = {'a1'; 'b2'; 'c3'};
T.Properties.VariableDescriptions{4} = 'column #4';
T
T = 3×4 table
 col1col2col3col4
11'a'100'a1'
22'b'200'b2'
33'c'300'c3'

Convert to dynamic table

dynamic_table = util.table2nwb(T, 'A MATLAB table that was converted to a dynamic table')
dynamic_table =
DynamicTable with properties:
  id: [1×1 types.hdmf_common.ElementIdentifiers]
  colnames: {'col1' 'col2' 'col3' 'col4'}
  description: 'A MATLAB table that was converted to a dynamic table'
  vectordata: [4×1 types.untyped.Set]

Enumerated (categorical) data

EnumData is a special type of column for storing an enumerated data type. This way each unique value is stored once, and the data references those values by index. Using this method is more efficient than storing a single value many times, and has the advantage of communicating to downstream tools that the data is categorical in nature.

Warning Regarding EnumData

EnumData is currently an experimental feature and as such should not be used in a production environment.
CellTypeElements = types.hdmf_common.VectorData(...
'description', 'fixed set of elements referenced by cell_type' ...
, 'data', {'aa', 'bb', 'cc'} ... % the enumerated elements
);
CellType = types.hdmf_experimental.EnumData( ...
'description', 'this column holds categorical variables' ... % properties derived from VectorData
, 'data', [0, 1, 2, 1, 0] ... % zero-indexed offset to elements.
, 'elements', types.untyped.ObjectView(CellTypeElements) ...
);
 
MyTable = types.hdmf_common.DynamicTable('description', 'an example table');
MyTable.vectordata.set('cell_type_elements', CellTypeElements); % the *_elements format is required for compatibility with pynwb
MyTable.addColumn('cell_type', CellType);

Ragged array columns

A table column with a different number of elements for each row is called a "ragged array column." To define a table with a ragged array column, pass both the VectorData and the corresponding VectorIndex as columns of the DynamicTable object. The VectorData columns will contain the data values. The VectorIndex column serves to indicate how to arrange the data across rows. By convention the VectorIndex object corresponding to a particular column must have the same name with the addition of the '_index' suffix.
Below, the VectorIndex values indicate to place the 1st to 3rd (inclusive) elements of the VectorData into the first row and 4th element into the second row. The resulting table will have the cell {'1a'; '1b'; '1c'} in the first row and the cell {'2a'} in the second row.
 
col1 = types.hdmf_common.VectorData( ...
'description', 'column #1', ...
'data', {'1a'; '1b'; '1c'; '2a'} ...
);
 
col1_index = types.hdmf_common.VectorIndex( ...
'description', 'column #1 index', ...
'target',types.untyped.ObjectView(col1), ... % object view of target column
'data', [3; 4] ...
);
 
table_ragged_col = types.hdmf_common.DynamicTable( ...
'description', 'an example table', ...
'colnames', {'col1'}, ...
'col1', col1, ...
'col1_index', col1_index, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0; 1]) ... % 0-indexed, for compatibility with Python
);

Adding ragged array rows

You can add a new row to the ragged array column. Under the hood, the addRow method will add the appropriate value to the VectorIndex column to maintain proper formatting.
table_ragged_col.addRow('col1', {'3a'; '3b'; '3c'}, 'id', 2);

Accessing row elements

You can access data from entire rows of a DynamicTable object by calling the getRow method for the corresponding object. You can supply either an individual row number or a list of row numbers.
my_table.getRow(1)
ans = 1×4 table
 col1col2col3col4
11'a'100'a1'
If you want to access values for just a subset of columns, you can pass in the 'columns' argument along with a cell array with the desired column names.
my_table.getRow(1:3, 'columns', {'col1'})
ans = 3×1 table
 col1
11
22
33
You can also access specific rows by their corresponding row IDs, if they have been defined, by supplying a true Boolean to the 'useId' parameter
my_table.getRow(1, 'useId', true)
ans = 1×4 table
 col1col2col3col4
12'b'200'b2'
For ragged array columns, the getRow method will return a cell array with a different number of elements for each row
table_ragged_col.getRow(1:2)
ans = 2×1 table
 col1
1[{'1a'};{'1b'};{'1c'}]
21×1 cell

Accessing column elements

To access all rows from a particular column, use the .get method on the vectordata field of the DynamicTable object
 
my_table.vectordata.get('col2').data
ans = 3×1 cell
'a'
'b'
'c'

Referencing rows of other tables

You can create a column that references rows of other tables by adding a DynamicTableRegion object as a column of a DynamicTable. This is analogous to a foreign key in a relational database. The DynamicTableRegion class takes in an ObjectView object as an argument. ObjectView objects create links from one object type to another.
dtr_col = types.hdmf_common.DynamicTableRegion( ...
'description', 'references multiple rows of earlier table', ...
'data', [0; 1; 1; 0], ... # 0-indexed
'table',types.untyped.ObjectView(my_table) ... % object view of target table
);
 
data_col = types.hdmf_common.VectorData( ...
'description', 'data column', ...
'data', {'a'; 'b'; 'c'; 'd'} ...
);
 
dtr_table = types.hdmf_common.DynamicTable( ...
'description', 'test table with DynamicTableRegion', ...
'colnames', {'data_col', 'dtr_col'}, ...
'dtr_col', dtr_col, ...
'data_col',data_col, ...
'id',types.hdmf_common.ElementIdentifiers('data', [0; 1; 2; 3]) ...
);

Converting a DynamicTable to a MATLAB table

You can convert a DynamicTable object to a MATLAB table by making use of the object's toTable method. This is a useful way to view the whole table in a human-readable format.
my_table.toTable()
ans = 3×5 table
 idcol1col2col3col4
101'a'100'a1'
212'b'200'b2'
323'c'300'c3'
When the DynamicTable object contains a column that references other tables, you can pass in a Boolean to indicate whether to include just the row indices of the referenced table. Passing in false will result in inclusion of the referenced rows as nested tables.
dtr_table.toTable(false)
ans = 4×3 table
 iddata_coldtr_col
10'a'1×4 table
21'b'1×4 table
32'c'1×4 table
43'd'1×4 table

Creating an expandable table

When using the default HDF5 backend, each column of these tables is an HDF5 Dataset, which by default is set to an unchangeable size. This means that once a file is written, it is not possible to add a new row. If you want to be able to save this file, load it, and add more rows to the table, you will need to set this up when you create the VectorData and ElementIdentifiers columns of a DynamicTable. Specifically, you must wrap the column data with a DataPipe object. The DataPipe class takes in maxSize and axis as arguments to indicate the maximum desired size for each axis and the axis along which to append, respectively. For example, creating a DataPipe object with a maxSize value equal to [Inf, 1] indicates that the number of rows may increase indefinitely. In contrast, setting maxSize equal to [8, 1] would allow the column to grow to a maximum height of 8.
% create NwbFile object with required fields
file= NwbFile( ...
'session_start_time', datetime('2021-01-01 00:00:00', 'TimeZone', 'local'), ...
'identifier', 'ident1', ...
'session_description', 'ExpandableTableTutorial' ...
);
 
% create VectorData objects with DataPipe objects
start_time_exp = types.hdmf_common.VectorData( ...
'description', 'start times column', ...
'data', types.untyped.DataPipe( ...
'data', [1, 2], ... # data must be numerical
'maxSize', Inf ...
) ...
);
 
stop_time_exp = types.hdmf_common.VectorData( ...
'description', 'stop times column', ...
'data', types.untyped.DataPipe( ...
'data', [2, 3], ... #data must be numerical
'maxSize', Inf ...
) ...
);
 
random_exp = types.hdmf_common.VectorData( ...
'description', 'random data column', ...
'data', types.untyped.DataPipe( ...
'data', rand(5, 2), ... #data must be numerical
'maxSize', [5, Inf], ...
'axis', 2 ...
) ...
);
 
ids_exp = types.hdmf_common.ElementIdentifiers( ...
'data', types.untyped.DataPipe( ...
'data', int32([0; 1]), ... # data must be numerical
'maxSize', Inf ...
) ...
);
% create expandable table
colnames = {'start_time', 'stop_time', 'randomvalues'};
file.intervals_trials = types.core.TimeIntervals( ...
'description', 'test expandable dynamic table', ...
'colnames', colnames, ...
'start_time', start_time_exp, ...
'stop_time', stop_time_exp, ...
'randomvalues', random_exp, ...
'id', ids_exp ...
);
% export file
nwbExport(file, 'expandableTableTestFile.nwb');
Now, you can read in the file, add more rows, and save it again to file
readFile = nwbRead('expandableTableTestFile.nwb', 'ignorecache');
readFile.intervals_trials.addRow( ...
'start_time', 3, ...
'stop_time', 4, ...
'randomvalues', rand(5,1), ...
'id', 2 ...
)
nwbExport(readFile, 'expandableTableTestFile.nwb');
Note: DataPipe objects change how the dimension of the datasets for each column map onto the shape of HDF5 datasets. See README for more details.

Multidimensional Columns

The order of dimensions of multidimensional columns in MatNWB is reversed relative to the Python HDMF package (see README for detailed explanation). Therefore, the height of a multidimensional column belonging to a DynamicTable object is defined by the shape of its last dimension. A valid DynamicTable must have matched height across columns.

Constructing multidimensional columns

% Define 1D column
simple_col = types.hdmf_common.VectorData( ...
'description', '1D column',...
'data', rand(10,1) ...
);
% Define ND column
multi_col = types.hdmf_common.VectorData( ...
'description', 'multidimensional column',...
'data', rand(3,2,10) ...
);
% construct table
multi_dim_table = types.hdmf_common.DynamicTable( ...
'description','test table', ...
'colnames', {'simple','multi'}, ...
'simple', simple_col, ...
'multi', multi_col, ...
'id', types.hdmf_common.ElementIdentifiers('data', (0:9)') ... % 0-indexed, for compatibility with Python
);
 

Multidimensional ragged array columns

DynamicTable objects with multidimensional ragged array columns can be constructed by passing in the corresponding VectorIndex column.
% Define column with data
multi_ragged_col = types.hdmf_common.VectorData( ...
'description', 'multidimensional ragged array column',...
'data', rand(2,3,5) ...
);
% Define column with VectorIndex
multi_ragged_index = types.hdmf_common.VectorIndex( ...
'description', 'index to multi_ragged_col', ...
'target', types.untyped.ObjectView(multi_ragged_col),'data', [2; 3; 5] ...
);
 
multi_ragged_table = types.hdmf_common.DynamicTable( ...
'description','test table', ...
'colnames', {'multi_ragged'}, ...
'multi_ragged', multi_ragged_col, ...
'multi_ragged_index', multi_ragged_index, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0; 1; 2]) ... % 0-indexed, for compatibility with Python
);

Adding rows to multidimensional array columns

DynamicTable objects with multidimensional array columns can also be constructed by adding a single row at a time. This method makes use of DataPipe objects due to the fact that MATLAB does not support trailing singleton dimensions for arrays with more than two dimensions. The code block below demonstrates how to build a DynamicTable object with a multidimensional ragged array column in this manner.
% Create file
file = NwbFile( ...
'session_start_time', datetime('2021-01-01 00:00:00', 'TimeZone', 'local'), ...
'identifier', 'ident1', ...
'session_description', 'test_file' ...
);
 
% Define Vector Data Objects with first row of table
start_time_exp = types.hdmf_common.VectorData( ...
'description', 'start times column', ...
'data', types.untyped.DataPipe( ...
'data', 1, ...
'maxSize', Inf ...
) ...
);
stop_time_exp = types.hdmf_common.VectorData( ...
'description', 'stop times column', ...
'data', types.untyped.DataPipe( ...
'data', 10, ...
'maxSize', Inf ...
) ...
);
random_exp = types.hdmf_common.VectorData( ...
'description', 'random data column', ...
'data', types.untyped.DataPipe( ...
'data', rand(3,2,5), ... #random data
'maxSize', [3, 2, Inf], ...
'axis', 3 ...
) ...
);
random_exp_index = types.hdmf_common.VectorIndex( ...
'description', 'index to random data column', ...
'target',types.untyped.ObjectView(random_exp), ...
'data', types.untyped.DataPipe( ...
'data', uint64(5), ...
'maxSize', Inf ...
) ...
);
ids_exp = types.hdmf_common.ElementIdentifiers( ...
'data', types.untyped.DataPipe( ...
'data', int64(0), ... # data must be numerical
'maxSize', Inf ...
) ...
);
% Create expandable table
colnames = {'start_time', 'stop_time', 'randomvalues'};
file.intervals_trials = types.core.TimeIntervals( ...
'description', 'test expandable dynamic table', ...
'colnames', colnames, ...
'start_time', start_time_exp, ...
'stop_time', stop_time_exp, ...
'randomvalues', random_exp, ...
'randomvalues_index', random_exp_index, ...
'id', ids_exp ...
);
% Export file
nwbExport(file, 'multiRaggedExpandableTableTest.nwb');
% Read in file
read_file = nwbRead('multiRaggedExpandableTableTest.nwb', 'ignorecache');
% add individual rows
read_file.intervals_trials.addRow( ...
'start_time', 2, ...
'stop_time', 20, ...
'randomvalues', rand(3,2,6), ...
'id', 1 ...
);
read_file.intervals_trials.addRow( ...
'start_time', 3, ...
'stop_time', 30, ...
'randomvalues', rand(3,2,3), ...
'id', 2 ...
);
read_file.intervals_trials.addRow( ...
'start_time', 4, ...
'stop_time', 40, ...
'randomvalues', rand(3,2,8), ...
'id', 3 ...
);
 

Learn More!

Python Tutorial


Enumerated (categorical) data

EnumData is a special type of column for storing an enumerated data type. This way each unique value is stored once, and the data references those values by index. Using this method is more efficient than storing a single value many times, and has the advantage of communicating to downstream tools that the data is categorical in nature.

Warning Regarding EnumData

EnumData is currently an experimental feature and as such should not be used in a production environment.
CellTypeElements = types.hdmf_common.VectorData(...
'description', 'fixed set of elements referenced by cell_type' ...
, 'data', {'aa', 'bb', 'cc'} ... % the enumerated elements
);
CellType = types.hdmf_experimental.EnumData( ...
'description', 'this column holds categorical variables' ... % properties derived from VectorData
, 'data', [0, 1, 2, 1, 0] ... % zero-indexed offset to elements.
, 'elements', types.untyped.ObjectView(CellTypeElements) ...
);
 
MyTable = types.hdmf_common.DynamicTable('description', 'an example table');
MyTable.vectordata.set('cell_type_elements', CellTypeElements); % the *_elements format is required for compatibility with pynwb
MyTable.addColumn('cell_type', CellType);
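To recover the categorical labels from the enumerated indices, you can index the elements by the zero-based data values plus one, since MATLAB is 1-indexed; a minimal sketch assuming the CellType and CellTypeElements objects above.
% Maps [0, 1, 2, 1, 0] back to {'aa' 'bb' 'cc' 'bb' 'aa'}
labels = CellTypeElements.data(CellType.data + 1)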

Ragged array columns

A table column with a different number of elements for each row is called a "ragged array column." To define a table with a ragged array column, pass both the VectorData and the corresponding VectorIndex as columns of the DynamicTable object. The VectorData columns will contain the data values. The VectorIndex column serves to indicate how to arrange the data across rows. By convention the VectorIndex object corresponding to a particular column must have the same name with the addition of the '_index' suffix.
Below, the VectorIndex values indicate to place the 1st to 3rd (inclusive) elements of the VectorData into the first row and 4th element into the second row. The resulting table will have the cell {'1a'; '1b'; '1c'} in the first row and the cell {'2a'} in the second row.
 
col1 = types.hdmf_common.VectorData( ...
'description', 'column #1', ...
'data', {'1a'; '1b'; '1c'; '2a'} ...
);
 
col1_index = types.hdmf_common.VectorIndex( ...
'description', 'column #1 index', ...
'target',types.untyped.ObjectView(col1), ... % object view of target column
'data', [3; 4] ...
);
 
table_ragged_col = types.hdmf_common.DynamicTable( ...
'description', 'an example table', ...
'colnames', {'col1'}, ...
'col1', col1, ...
'col1_index', col1_index, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0; 1]) ... % 0-indexed, for compatibility with Python
);

Adding ragged array rows

You can add a new row to the ragged array column. Under the hood, the addRow method will add the appropriate value to the VectorIndex column to maintain proper formatting.
table_ragged_col.addRow('col1', {'3a'; '3b'; '3c'}, 'id', 2);
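As a quick check (a sketch assuming the table above), getRow returns the appended ragged row as a cell array.
% Row 3 is the row just added; col1 holds the cell {'3a'; '3b'; '3c'}
table_ragged_col.getRow(3)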

Accessing row elements

You can access data from entire rows of a DynamicTable object by calling the getRow method for the corresponding object. You can supply either an individual row number or a list of row numbers.
my_table.getRow(1)
ans = 1×4 table
 col1col2col3col4
11'a'100'a1'
If you want to access values for just a subset of columns you can pass in the 'columns' argument along with a cell array with the desired column names
my_table.getRow(1:3, 'columns', {'col1'})
ans = 3×1 table
 col1
11
22
33
You can also access specific rows by their corresponding row IDs, if they have been defined, by supplying a true Boolean to the 'useId' parameter
my_table.getRow(1, 'useId', true)
ans = 1×4 table
 col1col2col3col4
12'b'200'b2'
For ragged array columns, the getRow method will return a cell array with a different number of elements for each row
table_ragged_col.getRow(1:2)
ans = 2×1 table
 col1
1[{'1a'};{'1b'};{'1c'}]
21×1 cell

Accessing column elements

To access all rows from a particular column, use the .get method on the vectordata field of the DynamicTable object
 
my_table.vectordata.get('col2').data
ans = 3×1 cell
'a'
'b'
'c'
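Note that for tables read back from disk with nwbRead, a column's data property is typically a lazily loaded types.untyped.DataStub rather than an in-memory array; in that case, calling its load method pulls the values into memory. A hedged sketch, assuming a previously exported NWB file:
% read_file = nwbRead('expandableTableTestFile.nwb', 'ignorecache');
% values = read_file.intervals_trials.vectordata.get('randomvalues').data.load();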

Referencing rows of other tables

You can create a column that references rows of other tables by adding a DynamicTableRegion object as a column of a DynamicTable. This is analogous to a foreign key in a relational database. The DynamicTableRegion class takes in an ObjectView object as an argument. ObjectView objects create links from one object type to another.
dtr_col = types.hdmf_common.DynamicTableRegion( ...
'description', 'references multiple rows of earlier table', ...
'data', [0; 1; 1; 0], ... # 0-indexed
'table',types.untyped.ObjectView(my_table) ... % object view of target table
);
 
data_col = types.hdmf_common.VectorData( ...
'description', 'data column', ...
'data', {'a'; 'b'; 'c'; 'd'} ...
);
 
dtr_table = types.hdmf_common.DynamicTable( ...
'description', 'test table with DynamicTableRegion', ...
'colnames', {'data_col', 'dtr_col'}, ...
'dtr_col', dtr_col, ...
'data_col',data_col, ...
'id',types.hdmf_common.ElementIdentifiers('data', [0; 1; 2; 3]) ...
);

Converting a DynamicTable to a MATLAB table

You can convert a DynamicTable object to a MATLAB table by making use of the object's toTable method. This is a useful way to view the whole table in a human-readable format.
my_table.toTable()
ans = 3×5 table
 idcol1col2col3col4
101'a'100'a1'
212'b'200'b2'
323'c'300'c3'
When the DynamicTable object contains a column that references other tables, you can pass in a Boolean to indicate whether to include just the row indices of the referenced table. Passing in false will result in inclusion of the referenced rows as nested tables.
dtr_table.toTable(false)
ans = 4×3 table
 iddata_coldtr_col
10'a'1×4 table
21'b'1×4 table
32'c'1×4 table
43'd'1×4 table

Creating an expandable table

When using the default HDF5 backend, each column of these tables is an HDF5 Dataset, which by default is set to an unchangeable size. This means that once a file is written, it is not possible to add a new row. If you want to be able to save this file, load it, and add more rows to the table, you will need to set this up when you create the VectorData and ElementIdentifiers columns of a DynamicTable. Specifically, you must wrap the column data with a DataPipe object. The DataPipe class takes in maxSize and axis as arguments to indicate the maximum desired size for each axis and the axis along which to append, respectively. For example, creating a DataPipe object with a maxSize value equal to [Inf, 1] indicates that the number of rows may increase indefinitely. In contrast, setting maxSize equal to [8, 1] would allow the column to grow to a maximum height of 8.
% create NwbFile object with required fields
file = NwbFile( ...
'session_start_time', datetime('2021-01-01 00:00:00', 'TimeZone', 'local'), ...
'identifier', 'ident1', ...
'session_description', 'ExpandableTableTutorial' ...
);
 
% create VectorData objects with DataPipe objects
start_time_exp = types.hdmf_common.VectorData( ...
'description', 'start times column', ...
'data', types.untyped.DataPipe( ...
'data', [1, 2], ... % data must be numerical
'maxSize', Inf ...
) ...
);
 
stop_time_exp = types.hdmf_common.VectorData( ...
'description', 'stop times column', ...
'data', types.untyped.DataPipe( ...
'data', [2, 3], ... % data must be numerical
'maxSize', Inf ...
) ...
);
 
random_exp = types.hdmf_common.VectorData( ...
'description', 'random data column', ...
'data', types.untyped.DataPipe( ...
'data', rand(5, 2), ... % data must be numerical
'maxSize', [5, Inf], ...
'axis', 2 ...
) ...
);
 
ids_exp = types.hdmf_common.ElementIdentifiers( ...
'data', types.untyped.DataPipe( ...
'data', int32([0; 1]), ... % data must be numerical
'maxSize', Inf ...
) ...
);
% create expandable table
colnames = {'start_time', 'stop_time', 'randomvalues'};
file.intervals_trials = types.core.TimeIntervals( ...
'description', 'test expandable dynamic table', ...
'colnames', colnames, ...
'start_time', start_time_exp, ...
'stop_time', stop_time_exp, ...
'randomvalues', random_exp, ...
'id', ids_exp ...
);
% export file
nwbExport(file, 'expandableTableTestFile.nwb');
Now you can read in the file, add more rows, and save it again:
readFile = nwbRead('expandableTableTestFile.nwb', 'ignorecache');
readFile.intervals_trials.addRow( ...
'start_time', 3, ...
'stop_time', 4, ...
'randomvalues', rand(5,1), ...
'id', 2 ...
)
nwbExport(readFile, 'expandableTableTestFile.nwb');
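To verify the appended row, you can read the file back in and convert the table to a MATLAB table (a minimal sanity check, reusing the file name from above):
verifyFile = nwbRead('expandableTableTestFile.nwb', 'ignorecache');
height(verifyFile.intervals_trials.toTable()) % expect 3 rows: 2 original + 1 appended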
Note: DataPipe objects change how the dimensions of the datasets for each column map onto the shape of HDF5 datasets. See the README for more details.

Multidimensional Columns

The order of dimensions of multidimensional columns in MatNWB is reversed relative to the Python HDMF package (see the README for a detailed explanation). Therefore, the height of a multidimensional column belonging to a DynamicTable object is defined by the size of its last dimension. A valid DynamicTable must have the same height across all of its columns.

Constructing multidimensional columns

% Define 1D column
simple_col = types.hdmf_common.VectorData( ...
'description', '1D column',...
'data', rand(10,1) ...
);
% Define ND column
multi_col = types.hdmf_common.VectorData( ...
'description', 'multidimensional column',...
'data', rand(3,2,10) ...
);
% construct table
multi_dim_table = types.hdmf_common.DynamicTable( ...
'description','test table', ...
'colnames', {'simple','multi'}, ...
'simple', simple_col, ...
'multi', multi_col, ...
'id', types.hdmf_common.ElementIdentifiers('data', (0:9)') ... % 0-indexed, for compatibility with Python
);
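As a quick check of the height convention described above, the size of the last dimension of the multidimensional column must match the number of rows of the 1D column (a sketch using the columns just defined):
% both expressions evaluate to 10, the height of the table
size(multi_col.data, ndims(multi_col.data))
size(simple_col.data, 1)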
 

Multidimensional ragged array columns

DynamicTable objects with multidimensional ragged array columns can be constructed by passing in the corresponding VectorIndex column:
% Define column with data
multi_ragged_col = types.hdmf_common.VectorData( ...
'description', 'multidimensional ragged array column',...
'data', rand(2,3,5) ...
);
% Define column with VectorIndex
multi_ragged_index = types.hdmf_common.VectorIndex( ...
'description', 'index to multi_ragged_col', ...
'target', types.untyped.ObjectView(multi_ragged_col), 'data', [2; 3; 5] ...
);
 
multi_ragged_table = types.hdmf_common.DynamicTable( ...
'description','test table', ...
'colnames', {'multi_ragged'}, ...
'multi_ragged', multi_ragged_col, ...
'multi_ragged_index', multi_ragged_index, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0; 1; 2]) ... % 0-indexed, for compatibility with Python
);
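Rows of the ragged column can then be retrieved with getRow, just as for 1D ragged array columns (a sketch; each row is a slab along the last dimension):
multi_ragged_table.getRow(2) % row 2 covers element 3 along the third dimension, per the VectorIndex data [2; 3; 5]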

Adding rows to multidimensional array columns

DynamicTable objects with multidimensional array columns can also be constructed by adding a single row at a time. This method makes use of DataPipe objects because MATLAB does not support singleton dimensions for arrays with more than 2 dimensions. The code block below demonstrates how to build a DynamicTable object with a multidimensional ragged array column in this manner.
% Create file
file = NwbFile( ...
'session_start_time', datetime('2021-01-01 00:00:00', 'TimeZone', 'local'), ...
'identifier', 'ident1', ...
'session_description', 'test_file' ...
);
 
% Define Vector Data Objects with first row of table
start_time_exp = types.hdmf_common.VectorData( ...
'description', 'start times column', ...
'data', types.untyped.DataPipe( ...
'data', 1, ...
'maxSize', Inf ...
) ...
);
stop_time_exp = types.hdmf_common.VectorData( ...
'description', 'stop times column', ...
'data', types.untyped.DataPipe( ...
'data', 10, ...
'maxSize', Inf ...
) ...
);
random_exp = types.hdmf_common.VectorData( ...
'description', 'random data column', ...
'data', types.untyped.DataPipe( ...
'data', rand(3,2,5), ... % random data
'maxSize', [3, 2, Inf], ...
'axis', 3 ...
) ...
);
random_exp_index = types.hdmf_common.VectorIndex( ...
'description', 'index to random data column', ...
'target', types.untyped.ObjectView(random_exp), ...
'data', types.untyped.DataPipe( ...
'data', uint64(5), ...
'maxSize', Inf ...
) ...
);
ids_exp = types.hdmf_common.ElementIdentifiers( ...
'data', types.untyped.DataPipe( ...
'data', int64(0), ... % data must be numerical
'maxSize', Inf ...
) ...
);
% Create expandable table
colnames = {'start_time', 'stop_time', 'randomvalues'};
file.intervals_trials = types.core.TimeIntervals( ...
'description', 'test expandable dynamic table', ...
'colnames', colnames, ...
'start_time', start_time_exp, ...
'stop_time', stop_time_exp, ...
'randomvalues', random_exp, ...
'randomvalues_index', random_exp_index, ...
'id', ids_exp ...
);
% Export file
nwbExport(file, 'multiRaggedExpandableTableTest.nwb');
% Read in file
read_file = nwbRead('multiRaggedExpandableTableTest.nwb', 'ignorecache');
% add individual rows
read_file.intervals_trials.addRow( ...
'start_time', 2, ...
'stop_time', 20, ...
'randomvalues', rand(3,2,6), ...
'id', 1 ...
);
read_file.intervals_trials.addRow( ...
'start_time', 3, ...
'stop_time', 30, ...
'randomvalues', rand(3,2,3), ...
'id', 2 ...
);
read_file.intervals_trials.addRow( ...
'start_time', 4, ...
'stop_time', 40, ...
'randomvalues', rand(3,2,8), ...
'id', 3 ...
);
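As in the earlier expandable-table example, export the file again to write the appended rows to disk:
nwbExport(read_file, 'multiRaggedExpandableTableTest.nwb');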
 

Learn More!

Python Tutorial



Intracellular electrophysiology

The following tutorial describes storage of intracellular electrophysiology data in NWB. NWB supports storage of the time series describing the stimulus and response, information about the electrode and device used, as well as metadata about the organization of the experiment.
Illustration of the hierarchy of metadata tables used to describe the organization of intracellular electrophysiology experiments.

Creating an NWBFile

When creating an NWB file, the first step is to create the NWBFile object. You can do so using the NwbFile command.
session_start_time = datetime(2018, 3, 1, 12, 0, 0, 'TimeZone', 'local');
 
 
nwbfile = NwbFile( ...
'session_description', 'my first synthetic recording', ...
'identifier', 'EXAMPLE_ID', ...
'session_start_time', session_start_time, ...
'general_experimenter', 'Dr. Bilbo Baggins', ...
'general_lab', 'Bag End Laboratory', ...
'general_institution', 'University of Middle Earth at the Shire', ...
'general_experiment_description', 'I went on an adventure with thirteen dwarves to reclaim vast treasures.', ...
'general_session_id', 'LONELYMTN' ...
);
 

Device metadata

Device metadata is represented by Device objects.
 
device = types.core.Device();
nwbfile.general_devices.set('Heka ITC-1600', device);

Electrode metadata

Intracellular electrode metadata is represented by IntracellularElectrode objects. Create an electrode object, which requires a link to the device of the previous step. Then add it to the NWB file.
electrode = types.core.IntracellularElectrode( ...
'description', 'a mock intracellular electrode', ...
'device', types.untyped.SoftLink(device), ...
'cell_id', 'a very interesting cell' ...
);
nwbfile.general_intracellular_ephys.set('elec0', electrode);

Stimulus and response data

Intracellular stimulus and response data are represented with subclasses of PatchClampSeries. A stimulus is described by a time series representing voltage or current stimulation with a particular set of parameters. There are two classes for representing stimulus data: VoltageClampStimulusSeries and CurrentClampStimulusSeries.
The response is then described by a time series representing voltage or current recorded from a single cell using a single intracellular electrode via one of the following classes: VoltageClampSeries, CurrentClampSeries, or IZeroClampSeries.
Below we create a simple example stimulus/response recording data pair for a voltage clamp recording.
ccss = types.core.VoltageClampStimulusSeries( ...
'data', [1, 2, 3, 4, 5], ...
'starting_time', 123.6, ...
'starting_time_rate', 10e3, ...
'electrode', types.untyped.SoftLink(electrode), ...
'gain', 0.02, ...
'sweep_number', uint64(15), ...
'stimulus_description', 'N/A' ...
);
nwbfile.stimulus_presentation.set('ccss', ccss);
 
vcs = types.core.VoltageClampSeries( ...
'data', [0.1, 0.2, 0.3, 0.4, 0.5], ...
'data_conversion', 1e-12, ...
'data_resolution', NaN, ...
'starting_time', 123.6, ...
'starting_time_rate', 20e3, ...
'electrode', types.untyped.SoftLink(electrode), ...
'gain', 0.02, ...
'capacitance_slow', 100e-12, ...
'resistance_comp_correction', 70.0, ...
'stimulus_description', 'N/A', ...
'sweep_number', uint64(15) ...
);
nwbfile.acquisition.set('vcs', vcs);
You can add a stimulus/response recording data pair from a current clamp recording in the same way:
% Create a CurrentClampStimulusSeries object
ccss = types.core.CurrentClampStimulusSeries(...
'data', [1, 2, 3, 4, 5], ...
'starting_time', 123.6, ...
'starting_time_rate', 10e3, ...
'electrode', types.untyped.SoftLink(electrode), ...
'gain', 0.02, ...
'sweep_number', uint16(16), ...
'stimulus_description', 'N/A' ...
);
nwbfile.stimulus_presentation.set('ccss', ccss);
 
% Create a CurrentClampSeries object
ccs = types.core.CurrentClampSeries(...
'data', [0.1, 0.2, 0.3, 0.4, 0.5], ...
'data_conversion', 1e-12, ...
'data_resolution', NaN, ...
'starting_time', 123.6, ...
'starting_time_rate', 20e3, ...
'electrode', types.untyped.SoftLink(electrode), ...
'gain', 0.02, ...
'bias_current', 1e-12, ...
'bridge_balance', 70e6, ...
'capacitance_compensation', 1e-12, ...
'stimulus_description', 'N/A', ...
'sweep_number', uint16(16) ...
);
nwbfile.acquisition.set('ccs', ccs);
 
IZeroClampSeries is used when the current is clamped to 0.
% Create an IZeroClampSeries object
izcs = types.core.IZeroClampSeries(...
'data', [0.1, 0.2, 0.3, 0.4, 0.5], ...
'electrode', types.untyped.SoftLink(electrode), ...
'gain', 0.02, ...
'data_conversion', 1e-12, ...
'data_resolution', NaN, ...
'starting_time', 345.6, ...
'starting_time_rate', 20e3, ...
'sweep_number', uint16(17) ...
);
nwbfile.acquisition.set('izcs', izcs);

Adding an intracellular recording

The IntracellularRecordingsTable relates electrode, stimulus and response pairs and describes metadata specific to individual recordings.
Illustration of the structure of the IntracellularRecordingsTable
ic_rec_table = types.core.IntracellularRecordingsTable( ...
'categories', {'electrodes', 'stimuli', 'responses'}, ...
'colnames', {'recordings_tag'}, ...
'description', [ ...
'A table to group together a stimulus and response from a single ', ...
'electrode and a single simultaneous recording and for storing ', ...
'metadata about the intracellular recording.'], ...
'id', types.hdmf_common.ElementIdentifiers('data', int64([0, 1, 2])), ...
'recordings_tag', types.hdmf_common.VectorData( ...
'data', repmat({'Tag'}, 3, 1), ...
'description', 'Column for storing a custom recordings tag' ...
) ...
);
 
ic_rec_table.electrodes = types.core.IntracellularElectrodesTable( ...
'description', 'Table for storing intracellular electrode related metadata.', ...
'colnames', {'electrode'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64([0, 1, 2]) ...
), ...
'electrode', types.hdmf_common.VectorData( ...
'data', repmat(types.untyped.ObjectView(electrode), 3, 1), ...
'description', 'Column for storing the reference to the intracellular electrode' ...
) ...
);
 
ic_rec_table.stimuli = types.core.IntracellularStimuliTable( ...
'description', 'Table for storing intracellular stimulus related metadata.', ...
'colnames', {'stimulus'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64([0, 1, 2]) ...
), ...
'stimulus', types.core.TimeSeriesReferenceVectorData( ...
'description', 'Column storing the reference to the recorded stimulus for the recording (rows)', ...
'data', struct( ...
'idx_start', [0, 1, -1], ...
'count', [5, 3, -1], ...
'timeseries', [ ...
types.untyped.ObjectView(ccss), ...
types.untyped.ObjectView(ccss), ...
types.untyped.ObjectView(vcs) ...
] ...
)...
)...
);
 
ic_rec_table.responses = types.core.IntracellularResponsesTable( ...
'description', 'Table for storing intracellular response related metadata.', ...
'colnames', {'response'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64([0, 1, 2]) ...
), ...
'response', types.core.TimeSeriesReferenceVectorData( ...
'description', 'Column storing the reference to the recorded response for the recording (rows)', ...
'data', struct( ...
'idx_start', [0, 2, 0], ...
'count', [5, 3, 5], ...
'timeseries', [ ...
types.untyped.ObjectView(vcs), ...
types.untyped.ObjectView(vcs), ...
types.untyped.ObjectView(vcs) ...
] ...
)...
)...
);
 
The IntracellularRecordingsTable is not just a DynamicTable but an AlignedDynamicTable. The AlignedDynamicTable type is itself a DynamicTable that may contain an arbitrary number of additional DynamicTable objects, each of which defines a "category." This is similar to a table with "sub-headings". In the case of the IntracellularRecordingsTable, there are three predefined categories: electrodes, stimuli, and responses. We can also dynamically add new categories to the table. Because each category corresponds to a DynamicTable, this means we have to create a new DynamicTable and add it to our table.
% add category
ic_rec_table.categories = [ic_rec_table.categories, {'recording_lab_data'}];
ic_rec_table.dynamictable.set( ...
'recording_lab_data', types.hdmf_common.DynamicTable( ...
'description', 'category table for lab-specific recording metadata', ...
'colnames', {'location'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64([0, 1, 2]) ...
), ...
'location', types.hdmf_common.VectorData( ...
'data', {'Mordor', 'Gondor', 'Rohan'}, ...
'description', 'Recording location in Middle Earth' ...
) ...
) ...
);
In an AlignedDynamicTable all category tables must align with the main table, i.e., all tables must have the same number of rows and rows are expected to correspond to each other by index.
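A minimal sanity check of this alignment, using the category added above (a sketch, not part of the tutorial's required steps):
nMain = length(ic_rec_table.id.data); % rows in the main table
nCategory = length(ic_rec_table.dynamictable.get('recording_lab_data').id.data);
assert(nMain == nCategory, 'Category tables must match the height of the main table.')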
We can also add custom columns to any of the subcategory tables, i.e., the electrodes, stimuli, and responses tables, and any custom subcategory tables. All we need to do is indicate the name of the category we want to add the column to.
% Add voltage threshold as column of electrodes table
ic_rec_table.electrodes.colnames = [ic_rec_table.electrodes.colnames {'voltage_threshold'}];
ic_rec_table.electrodes.vectordata.set('voltage_threshold', types.hdmf_common.VectorData( ...
'data', [0.1, 0.12, 0.13], ...
'description', 'Just an example column on the electrodes category table' ...
) ...
);
 
nwbfile.general_intracellular_ephys_intracellular_recordings = ic_rec_table;

Hierarchical organization of recordings

To describe the organization of intracellular experiments, the metadata is organized hierarchically in a sequence of tables. All of the tables are so-called DynamicTables, enabling users to add columns for custom metadata. Storing data in hierarchical tables has the advantage that it allows us to avoid duplication of metadata. For example, for a single experiment we only need to describe the metadata that is constant across an experimental condition as a single row in the SimultaneousRecordingsTable, without having to replicate the same information across all repetitions and sequential-, simultaneous-, and individual intracellular recordings. For analysis, this means that we can easily focus on individual aspects of an experiment while still being able to easily access information from related tables. All of these tables are optional, but to use one you must use all of the lower-level tables, even if you only need a single row.

Add a simultaneous recording

The SimultaneousRecordingsTable groups intracellular recordings from the IntracellularRecordingsTable together that were recorded simultaneously from different electrodes and/or cells and describes metadata that is constant across the simultaneous recordings. In practice a simultaneous recording is often also referred to as a sweep. This example adds a custom column, "simultaneous_recording_tag."
% create simultaneous recordings table with custom column
% 'simultaneous_recording_tag'
 
[recordings_vector_data, recordings_vector_index] = util.create_indexed_column( ...
{[0, 1, 2],}, ...
'Column with references to one or more rows in the IntracellularRecordingsTable table', ...
ic_rec_table);
 
ic_sim_recs_table = types.core.SimultaneousRecordingsTable( ...
'description', [ ...
'A table for grouping different intracellular recordings from ', ...
'the IntracellularRecordingsTable table together that were recorded ', ...
'simultaneously from different electrodes.'...
], ...
'colnames', {'recordings', 'simultaneous_recording_tag'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64(12) ...
), ...
'recordings', recordings_vector_data, ...
'recordings_index', recordings_vector_index, ...
'simultaneous_recording_tag', types.hdmf_common.VectorData( ...
'description', 'A custom tag for simultaneous_recordings', ...
'data', {'LabTag1'} ...
) ...
);
 
Depending on the lab workflow, it may be useful to add complete columns to a table after we have already populated the table with rows. That would be done like so:
ic_sim_recs_table.colnames = [ic_sim_recs_table.colnames, {'simultaneous_recording_type'}];
ic_sim_recs_table.vectordata.set( ...
'simultaneous_recording_type', types.hdmf_common.VectorData(...
'description', 'Description of the type of simultaneous_recording', ...
'data', {'SimultaneousRecordingType1'} ...
) ...
);
 
nwbfile.general_intracellular_ephys_simultaneous_recordings = ic_sim_recs_table;

Add a sequential recording

The SequentialRecordingsTable groups simultaneously recorded intracellular recordings from the SimultaneousRecordingsTable together and describes metadata that is constant across the simultaneous recordings. In practice, a sequential recording is often also referred to as a sweep sequence. A common use of sequential recordings is to group together simultaneous recordings in which a sequence of stimuli of the same type with varying parameters has been presented (e.g., a sequence of square waveforms with varying amplitude).
[simultaneous_recordings_vector_data, simultaneous_recordings_vector_index] = util.create_indexed_column( ...
{0,}, ...
'Column with references to one or more rows in the SimultaneousRecordingsTable table', ...
ic_sim_recs_table);
 
sequential_recordings = types.core.SequentialRecordingsTable( ...
'description', [ ...
'A table for grouping different intracellular recording ', ...
'simultaneous_recordings from the SimultaneousRecordingsTable ', ...
'table together. This is typically used to group together ', ...
'simultaneous_recordings where a sequence of stimuli of ', ...
'the same type with varying parameters have been presented in ', ...
'a sequence.' ...
], ...
'colnames', {'simultaneous_recordings', 'stimulus_type'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64(15) ...
), ...
'simultaneous_recordings', simultaneous_recordings_vector_data, ...
'simultaneous_recordings_index', simultaneous_recordings_vector_index, ...
'stimulus_type', types.hdmf_common.VectorData( ...
'description', 'Column storing the type of stimulus used for the sequential recording', ...
'data', {'square'} ...
) ...
);
 
nwbfile.general_intracellular_ephys_sequential_recordings = sequential_recordings;

Add repetitions table

The RepetitionsTable groups sequential recordings from the SequentialRecordingsTable. In practice, a repetition is often also referred to as a run. A typical use of the RepetitionsTable is to group sets of different stimuli that are applied in sequence and that may be repeated.
[sequential_recordings_vector_data, sequential_recordings_vector_index] = util.create_indexed_column( ...
{0,}, ...
'Column with references to one or more rows in the SequentialRecordingsTable table', ...
sequential_recordings);
 
 
nwbfile.general_intracellular_ephys_repetitions = types.core.RepetitionsTable( ...
'description', [ ...
'A table for grouping different intracellular recording sequential ', ...
'recordings together. With each SimultaneousRecording typically ', ...
'representing a particular type of stimulus, the RepetitionsTable ', ...
'table is typically used to group sets of stimuli applied in sequence.' ...
], ...
'colnames', {'sequential_recordings'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64(17) ...
), ...
'sequential_recordings', sequential_recordings_vector_data, ...
'sequential_recordings_index', sequential_recordings_vector_index ...
);

Add experimental condition table

The ExperimentalConditionsTable groups repetitions of intracellular recordings from the RepetitionsTable together that belong to the same experimental conditions.
[repetitions_vector_data, repetitions_vector_index] = util.create_indexed_column( ...
{0, 0}, ...
'Column with references to one or more rows in the RepetitionsTable table', ...
nwbfile.general_intracellular_ephys_repetitions);
 
nwbfile.general_intracellular_ephys_experimental_conditions = types.core.ExperimentalConditionsTable( ...
'description', [ ...
'A table for grouping different intracellular recording ', ...
'repetitions together that belong to the same experimental ', ...
'conditions.' ...
], ...
'colnames', {'repetitions', 'tag'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64([19, 21]) ...
), ...
'repetitions', repetitions_vector_data, ...
'repetitions_index', repetitions_vector_index, ...
'tag', types.hdmf_common.VectorData( ...
'description', 'integer tag for an experimental condition', ...
'data', [1,3] ...
) ...
);

Write the NWB file

nwbExport(nwbfile, 'test_new_icephys.nwb');

Read the NWB file

nwbfile2 = nwbRead('test_new_icephys.nwb', 'ignorecache')
nwbfile2 =
NwbFile with properties:

    nwb_version: '2.7.0'
    file_create_date: [1×1 types.untyped.DataStub]
    identifier: 'EXAMPLE_ID'
    session_description: 'my first synthetic recording'
    session_start_time: [1×1 types.untyped.DataStub]
    timestamps_reference_time: [1×1 types.untyped.DataStub]
    acquisition: [3×1 types.untyped.Set]
    analysis: [0×1 types.untyped.Set]
    general: [0×1 types.untyped.Set]
    general_data_collection: ''
    general_devices: [1×1 types.untyped.Set]
    general_experiment_description: 'I went on an adventure with thirteen dwarves to reclaim vast treasures.'
    general_experimenter: [1×1 types.untyped.DataStub]
    general_extracellular_ephys: [0×1 types.untyped.Set]
    general_extracellular_ephys_electrodes: []
    general_institution: 'University of Middle Earth at the Shire'
    general_intracellular_ephys: [1×1 types.untyped.Set]
    general_intracellular_ephys_experimental_conditions: [1×1 types.core.ExperimentalConditionsTable]
    general_intracellular_ephys_filtering: ''
    general_intracellular_ephys_intracellular_recordings: [1×1 types.core.IntracellularRecordingsTable]
    general_intracellular_ephys_repetitions: [1×1 types.core.RepetitionsTable]
    general_intracellular_ephys_sequential_recordings: [1×1 types.core.SequentialRecordingsTable]
    general_intracellular_ephys_simultaneous_recordings: [1×1 types.core.SimultaneousRecordingsTable]
    general_intracellular_ephys_sweep_table: []
    general_keywords: ''
    general_lab: 'Bag End Laboratory'
    general_notes: ''
    general_optogenetics: [0×1 types.untyped.Set]
    general_optophysiology: [0×1 types.untyped.Set]
    general_pharmacology: ''
    general_protocol: ''
    general_related_publications: ''
    general_session_id: 'LONELYMTN'
    general_slices: ''
    general_source_script: ''
    general_source_script_file_name: ''
    general_stimulus: ''
    general_subject: []
    general_surgery: ''
    general_virus: ''
    intervals: [0×1 types.untyped.Set]
    intervals_epochs: []
    intervals_invalid_times: []
    ...
    stimulus_presentation: [1×1 types.untyped.Set]
    stimulus_templates: [0×1 types.untyped.Set]
    units: []
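After reading, typed tables come back as objects and their datasets as DataStub objects. A minimal sketch of pulling the custom recordings tag column back out (assuming the DataStub load method for reading data into memory):
ic_rec_read = nwbfile2.general_intracellular_ephys_intracellular_recordings;
tags = ic_rec_read.vectordata.get('recordings_tag').data.load() % {'Tag'; 'Tag'; 'Tag'}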


Storing Image Data in NWB

Image data can be a collection of individual images or movie segments (as a movie is simply a series of images), about the subject, the environment, the presented stimuli, or other parts related to the experiment. This tutorial focuses in particular on the usage of:

Create an NWB File

nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'timestamps_reference_time', datetime(2018, 4, 25, 3, 0, 45, 'TimeZone', 'local'), ...
'general_experimenter', 'LastName, FirstName', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', 'DOI:10.1016/j.neuron.2016.12.011' ... % optional
);
nwb
nwb =
NwbFile with properties:

    nwb_version: '2.7.0'
    file_create_date: []
    identifier: 'Mouse5_Day3'
    session_description: 'mouse in open exploration'
    ...
    stimulus_presentation: [0×1 types.untyped.Set]
    stimulus_templates: [0×1 types.untyped.Set]
    units: []

OpticalSeries: Storing series of images as stimuli

OpticalSeries is for time series of images that were presented to the subject as stimuli. We will create an OpticalSeries object with the name "StimulusPresentation" representing what images were shown to the subject and at what times.
Image data can be stored either in the HDF5 file or as an external image file. For this tutorial, we will use fake image data with shape of ('time', 'x', 'y', 'RGB') = (200, 50, 50, 3). As in all TimeSeries, the first dimension is time. The second and third dimensions represent x and y. The fourth dimension represents the RGB value (length of 3) for color images. Please note: As described in the dimensionMapNoDataPipes tutorial, when a MATLAB array is exported to HDF5, the array is transposed. Therefore, in order to correctly export the data, we will need to create a transposed array, where the dimensions are in reverse order compared to the type specification.
NWB differentiates between acquired data and data that was presented as stimulus. We can add it to the NWBFile object as stimulus data.
If the sampling rate is constant, use rate and starting_time to specify time. For irregularly sampled recordings, use timestamps to specify time for each sample image.
image_data = randi(255, [3, 50, 50, 200]); % NB: Array is transposed
optical_series = types.core.OpticalSeries( ...
'distance', 0.7, ... % required
'field_of_view', [0.2, 0.3, 0.7], ... % required
'orientation', 'lower left', ... % required
'data', image_data, ...
'data_unit', 'n.a.', ...
'starting_time_rate', 1.0, ...
'starting_time', 0.0, ...
'description', 'The images presented to the subject as stimuli' ...
);
 
nwb.stimulus_presentation.set('StimulusPresentation', optical_series);

AbstractFeatureSeries: Storing features of visual stimuli

While it is usually recommended to store the entire image data as an OpticalSeries, sometimes it is useful to store features of the visual stimuli instead of or in addition to the raw image data. For example, you may want to store the mean luminance of the image, the contrast, or the spatial frequency. This can be done using an instance of AbstractFeatureSeries. This class is a general container for storing time series of features that are derived from the raw image data.
% Create some fake feature data
feature_data = rand(3, 200); % 200 time points, 3 features
 
% Create an AbstractFeatureSeries object
abstract_feature_series = types.core.AbstractFeatureSeries( ...
'data', feature_data, ...
'timestamps', linspace(0, 1, 200), ...
'description', 'Features of the visual stimuli', ...
'features', {'luminance', 'contrast', 'spatial frequency'}, ...
'feature_units', {'n.a.', 'n.a.', 'cycles/degree'} ...
);
% Add the AbstractFeatureSeries to the NWBFile
nwb.stimulus_presentation.set('StimulusFeatures', abstract_feature_series);

ImageSeries: Storing series of images as acquisition

ImageSeries is a general container for time series of images acquired during the experiment. Image data can be stored either in the HDF5 file or as an external image file. When color images are stored in the HDF5 file the color channel order is expected to be RGB.
image_data = randi(255, [3, 50, 50, 200]);
behavior_images = types.core.ImageSeries( ...
'data', image_data, ...
'description', 'Image data of an animal in environment', ...
'data_unit', 'n.a.', ...
'starting_time_rate', 1.0, ...
'starting_time', 0.0 ...
);
 
nwb.acquisition.set('ImageSeries', behavior_images);

External Files

External files (e.g., video files of the behaving animal) can be added to the NWBFile by creating an ImageSeries object using the external_file attribute, which specifies the path to the external file(s) on disk. The file path(s) must be relative to the path of the NWB file. Either external_file or data must be specified, but not both. external_file can be a cell array of multiple video files.
The starting_frame attribute serves as an index to indicate the starting frame of each external file, allowing you to skip the beginning of videos.
external_files = {'video1.mp4', 'video2.mp4'};
 
timestamps = [0.0, 0.04, 0.07, 0.1, 0.14, 0.16, 0.21];
behavior_external_file = types.core.ImageSeries( ...
'description', 'Behavior video of animal moving in environment', ...
'data_unit', 'n.a.', ...
'external_file', external_files, ...
'format', 'external', ...
'external_file_starting_frame', [0, 2, 4], ...
'timestamps', timestamps ...
);
 
nwb.acquisition.set('ExternalVideos', behavior_external_file);

Static Images

Static images can be stored in an NWBFile object by creating an RGBAImage, RGBImage or GrayscaleImage object with the image data. All of these image types provide an optional description parameter to include text description about the image and the resolution parameter to specify the pixels/cm resolution of the image.

RGBAImage: for color images with transparency

RGBAImage is for storing color image data with transparency. data must be 3D where the first and second dimensions represent x and y. The third dimension has length 4 and represents the RGBA value.
image_data = randi(255, [4, 200, 200]);
 
rgba_image = types.core.RGBAImage( ...
'data', image_data, ... % required
'resolution', 70.0, ...
'description', 'RGBA image' ...
);

RGBImage: for color images

RGBImage is for storing RGB color image data. data must be 3D where the first and second dimensions represent x and y. The third dimension has length 3 and represents the RGB value.
image_data = randi(255, [3, 200, 200]);
 
rgb_image = types.core.RGBImage( ...
'data', image_data, ... % required
'resolution', 70.0, ...
'description', 'RGB image' ...
);

GrayscaleImage: for grayscale images

GrayscaleImage is for storing grayscale image data. data must be 2D where the first and second dimensions represent x and y.
image_data = randi(255, [200, 200]);
 
grayscale_image = types.core.GrayscaleImage( ...
'data', image_data, ... % required
'resolution', 70.0, ...
'description', 'Grayscale image' ...
);

Images: a container for images

Add the images to an Images container that accepts any of these image types.
image_collection = types.core.Images( ...
'description', 'A collection of logo images presented to the subject.'...
);
 
image_collection.image.set('rgba_image', rgba_image);
image_collection.image.set('rgb_image', rgb_image);
image_collection.image.set('grayscale_image', grayscale_image);
 
nwb.acquisition.set('image_collection', image_collection);

Index Series for Repeated Images

You may want to set up a time series of images where some images are repeated many times. You could create an ImageSeries that repeats the data each time the image is shown, but that would be inefficient, because it would store the same data multiple times. A better solution would be to store the unique images once and reference those images. This is how IndexSeries works. First, create an Images container with the order of images defined using an ImageReferences. Then create an IndexSeries that indexes into the Images.
rgbImage = imread('street2.jpg');
grayImage = uint8(sum(double(rgbImage), 3) ./ double(max(max(max(rgbImage)))));
GsStreet = types.core.GrayscaleImage(...
'data', grayImage, ...
'description', 'grayscale image of a street.', ...
'resolution', 28 ...
);
 
RgbStreet = types.core.RGBImage( ...
'data', rgbImage, ...
'resolution', 28, ...
'description', 'RGB Street' ...
);
 
ImageOrder = types.core.ImageReferences(...
'data', [types.untyped.ObjectView(RgbStreet), types.untyped.ObjectView(GsStreet)] ...
);
Images = types.core.Images( ...
'gs_street', GsStreet, ...
'rgb_street', RgbStreet, ...
'description', 'A collection of streets.', ...
'order_of_images', ImageOrder ...
);
 
types.core.IndexSeries(...
'data', [0, 1, 0, 1], ... % NOTE: 0-indexed
'indexed_images', Images, ...
'timestamps', [0.1, 0.2, 0.3, 0.4] ...
)
ans =
  IndexSeries with properties:

    indexed_images: [1×1 types.core.Images]
    indexed_timeseries: []
    ...
    starting_time: []
    starting_time_rate: []
    timestamps: [0.1000 0.2000 0.3000 0.4000]
Here, data contains the (0-indexed) indices of the displayed images, in the order they appear in the ImageReferences object.
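As an illustrative aside (not part of the original tutorial), if the IndexSeries above is assigned to a variable, an index can be mapped back to the image it references; the names idx_series, shownIdx, and shownImage below are hypothetical:
idx_series = types.core.IndexSeries( ...
'data', [0, 1, 0, 1], ... % NOTE: 0-indexed
'indexed_images', Images, ...
'timestamps', [0.1, 0.2, 0.3, 0.4]);
 
shownIdx = idx_series.data(3); % 0-based index at the 3rd time point
shownImage = ImageOrder.data(shownIdx + 1); % ObjectView of the displayed image (MATLAB indexing is 1-based)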

Writing the images to an NWB File

Now use nwbExport to write the file.
nwbExport(nwb, "images_test.nwb");
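As a quick sanity check (an addition to the tutorial; read_images_nwb is an illustrative variable name), the file can be read back and the stored stimulus retrieved:
read_images_nwb = nwbRead("images_test.nwb", 'ignorecache');
read_images_nwb.stimulus_presentation.get('StimulusPresentation')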


MatNWB Optical Physiology Tutorial

Introduction

In this tutorial, we will create fake data for a hypothetical optical physiology experiment with a freely moving animal. The types of data we will convert are:
  • Acquired two-photon images
  • Image segmentation (ROIs)
  • Fluorescence and dF/F response
It is recommended to first work through the Introduction to MatNWB tutorial, which demonstrates installing MatNWB and creating an NWB file with subject information, animal position, and trials, as well as writing and reading NWB files in MATLAB.
Please note: The dimensions of time series data in MatNWB should be defined in the opposite order of how they are defined in the NWB schema. In NWB, time is always stored in the first dimension of the data, whereas in MatNWB data should be specified with time along the last dimension. This is explained in more detail here: MatNWB <-> HDF5 Dimension Mapping.
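For a concrete sketch (not part of the original tutorial): suppose the schema defines a TimeSeries of 10 channels sampled at 1000 time points, i.e. shape (time, channel) = (1000, 10). Because MatNWB expects time along the last dimension, the MATLAB array should be channel-by-time:
data_matlab = rand(10, 1000); % channels x time; time is the last dimension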

Set up the NWB file

An NWB file represents a single session of an experiment. Each file must have a session_description, identifier, and session start time. Create a new NWBFile object with those and additional metadata. For all MatNWB functions, we use the Matlab method of entering keyword argument pairs, where arguments are entered as name followed by value.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'timestamps_reference_time', datetime(2018, 4, 25, 3, 0, 45, 'TimeZone', 'local'), ...
'general_experimenter', 'LastName, FirstName', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', {'DOI:10.1016/j.neuron.2016.12.011'}); % optional
nwb
nwb =
NwbFile with properties:

    nwb_version: '2.8.0'
    file_create_date: []
    identifier: 'Mouse5_Day3'
    session_description: 'mouse in open exploration'
    ...
    general_subject: []
    general_surgery: ''
    general_virus: ''
    general_was_generated_by: ''
    intervals: [0×1 types.untyped.Set]
    intervals_epochs: []
    intervals_invalid_times: []
    ...
    stimulus_presentation: [0×1 types.untyped.Set]
    stimulus_templates: [0×1 types.untyped.Set]
    units: []


Optical Physiology

Optical physiology results are written in four steps:
  1. Create imaging plane
  2. Acquired two-photon images
  3. Image segmentation
  4. Fluorescence and dF/F responses

Imaging Plane

First, you must create an ImagingPlane object, which will hold information about the area and method used to collect the optical imaging data. This requires creation of a Device object for the microscope and an OpticalChannel object. Then you can create an ImagingPlane.
Create a Device representing a two-photon microscope. The fields description, manufacturer, model_number, model_name, and serial_number are optional, but recommended. Then create an OpticalChannel and add both of these to the ImagingPlane.
device = types.core.Device( ...
'description', 'My two-photon microscope', ...
'manufacturer', 'Loki Labs', ...
'model_number', 'ABC-123', ...
'model_name', 'Loki 1.0', ...
'serial_number', '1234567890');
 
% Add device to nwb object
nwb.general_devices.set('Device', device);
 
optical_channel = types.core.OpticalChannel( ...
'description', 'description', ...
'emission_lambda', 500.);
 
imaging_plane_name = 'imaging_plane';
imaging_plane = types.core.ImagingPlane( ...
'optical_channel', optical_channel, ...
'description', 'a very interesting part of the brain', ...
'device', types.untyped.SoftLink(device), ...
'excitation_lambda', 600., ...
'imaging_rate', 5., ...
'indicator', 'GFP', ...
'location', 'my favorite brain location');
 
nwb.general_optophysiology.set(imaging_plane_name, imaging_plane);

Storing Two-Photon Data

You can create a TwoPhotonSeries object to represent two-photon imaging data. TwoPhotonSeries, like SpatialSeries, inherits from TimeSeries and behaves similarly to OnePhotonSeries.
InternalTwoPhoton = types.core.TwoPhotonSeries( ...
'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
'starting_time', 0.0, ...
'starting_time_rate', 3.0, ...
'data', ones(200, 100, 1000), ...
'data_unit', 'lumens');
 
nwb.acquisition.set('2pInternal', InternalTwoPhoton);

Storing One-Photon Data

Now that we have our ImagingPlane, we can create a OnePhotonSeries object to store raw one-photon imaging data.
% using internal data. this data will be stored inside the NWB file
InternalOnePhoton = types.core.OnePhotonSeries( ...
'data', ones(100, 100, 1000), ...
'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
'starting_time', 0., ...
'starting_time_rate', 1.0, ...
'data_unit', 'normalized amplitude' ...
);
nwb.acquisition.set('1pInternal', InternalOnePhoton);

Motion Correction (optional)

You can also store the result of motion correction using a MotionCorrection object, a container type that can hold one or more CorrectedImageStack objects.
% Create the corrected ImageSeries
corrected = types.core.ImageSeries( ...
'description', 'A motion corrected image stack', ...
'data', ones(100, 100, 1000), ... % 3D data array
'data_unit', 'n/a', ...
'format', 'raw', ...
'starting_time', 0.0, ...
'starting_time_rate', 1.0 ...
);
 
% Create the xy_translation TimeSeries
xy_translation = types.core.TimeSeries( ...
'description', 'x,y translation in pixels', ...
'data', ones(2, 1000), ... % 2D data array
'data_unit', 'pixels', ...
'starting_time', 0.0, ...
'starting_time_rate', 1.0 ...
);
 
% Create the CorrectedImageStack
corrected_image_stack = types.core.CorrectedImageStack( ...
'corrected', corrected, ...
'original', types.untyped.SoftLink(InternalOnePhoton), ... % Ensure `InternalOnePhoton` exists
'xy_translation', xy_translation ...
);
 
% Create the MotionCorrection object
motion_correction = types.core.MotionCorrection();
motion_correction.correctedimagestack.set('CorrectedImageStack', corrected_image_stack);
The motion-corrected data is considered processed data and will be added to the processing field of the nwb object using a ProcessingModule called "ophys". First, create the ProcessingModule object and then add the motion_correction object to it, naming it "MotionCorrection".
ophys_module = types.core.ProcessingModule( ...
'description', 'Contains optical physiology data');
ophys_module.nwbdatainterface.set('MotionCorrection', motion_correction);
Finally, add the "ophys" ProcessingModule to the nwb (Note that we can continue adding objects to the "ophys" ProcessingModule without needing to explicitly update the nwb):
nwb.processing.set('ophys', ophys_module);
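As a brief aside (an observation about MatNWB's handle-class semantics, not a step in the tutorial): the module stored in nwb and the local variable refer to the same underlying object, which is why later additions to ophys_module propagate automatically.
nwb.processing.get('ophys') == ophys_module % true: both names refer to the same handle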

Plane Segmentation

Image segmentation stores the detected regions of interest in the TwoPhotonSeries data. ImageSegmentation allows you to have more than one segmentation by creating more PlaneSegmentation objects.

Regions of interest (ROIs)

ROIs can be added to a PlaneSegmentation either as an image_mask or as a pixel_mask. An image mask is an array that is the same size as a single frame of the TwoPhotonSeries, and it indicates where a single region of interest is. This image mask may be boolean or continuous between 0 and 1. A pixel_mask, on the other hand, is a list of indices (i.e., coordinates) and weights for the ROI. The pixel_mask is represented as a compound data type using a ragged array. Below is an example demonstrating how to create either an image_mask or a pixel_mask; changing the dropdown selection will update the PlaneSegmentation object accordingly.
selection = "Create Image Mask"; % "Create Image Mask" or "Create Pixel Mask"
 
% generate fake image_mask data
imaging_shape = [100, 100];
x = imaging_shape(1);
y = imaging_shape(2);
 
n_rois = 20;
image_mask = zeros(y, x, n_rois);
center = randi(90,2,n_rois);
for i = 1:n_rois
image_mask(center(1,i):center(1,i)+10, center(2,i):center(2,i)+10, i) = 1;
end
 
if selection == "Create Pixel Mask"
ind = find(image_mask);
[y_ind, x_ind, roi_ind] = ind2sub(size(image_mask), ind);
 
pixel_mask_struct = struct();
pixel_mask_struct.x = uint32(x_ind); % Add x coordinates to struct field x
pixel_mask_struct.y = uint32(y_ind); % Add y coordinates to struct field y
pixel_mask_struct.weight = single(ones(size(x_ind)));
% Create pixel mask vector data
pixel_mask = types.hdmf_common.VectorData(...
'data', struct2table(pixel_mask_struct), ...
'description', 'pixel masks');
 
% When creating a pixel mask, it is also necessary to specify a
% pixel_mask_index vector. See the documentation for ragged arrays linked
% above to learn more.
num_pixels_per_roi = zeros(n_rois, 1); % Column vector
for i_roi = 1:n_rois
num_pixels_per_roi(i_roi) = sum(roi_ind == i_roi);
end
 
pixel_mask_index = uint16(cumsum(num_pixels_per_roi)); % Note: Use an integer
% type that can accommodate the maximum value of the cumulative sum
 
% Create pixel_mask_index vector
pixel_mask_index = types.hdmf_common.VectorIndex(...
'description', 'Index into pixel_mask VectorData', ...
'data', pixel_mask_index, ...
'target', types.untyped.ObjectView(pixel_mask) );
 
plane_segmentation = types.core.PlaneSegmentation( ...
'colnames', {'pixel_mask'}, ...
'description', 'roi pixel position (x,y) and pixel weight', ...
'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
'pixel_mask_index', pixel_mask_index, ...
'pixel_mask', pixel_mask ...
);
 
else % selection == "Create Image Mask"
plane_segmentation = types.core.PlaneSegmentation( ...
'colnames', {'image_mask'}, ...
'description', 'output from segmenting my favorite imaging plane', ...
'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
'image_mask', types.hdmf_common.VectorData(...
'data', image_mask, ...
'description', 'image masks') ...
);
end

Adding ROIs to NWB file

Now create an ImageSegmentation object and put the plane_segmentation object inside of it, naming it "PlaneSegmentation".
img_seg = types.core.ImageSegmentation();
img_seg.planesegmentation.set('PlaneSegmentation', plane_segmentation);
Add the img_seg object to the "ophys" ProcessingModule we created before, naming it "ImageSegmentation".
ophys_module.nwbdatainterface.set('ImageSegmentation', img_seg);

Storing fluorescence of ROIs over time

Now that ROIs are stored, you can store fluorescence data for these regions of interest. This type of data is stored using the RoiResponseSeries class.
To create a RoiResponseSeries object, we will need to reference a set of rows from the PlaneSegmentation table to indicate which ROIs correspond to which rows of your recorded data matrix. This is done using a DynamicTableRegion, a type of link that references specific rows of a DynamicTable, such as a PlaneSegmentation table, by row indices.
First, we create a DynamicTableRegion that references the ROIs of the PlaneSegmentation table.
roi_table_region = types.hdmf_common.DynamicTableRegion( ...
'table', types.untyped.ObjectView(plane_segmentation), ...
'description', 'all_rois', ...
'data', (0:n_rois-1)'); % 0-indexed row indices into the PlaneSegmentation table
Then we create a RoiResponseSeries object to store fluorescence data for those ROIs.
roi_response_series = types.core.RoiResponseSeries( ...
'rois', roi_table_region, ...
'data', NaN(n_rois, 100), ... % [nRoi, nT]
'data_unit', 'lumens', ...
'starting_time_rate', 3.0, ...
'starting_time', 0.0);
To help data analysis and visualization tools know that this RoiResponseSeries object represents fluorescence data, we will store the RoiResponseSeries object inside of a Fluorescence object. Then we add the Fluorescence object into the same ProcessingModule named "ophys" that we created earlier.
fluorescence = types.core.Fluorescence();
fluorescence.roiresponseseries.set('RoiResponseSeries', roi_response_series);
 
ophys_module.nwbdatainterface.set('Fluorescence', fluorescence);
Tip: If you want to store dF/F data instead of fluorescence data, then store the RoiResponseSeries object in a DfOverF object, which works the same way as the Fluorescence class.
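A minimal sketch of that alternative (this block is not part of the original tutorial; dff_data holds hypothetical placeholder values):
dff_data = NaN(n_rois, 100); % placeholder dF/F values, [nRoi, nT]
dff_response_series = types.core.RoiResponseSeries( ...
'rois', roi_table_region, ...
'data', dff_data, ...
'data_unit', 'n.a.', ...
'starting_time_rate', 3.0, ...
'starting_time', 0.0);
 
df_over_f = types.core.DfOverF();
df_over_f.roiresponseseries.set('RoiResponseSeries', dff_response_series);
ophys_module.nwbdatainterface.set('DfOverF', df_over_f);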

Writing the NWB file

nwb_file_name = 'ophys_tutorial.nwb';
if isfile(nwb_file_name); delete(nwb_file_name); end
nwbExport(nwb, nwb_file_name);
Warning: The property "grid_spacing_unit" of type "types.core.ImagingPlane" was not exported to file location "/general/optophysiology/imaging_plane" because it depends on the property "grid_spacing" which is unset.
Warning: The property "origin_coords_unit" of type "types.core.ImagingPlane" was not exported to file location "/general/optophysiology/imaging_plane" because it depends on the property "origin_coords" which is unset.
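These warnings are informational. If desired (a sketch with illustrative values, not part of the original tutorial), setting grid_spacing and origin_coords on the ImagingPlane before calling nwbExport would silence them:
imaging_plane.grid_spacing = [2.0, 2.0]; % illustrative spacing values
imaging_plane.grid_spacing_unit = 'micrometers';
imaging_plane.origin_coords = [0.0, 0.0]; % illustrative coordinates
imaging_plane.origin_coords_unit = 'micrometers';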

Reading the NWB file

read_nwb = nwbRead(nwb_file_name, 'ignorecache');
Data arrays are read passively from the file. Calling TimeSeries.data does not read the data values, but presents an HDF5 object that can be indexed to read data.
read_nwb.processing.get('ophys').nwbdatainterface.get('Fluorescence')...
.roiresponseseries.get('RoiResponseSeries').data
ans =
DataStub with properties:

    filename: 'ophys_tutorial.nwb'
    path: '/processing/ophys/Fluorescence/RoiResponseSeries/data'
    dims: [20 100]
    ndims: 2
    dataType: 'double'
This allows you to conveniently work with datasets that are too large to fit in RAM all at once. Access the data in the matrix using the load method.
load with no input arguments reads the entire dataset:
read_nwb.processing.get('ophys').nwbdatainterface.get('Fluorescence'). ...
roiresponseseries.get('RoiResponseSeries').data.load
ans = 20×100
   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   ...
(20×100 matrix, all values NaN; output truncated)
If all you need is a section of the data, you can read only that section by indexing the DataStub object like a normal array in MATLAB. This will just read the selected region from disk into RAM. This technique is particularly useful if you are dealing with a large dataset that is too big to fit entirely into your available RAM.
read_nwb.processing.get('ophys'). ...
nwbdatainterface.get('Fluorescence'). ...
roiresponseseries.get('RoiResponseSeries'). ...
data(1:5, 1:10)
ans = 5×10
   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN
   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN
   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN
   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN
   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN
% read back the image/pixel masks and display the first roi
plane_segmentation = read_nwb.processing.get('ophys'). ...
nwbdatainterface.get('ImageSegmentation'). ...
planesegmentation.get('PlaneSegmentation');
 
if ~isempty(plane_segmentation.image_mask)
roi_mask = plane_segmentation.image_mask.data(:,:,1);
elseif ~isempty(plane_segmentation.pixel_mask)
row = plane_segmentation.getRow(1, 'columns', {'pixel_mask'});
pixel_mask = row.pixel_mask{1};
roi_mask = zeros(imaging_shape);
ind = sub2ind(imaging_shape, pixel_mask.y, pixel_mask.x);
roi_mask(ind) = pixel_mask.weight;
end
imshow(roi_mask)

Learn more!

See the API documentation to learn what data types are available.

Other MatNWB tutorials

Python tutorials

See our tutorials for more details about your data type.
Check out other tutorials that teach advanced NWB topics.