diff --git a/+types/+core/AbstractFeatureSeries.m b/+types/+core/AbstractFeatureSeries.m deleted file mode 100644 index 27e6aaf3..00000000 --- a/+types/+core/AbstractFeatureSeries.m +++ /dev/null @@ -1,141 +0,0 @@ -classdef AbstractFeatureSeries < types.core.TimeSeries & types.untyped.GroupClass -% ABSTRACTFEATURESERIES Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical. - - -% REQUIRED PROPERTIES -properties - features; % REQUIRED (char) Description of the features represented in TimeSeries::data. -end -% OPTIONAL PROPERTIES -properties - feature_units; % (char) Units of each feature. -end - -methods - function obj = AbstractFeatureSeries(varargin) - % ABSTRACTFEATURESERIES Constructor for AbstractFeatureSeries - varargin = [{'data_unit' 'see `feature_units`'} varargin]; - obj = obj@types.core.TimeSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'data',[]); - addParameter(p, 'data_unit',[]); - addParameter(p, 'feature_units',[]); - addParameter(p, 'features',[]); - misc.parseSkipInvalidName(p, varargin); - obj.data = p.Results.data; - obj.data_unit = p.Results.data_unit; - obj.feature_units = p.Results.feature_units; - obj.features = p.Results.features; - if strcmp(class(obj), 'types.core.AbstractFeatureSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - function set.feature_units(obj, val) - obj.feature_units = obj.validate_feature_units(val); - end - function set.features(obj, val) - obj.features = obj.validate_features(val); - end - %% VALIDATORS - - function val = validate_data(obj, val) - val = types.util.checkDtype('data', 'numeric', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[Inf,Inf], [Inf]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_data_unit(obj, val) - val = types.util.checkDtype('data_unit', 'char', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[1]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_feature_units(obj, val) - val = types.util.checkDtype('feature_units', 'char', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[Inf]}; - 
types.util.checkDims(valsz, validshapes); - end - function val = validate_features(obj, val) - val = types.util.checkDtype('features', 'char', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[Inf]}; - types.util.checkDims(valsz, validshapes); - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - if ~isempty(obj.feature_units) - if startsWith(class(obj.feature_units), 'types.untyped.') - refs = obj.feature_units.export(fid, [fullpath '/feature_units'], refs); - elseif ~isempty(obj.feature_units) - io.writeDataset(fid, [fullpath '/feature_units'], obj.feature_units, 'forceArray'); - end - end - if startsWith(class(obj.features), 'types.untyped.') - refs = obj.features.export(fid, [fullpath '/features'], refs); - elseif ~isempty(obj.features) - io.writeDataset(fid, [fullpath '/features'], obj.features, 'forceArray'); - end - end -end - -end \ No newline at end of file diff --git a/+types/+core/AnnotationSeries.m b/+types/+core/AnnotationSeries.m deleted file mode 100644 index c821b8d6..00000000 --- a/+types/+core/AnnotationSeries.m +++ /dev/null @@ -1,74 +0,0 @@ -classdef AnnotationSeries < types.core.TimeSeries & types.untyped.GroupClass -% ANNOTATIONSERIES Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way. 
- - - -methods - function obj = AnnotationSeries(varargin) - % ANNOTATIONSERIES Constructor for AnnotationSeries - varargin = [{'data_resolution' types.util.correctType(-1, 'single') 'data_unit' 'n/a'} varargin]; - obj = obj@types.core.TimeSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'data',[]); - addParameter(p, 'data_resolution',[]); - addParameter(p, 'data_unit',[]); - misc.parseSkipInvalidName(p, varargin); - obj.data = p.Results.data; - obj.data_resolution = p.Results.data_resolution; - obj.data_unit = p.Results.data_unit; - if strcmp(class(obj), 'types.core.AnnotationSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - - %% VALIDATORS - - function val = validate_data(obj, val) - val = types.util.checkDtype('data', 'char', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[Inf]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_data_resolution(obj, val) - if isequal(val, -1) - val = -1; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_resolution'' property of class ''AnnotationSeries'' because it is read-only.') - end - end - function val = validate_data_unit(obj, val) - if isequal(val, 'n/a') - val = 'n/a'; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''AnnotationSeries'' because it is read-only.') - end - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - end -end - -end \ No newline at end of file diff --git a/+types/+core/BehavioralEpochs.m b/+types/+core/BehavioralEpochs.m deleted file mode 100644 index c697b625..00000000 --- a/+types/+core/BehavioralEpochs.m +++ /dev/null @@ -1,50 +0,0 @@ -classdef BehavioralEpochs < types.core.NWBDataInterface & types.untyped.GroupClass -% BEHAVIORALEPOCHS TimeSeries for storing behavioral epochs. The objective of this and the other two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries) is to provide generic hooks for software tools/scripts. This allows a tool/script to take the output one specific interface (e.g., UnitTimes) and plot that data relative to another data modality (e.g., behavioral events) without having to define all possible modalities in advance. Declaring one of these interfaces means that one or more TimeSeries of the specified type is published. These TimeSeries should reside in a group having the same name as the interface. For example, if a BehavioralTimeSeries interface is declared, the module will have one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular events. BehavioralTimeSeries is for continuous data. - - -% OPTIONAL PROPERTIES -properties - intervalseries; % (IntervalSeries) IntervalSeries object containing start and stop times of epochs. 
-end - -methods - function obj = BehavioralEpochs(varargin) - % BEHAVIORALEPOCHS Constructor for BehavioralEpochs - obj = obj@types.core.NWBDataInterface(varargin{:}); - [obj.intervalseries, ivarargin] = types.util.parseConstrained(obj,'intervalseries', 'types.core.IntervalSeries', varargin{:}); - varargin(ivarargin) = []; - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - misc.parseSkipInvalidName(p, varargin); - if strcmp(class(obj), 'types.core.BehavioralEpochs') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - function set.intervalseries(obj, val) - obj.intervalseries = obj.validate_intervalseries(val); - end - %% VALIDATORS - - function val = validate_intervalseries(obj, val) - namedprops = struct(); - constrained = {'types.core.IntervalSeries'}; - types.util.checkSet('intervalseries', namedprops, constrained, val); - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - if ~isempty(obj.intervalseries) - refs = obj.intervalseries.export(fid, fullpath, refs); - end - end -end - -end \ No newline at end of file diff --git a/+types/+core/CurrentClampStimulusSeries.m b/+types/+core/CurrentClampStimulusSeries.m deleted file mode 100644 index ab8d7a4d..00000000 --- a/+types/+core/CurrentClampStimulusSeries.m +++ /dev/null @@ -1,50 +0,0 @@ -classdef CurrentClampStimulusSeries < types.core.PatchClampSeries & types.untyped.GroupClass -% CURRENTCLAMPSTIMULUSSERIES Stimulus current applied during current clamp recording. - - - -methods - function obj = CurrentClampStimulusSeries(varargin) - % CURRENTCLAMPSTIMULUSSERIES Constructor for CurrentClampStimulusSeries - varargin = [{'data_unit' 'amperes'} varargin]; - obj = obj@types.core.PatchClampSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'data',[]); - addParameter(p, 'data_unit',[]); - misc.parseSkipInvalidName(p, varargin); - obj.data = p.Results.data; - obj.data_unit = p.Results.data_unit; - if strcmp(class(obj), 'types.core.CurrentClampStimulusSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - - %% VALIDATORS - - function val = validate_data(obj, val) - - end - function val = validate_data_unit(obj, val) - if isequal(val, 'amperes') - val = 'amperes'; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''CurrentClampStimulusSeries'' because it is read-only.') - end - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.PatchClampSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - end -end - -end \ No newline at end of file diff --git a/+types/+core/IZeroClampSeries.m b/+types/+core/IZeroClampSeries.m deleted file mode 100644 index 880a25c2..00000000 --- a/+types/+core/IZeroClampSeries.m +++ /dev/null @@ -1,72 +0,0 @@ -classdef IZeroClampSeries < types.core.CurrentClampSeries & types.untyped.GroupClass -% IZEROCLAMPSERIES Voltage data from an intracellular recording when all current and amplifier settings are off (i.e., CurrentClampSeries fields will be zero). 
There is no CurrentClampStimulusSeries associated with an IZero series because the amplifier is disconnected and no stimulus can reach the cell. - - - -methods - function obj = IZeroClampSeries(varargin) - % IZEROCLAMPSERIES Constructor for IZeroClampSeries - varargin = [{'bias_current' types.util.correctType(0, 'single') 'bridge_balance' types.util.correctType(0, 'single') 'capacitance_compensation' types.util.correctType(0, 'single') 'stimulus_description' 'N/A'} varargin]; - obj = obj@types.core.CurrentClampSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'bias_current',[]); - addParameter(p, 'bridge_balance',[]); - addParameter(p, 'capacitance_compensation',[]); - addParameter(p, 'stimulus_description',[]); - misc.parseSkipInvalidName(p, varargin); - obj.bias_current = p.Results.bias_current; - obj.bridge_balance = p.Results.bridge_balance; - obj.capacitance_compensation = p.Results.capacitance_compensation; - obj.stimulus_description = p.Results.stimulus_description; - if strcmp(class(obj), 'types.core.IZeroClampSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - - %% VALIDATORS - - function val = validate_bias_current(obj, val) - if isequal(val, 0) - val = 0; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''bias_current'' property of class ''IZeroClampSeries'' because it is read-only.') - end - end - function val = validate_bridge_balance(obj, val) - if isequal(val, 0) - val = 0; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''bridge_balance'' property of class ''IZeroClampSeries'' because it is read-only.') - end - end - function val = validate_capacitance_compensation(obj, val) - if isequal(val, 0) - val = 0; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''capacitance_compensation'' property of class ''IZeroClampSeries'' because it is read-only.') - end - end - function val = validate_stimulus_description(obj, val) - if isequal(val, 'N/A') - val = 'N/A'; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''stimulus_description'' property of class ''IZeroClampSeries'' because it is read-only.') - end - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.CurrentClampSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - end -end - -end \ No newline at end of file diff --git a/+types/+core/ImageMaskSeries.m b/+types/+core/ImageMaskSeries.m deleted file mode 100644 index 4db9efbc..00000000 --- a/+types/+core/ImageMaskSeries.m +++ /dev/null @@ -1,56 +0,0 @@ -classdef ImageMaskSeries < types.core.ImageSeries & types.untyped.GroupClass -% IMAGEMASKSERIES DEPRECATED. An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed. 
- - -% OPTIONAL PROPERTIES -properties - masked_imageseries; % ImageSeries -end - -methods - function obj = ImageMaskSeries(varargin) - % IMAGEMASKSERIES Constructor for ImageMaskSeries - obj = obj@types.core.ImageSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'masked_imageseries',[]); - misc.parseSkipInvalidName(p, varargin); - obj.masked_imageseries = p.Results.masked_imageseries; - if strcmp(class(obj), 'types.core.ImageMaskSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - function set.masked_imageseries(obj, val) - obj.masked_imageseries = obj.validate_masked_imageseries(val); - end - %% VALIDATORS - - function val = validate_masked_imageseries(obj, val) - if isa(val, 'types.untyped.SoftLink') - if isprop(val, 'target') - types.util.checkDtype('masked_imageseries', 'types.core.ImageSeries', val.target); - end - else - val = types.util.checkDtype('masked_imageseries', 'types.core.ImageSeries', val); - if ~isempty(val) - val = types.untyped.SoftLink(val); - end - end - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.ImageSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - refs = obj.masked_imageseries.export(fid, [fullpath '/masked_imageseries'], refs); - end -end - -end \ No newline at end of file diff --git a/+types/+core/IntervalSeries.m b/+types/+core/IntervalSeries.m deleted file mode 100644 index 27fd745a..00000000 --- a/+types/+core/IntervalSeries.m +++ /dev/null @@ -1,74 +0,0 @@ -classdef IntervalSeries < types.core.TimeSeries & types.untyped.GroupClass -% INTERVALSERIES Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way. 
- - - -methods - function obj = IntervalSeries(varargin) - % INTERVALSERIES Constructor for IntervalSeries - varargin = [{'data_resolution' types.util.correctType(-1, 'single') 'data_unit' 'n/a'} varargin]; - obj = obj@types.core.TimeSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'data',[]); - addParameter(p, 'data_resolution',[]); - addParameter(p, 'data_unit',[]); - misc.parseSkipInvalidName(p, varargin); - obj.data = p.Results.data; - obj.data_resolution = p.Results.data_resolution; - obj.data_unit = p.Results.data_unit; - if strcmp(class(obj), 'types.core.IntervalSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - - %% VALIDATORS - - function val = validate_data(obj, val) - val = types.util.checkDtype('data', 'int8', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[Inf]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_data_resolution(obj, val) - if isequal(val, -1) - val = -1; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_resolution'' property of class ''IntervalSeries'' because it is read-only.') - end - end - function val = validate_data_unit(obj, val) - if isequal(val, 'n/a') - val = 'n/a'; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''IntervalSeries'' because it is read-only.') - end - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - end -end - -end \ No newline at end of file diff --git a/+types/+core/OpticalSeries.m b/+types/+core/OpticalSeries.m deleted file mode 100644 index 62a1fe82..00000000 --- a/+types/+core/OpticalSeries.m +++ /dev/null @@ -1,150 +0,0 @@ -classdef OpticalSeries < types.core.ImageSeries & types.untyped.GroupClass -% OPTICALSERIES Image data that is presented or recorded. A stimulus template movie will be stored only as an image. When the image is presented as stimulus, additional data is required, such as field of view (e.g., how much of the visual field the image covers, or how what is the area of the target being imaged). If the OpticalSeries represents acquired imaging data, orientation is also important. - - -% OPTIONAL PROPERTIES -properties - distance; % (single) Distance from camera/monitor to target/eye. - field_of_view; % (single) Width, height and depth of image, or imaged area, in meters. - orientation; % (char) Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference. 
-end - -methods - function obj = OpticalSeries(varargin) - % OPTICALSERIES Constructor for OpticalSeries - obj = obj@types.core.ImageSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'data',[]); - addParameter(p, 'distance',[]); - addParameter(p, 'field_of_view',[]); - addParameter(p, 'orientation',[]); - misc.parseSkipInvalidName(p, varargin); - obj.data = p.Results.data; - obj.distance = p.Results.distance; - obj.field_of_view = p.Results.field_of_view; - obj.orientation = p.Results.orientation; - if strcmp(class(obj), 'types.core.OpticalSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - function set.distance(obj, val) - obj.distance = obj.validate_distance(val); - end - function set.field_of_view(obj, val) - obj.field_of_view = obj.validate_field_of_view(val); - end - function set.orientation(obj, val) - obj.orientation = obj.validate_orientation(val); - end - %% VALIDATORS - - function val = validate_data(obj, val) - val = types.util.checkDtype('data', 'numeric', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[3,Inf,Inf,Inf], [Inf,Inf,Inf]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_distance(obj, val) - val = types.util.checkDtype('distance', 'single', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[1]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_field_of_view(obj, val) - val = types.util.checkDtype('field_of_view', 'single', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[3], [2]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_orientation(obj, val) - val = types.util.checkDtype('orientation', 'char', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[1]}; - types.util.checkDims(valsz, validshapes); - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.ImageSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - if ~isempty(obj.distance) - if startsWith(class(obj.distance), 'types.untyped.') - refs = obj.distance.export(fid, [fullpath '/distance'], refs); - elseif ~isempty(obj.distance) - io.writeDataset(fid, [fullpath '/distance'], obj.distance); - end - end - if ~isempty(obj.field_of_view) - if startsWith(class(obj.field_of_view), 'types.untyped.') - refs = obj.field_of_view.export(fid, [fullpath '/field_of_view'], refs); - elseif ~isempty(obj.field_of_view) - io.writeDataset(fid, [fullpath '/field_of_view'], obj.field_of_view, 
'forceArray'); - end - end - if ~isempty(obj.orientation) - if startsWith(class(obj.orientation), 'types.untyped.') - refs = obj.orientation.export(fid, [fullpath '/orientation'], refs); - elseif ~isempty(obj.orientation) - io.writeDataset(fid, [fullpath '/orientation'], obj.orientation); - end - end - end -end - -end \ No newline at end of file diff --git a/+types/+core/OptogeneticSeries.m b/+types/+core/OptogeneticSeries.m deleted file mode 100644 index b0a59fab..00000000 --- a/+types/+core/OptogeneticSeries.m +++ /dev/null @@ -1,86 +0,0 @@ -classdef OptogeneticSeries < types.core.TimeSeries & types.untyped.GroupClass -% OPTOGENETICSERIES An optogenetic stimulus. - - -% OPTIONAL PROPERTIES -properties - site; % OptogeneticStimulusSite -end - -methods - function obj = OptogeneticSeries(varargin) - % OPTOGENETICSERIES Constructor for OptogeneticSeries - varargin = [{'data_unit' 'watts'} varargin]; - obj = obj@types.core.TimeSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'data',[]); - addParameter(p, 'data_unit',[]); - addParameter(p, 'site',[]); - misc.parseSkipInvalidName(p, varargin); - obj.data = p.Results.data; - obj.data_unit = p.Results.data_unit; - obj.site = p.Results.site; - if strcmp(class(obj), 'types.core.OptogeneticSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - function set.site(obj, val) - obj.site = obj.validate_site(val); - end - %% VALIDATORS - - function val = validate_data(obj, val) - val = types.util.checkDtype('data', 'numeric', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[Inf,Inf], [Inf]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_data_unit(obj, val) - if isequal(val, 'watts') - val = 'watts'; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''OptogeneticSeries'' because it is read-only.') - end - end - function val = validate_site(obj, val) - if isa(val, 'types.untyped.SoftLink') - if isprop(val, 'target') - types.util.checkDtype('site', 'types.core.OptogeneticStimulusSite', val.target); - end - else - val = types.util.checkDtype('site', 'types.core.OptogeneticStimulusSite', val); - if ~isempty(val) - val = types.untyped.SoftLink(val); - end - end - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - refs = obj.site.export(fid, [fullpath '/site'], refs); - end -end - -end \ No newline at end of file diff --git a/+types/+core/RoiResponseSeries.m b/+types/+core/RoiResponseSeries.m deleted file mode 100644 index 452d27b1..00000000 --- a/+types/+core/RoiResponseSeries.m +++ /dev/null @@ -1,67 +0,0 @@ -classdef RoiResponseSeries < types.core.TimeSeries & types.untyped.GroupClass -% ROIRESPONSESERIES ROI responses over an imaging plane. The first dimension represents time. The second dimension, if present, represents ROIs. - - -% REQUIRED PROPERTIES -properties - rois; % REQUIRED (DynamicTableRegion) DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. 
-end - -methods - function obj = RoiResponseSeries(varargin) - % ROIRESPONSESERIES Constructor for RoiResponseSeries - obj = obj@types.core.TimeSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'data',[]); - addParameter(p, 'rois',[]); - misc.parseSkipInvalidName(p, varargin); - obj.data = p.Results.data; - obj.rois = p.Results.rois; - if strcmp(class(obj), 'types.core.RoiResponseSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - function set.rois(obj, val) - obj.rois = obj.validate_rois(val); - end - %% VALIDATORS - - function val = validate_data(obj, val) - val = types.util.checkDtype('data', 'numeric', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[Inf,Inf], [Inf]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_rois(obj, val) - val = types.util.checkDtype('rois', 'types.hdmf_common.DynamicTableRegion', val); - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - refs = obj.rois.export(fid, [fullpath '/rois'], refs); - end -end - -end \ No newline at end of file diff --git a/+types/+core/SpatialSeries.m b/+types/+core/SpatialSeries.m deleted file mode 100644 index ce027498..00000000 --- a/+types/+core/SpatialSeries.m +++ /dev/null @@ -1,109 +0,0 @@ -classdef SpatialSeries < types.core.TimeSeries & types.untyped.GroupClass -% SPATIALSERIES Direction, e.g., of gaze or travel, or position. The TimeSeries::data field is a 2D array storing position or direction relative to some reference frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries has a text dataset reference_frame that indicates the zero-position, or the zero-axes for direction. For example, if representing gaze direction, 'straight-ahead' might be a specific pixel on the monitor, or some other point in space. For position data, the 0,0 point might be the top-left corner of an enclosure, as viewed from the tracking camera. The unit of data will indicate how to interpret SpatialSeries values. - - -% OPTIONAL PROPERTIES -properties - reference_frame; % (char) Description defining what exactly 'straight-ahead' means. 
-end - -methods - function obj = SpatialSeries(varargin) - % SPATIALSERIES Constructor for SpatialSeries - varargin = [{'data_unit' 'meters'} varargin]; - obj = obj@types.core.TimeSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'data',[]); - addParameter(p, 'data_unit',[]); - addParameter(p, 'reference_frame',[]); - misc.parseSkipInvalidName(p, varargin); - obj.data = p.Results.data; - obj.data_unit = p.Results.data_unit; - obj.reference_frame = p.Results.reference_frame; - if strcmp(class(obj), 'types.core.SpatialSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - function set.reference_frame(obj, val) - obj.reference_frame = obj.validate_reference_frame(val); - end - %% VALIDATORS - - function val = validate_data(obj, val) - val = types.util.checkDtype('data', 'numeric', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[3,Inf], [2,Inf], [1,Inf], [Inf]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_data_unit(obj, val) - val = types.util.checkDtype('data_unit', 'char', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[1]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_reference_frame(obj, val) - val = types.util.checkDtype('reference_frame', 'char', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[1]}; - types.util.checkDims(valsz, validshapes); - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - if ~isempty(obj.reference_frame) - if startsWith(class(obj.reference_frame), 'types.untyped.') - refs = obj.reference_frame.export(fid, [fullpath '/reference_frame'], refs); - elseif ~isempty(obj.reference_frame) - io.writeDataset(fid, [fullpath '/reference_frame'], obj.reference_frame); - end - end - end -end - -end \ No newline at end of file diff --git a/+types/+core/SpikeEventSeries.m b/+types/+core/SpikeEventSeries.m deleted file mode 100644 index 95b53c18..00000000 --- a/+types/+core/SpikeEventSeries.m +++ /dev/null @@ -1,103 +0,0 @@ -classdef SpikeEventSeries < types.core.ElectricalSeries & types.untyped.GroupClass -% SPIKEEVENTSERIES Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode). 
- - - -methods - function obj = SpikeEventSeries(varargin) - % SPIKEEVENTSERIES Constructor for SpikeEventSeries - varargin = [{'data_unit' 'volts' 'timestamps_interval' types.util.correctType(1, 'int32') 'timestamps_unit' 'seconds'} varargin]; - obj = obj@types.core.ElectricalSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'data',[]); - addParameter(p, 'data_unit',[]); - addParameter(p, 'timestamps',[]); - addParameter(p, 'timestamps_interval',[]); - addParameter(p, 'timestamps_unit',[]); - misc.parseSkipInvalidName(p, varargin); - obj.data = p.Results.data; - obj.data_unit = p.Results.data_unit; - obj.timestamps = p.Results.timestamps; - obj.timestamps_interval = p.Results.timestamps_interval; - obj.timestamps_unit = p.Results.timestamps_unit; - if strcmp(class(obj), 'types.core.SpikeEventSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - - %% VALIDATORS - - function val = validate_data(obj, val) - val = types.util.checkDtype('data', 'numeric', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[Inf,Inf,Inf], [Inf,Inf]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_data_unit(obj, val) - if isequal(val, 'volts') - val = 'volts'; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''SpikeEventSeries'' because it is read-only.') - end - end - function val = validate_timestamps(obj, val) - val = types.util.checkDtype('timestamps', 'double', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[Inf]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_timestamps_interval(obj, val) - if isequal(val, 1) - val = 1; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''timestamps_interval'' property of class ''SpikeEventSeries'' because it is read-only.') - end - end - function val = validate_timestamps_unit(obj, val) - if isequal(val, 'seconds') - val = 'seconds'; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''timestamps_unit'' property of class ''SpikeEventSeries'' because it is read-only.') - end - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.ElectricalSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - end -end - -end \ No newline at end of file diff --git a/+types/+core/TwoPhotonSeries.m b/+types/+core/TwoPhotonSeries.m deleted file mode 100644 index b9eda919..00000000 --- a/+types/+core/TwoPhotonSeries.m +++ /dev/null @@ -1,141 +0,0 @@ -classdef TwoPhotonSeries < types.core.ImageSeries & types.untyped.GroupClass -% TWOPHOTONSERIES Image stack recorded over time from 2-photon microscope. - - -% OPTIONAL PROPERTIES -properties - field_of_view; % (single) Width, height and depth of image, or imaged area, in meters. - imaging_plane; % ImagingPlane - pmt_gain; % (single) Photomultiplier gain. - scan_line_rate; % (single) Lines imaged per second. 
This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. -end - -methods - function obj = TwoPhotonSeries(varargin) - % TWOPHOTONSERIES Constructor for TwoPhotonSeries - obj = obj@types.core.ImageSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'field_of_view',[]); - addParameter(p, 'imaging_plane',[]); - addParameter(p, 'pmt_gain',[]); - addParameter(p, 'scan_line_rate',[]); - misc.parseSkipInvalidName(p, varargin); - obj.field_of_view = p.Results.field_of_view; - obj.imaging_plane = p.Results.imaging_plane; - obj.pmt_gain = p.Results.pmt_gain; - obj.scan_line_rate = p.Results.scan_line_rate; - if strcmp(class(obj), 'types.core.TwoPhotonSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - function set.field_of_view(obj, val) - obj.field_of_view = obj.validate_field_of_view(val); - end - function set.imaging_plane(obj, val) - obj.imaging_plane = obj.validate_imaging_plane(val); - end - function set.pmt_gain(obj, val) - obj.pmt_gain = obj.validate_pmt_gain(val); - end - function set.scan_line_rate(obj, val) - obj.scan_line_rate = obj.validate_scan_line_rate(val); - end - %% VALIDATORS - - function val = validate_field_of_view(obj, val) - val = types.util.checkDtype('field_of_view', 'single', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[3], [2]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_imaging_plane(obj, val) - if isa(val, 'types.untyped.SoftLink') - if isprop(val, 'target') - types.util.checkDtype('imaging_plane', 'types.core.ImagingPlane', val.target); - end - else - val = types.util.checkDtype('imaging_plane', 'types.core.ImagingPlane', val); - if ~isempty(val) - val = types.untyped.SoftLink(val); - end - end - end - function val = validate_pmt_gain(obj, val) - val = types.util.checkDtype('pmt_gain', 'single', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[1]}; - types.util.checkDims(valsz, validshapes); - end - function val = validate_scan_line_rate(obj, val) - val = types.util.checkDtype('scan_line_rate', 'single', val); - if isa(val, 'types.untyped.DataStub') - if 1 == val.ndims - valsz = [val.dims 1]; - else - valsz = val.dims; - end - elseif istable(val) - valsz = [height(val) 1]; - elseif ischar(val) - valsz = [size(val, 1) 1]; - else - valsz = size(val); - end - validshapes = {[1]}; - types.util.checkDims(valsz, validshapes); - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.ImageSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - if ~isempty(obj.field_of_view) - if startsWith(class(obj.field_of_view), 'types.untyped.') - refs = obj.field_of_view.export(fid, [fullpath '/field_of_view'], refs); - elseif ~isempty(obj.field_of_view) - io.writeDataset(fid, [fullpath '/field_of_view'], obj.field_of_view, 'forceArray'); - end - 
end - refs = obj.imaging_plane.export(fid, [fullpath '/imaging_plane'], refs); - if ~isempty(obj.pmt_gain) - io.writeAttribute(fid, [fullpath '/pmt_gain'], obj.pmt_gain); - end - if ~isempty(obj.scan_line_rate) - io.writeAttribute(fid, [fullpath '/scan_line_rate'], obj.scan_line_rate); - end - end -end - -end \ No newline at end of file diff --git a/+types/+core/VoltageClampStimulusSeries.m b/+types/+core/VoltageClampStimulusSeries.m deleted file mode 100644 index 2df68b7e..00000000 --- a/+types/+core/VoltageClampStimulusSeries.m +++ /dev/null @@ -1,50 +0,0 @@ -classdef VoltageClampStimulusSeries < types.core.PatchClampSeries & types.untyped.GroupClass -% VOLTAGECLAMPSTIMULUSSERIES Stimulus voltage applied during a voltage clamp recording. - - - -methods - function obj = VoltageClampStimulusSeries(varargin) - % VOLTAGECLAMPSTIMULUSSERIES Constructor for VoltageClampStimulusSeries - varargin = [{'data_unit' 'volts'} varargin]; - obj = obj@types.core.PatchClampSeries(varargin{:}); - - - p = inputParser; - p.KeepUnmatched = true; - p.PartialMatching = false; - p.StructExpand = false; - addParameter(p, 'data',[]); - addParameter(p, 'data_unit',[]); - misc.parseSkipInvalidName(p, varargin); - obj.data = p.Results.data; - obj.data_unit = p.Results.data_unit; - if strcmp(class(obj), 'types.core.VoltageClampStimulusSeries') - cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); - types.util.checkUnset(obj, unique(cellStringArguments)); - end - end - %% SETTERS - - %% VALIDATORS - - function val = validate_data(obj, val) - - end - function val = validate_data_unit(obj, val) - if isequal(val, 'volts') - val = 'volts'; - else - error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''VoltageClampStimulusSeries'' because it is read-only.') - end - end - %% EXPORT - function refs = export(obj, fid, fullpath, refs) - refs = export@types.core.PatchClampSeries(obj, fid, fullpath, refs); - if any(strcmp(refs, fullpath)) - return; - end - end -end - -end \ No newline at end of file diff --git a/.codespellrc b/.codespellrc index 65f2c912..5c4c3a73 100644 --- a/.codespellrc +++ b/.codespellrc @@ -1,3 +1,3 @@ [codespell] -skip = *.html,*logo_matnwb.svg,*fastsearch.m,*.yaml,*testResults.xml +skip = *.html,*.svg,*fastsearch.m,*.yaml,*testResults.xml ignore-words-list = DNE,nd,whos diff --git a/.gitignore b/.gitignore index 988501fa..14397dfc 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,5 @@ workspace/ *.swp .DS_Store +tests/env.mat + +docs/build diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..9138a7fe --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,13 @@ +version: "2" + +build: + os: "ubuntu-22.04" + tools: + python: "3.10" + +python: + install: + - requirements: docs/requirements.txt + +sphinx: + configuration: docs/source/conf.py diff --git a/+contrib/+blackrock/AddNEVFile.m b/code/+contrib/+blackrock/AddNEVFile.m similarity index 100% rename from +contrib/+blackrock/AddNEVFile.m rename to code/+contrib/+blackrock/AddNEVFile.m diff --git a/+contrib/+blackrock/AddNSFile.m b/code/+contrib/+blackrock/AddNSFile.m similarity index 100% rename from +contrib/+blackrock/AddNSFile.m rename to code/+contrib/+blackrock/AddNSFile.m diff --git a/+contrib/+blackrock/README.md b/code/+contrib/+blackrock/README.md similarity index 100% rename from +contrib/+blackrock/README.md rename to code/+contrib/+blackrock/README.md diff --git a/+contrib/+blackrock/get_channel_info.m b/code/+contrib/+blackrock/get_channel_info.m similarity 
index 100% rename from +contrib/+blackrock/get_channel_info.m rename to code/+contrib/+blackrock/get_channel_info.m diff --git a/+contrib/+mworks/AddEyePos.m b/code/+contrib/+mworks/AddEyePos.m similarity index 100% rename from +contrib/+mworks/AddEyePos.m rename to code/+contrib/+mworks/AddEyePos.m diff --git a/+contrib/+mworks/README.md b/code/+contrib/+mworks/README.md similarity index 100% rename from +contrib/+mworks/README.md rename to code/+contrib/+mworks/README.md diff --git a/+contrib/+tdt/README.md b/code/+contrib/+tdt/README.md similarity index 100% rename from +contrib/+tdt/README.md rename to code/+contrib/+tdt/README.md diff --git a/+contrib/+tdt/TDT2NWB.m b/code/+contrib/+tdt/TDT2NWB.m similarity index 100% rename from +contrib/+tdt/TDT2NWB.m rename to code/+contrib/+tdt/TDT2NWB.m diff --git a/+contrib/+tdt/TDTbin2mat.m b/code/+contrib/+tdt/TDTbin2mat.m similarity index 100% rename from +contrib/+tdt/TDTbin2mat.m rename to code/+contrib/+tdt/TDTbin2mat.m diff --git a/+contrib/+tdt/addECoG.m b/code/+contrib/+tdt/addECoG.m similarity index 100% rename from +contrib/+tdt/addECoG.m rename to code/+contrib/+tdt/addECoG.m diff --git a/+contrib/+tdt/addElectrodeTable.m b/code/+contrib/+tdt/addElectrodeTable.m similarity index 100% rename from +contrib/+tdt/addElectrodeTable.m rename to code/+contrib/+tdt/addElectrodeTable.m diff --git a/+file/+interface/HasProps.m b/code/+file/+interface/HasProps.m similarity index 100% rename from +file/+interface/HasProps.m rename to code/+file/+interface/HasProps.m diff --git a/code/+file/+internal/getRequiredPropertyNames.m b/code/+file/+internal/getRequiredPropertyNames.m new file mode 100644 index 00000000..a57c0a13 --- /dev/null +++ b/code/+file/+internal/getRequiredPropertyNames.m @@ -0,0 +1,26 @@ +function requiredPropertyNames = getRequiredPropertyNames(classprops) +% getRequiredPropertyNames - Get names of required properties from props info + + allProperties = keys(classprops); + requiredPropertyNames = {}; + + for iProp = 1:length(allProperties) + + propertyName = allProperties{iProp}; + prop = classprops(propertyName); + + isRequired = ischar(prop) || isa(prop, 'containers.Map') || isstruct(prop); + isPropertyRequired = false; + if isa(prop, 'file.interface.HasProps') + isPropertyRequired = false(size(prop)); + for iSubProp = 1:length(prop) + p = prop(iSubProp); + isPropertyRequired(iSubProp) = p.required; + end + end + + if isRequired || all(isPropertyRequired) + requiredPropertyNames = [requiredPropertyNames {propertyName}]; %#ok + end + end +end diff --git a/code/+file/+internal/mergeProps.m b/code/+file/+internal/mergeProps.m new file mode 100644 index 00000000..6344121a --- /dev/null +++ b/code/+file/+internal/mergeProps.m @@ -0,0 +1,18 @@ +function allProps = mergeProps(props, superClassProps) +% mergeProps - Merge maps containing props info for a class and its superclasses + + allPropsCell = [{props}, superClassProps]; + allProps = containers.Map(); + + % Start from most remote ancestor and work towards current class.
+ % Important to go in this order because subclasses can override + % properties, and we need to keep the property definition for the superclass + % that is closest to the current class or the property definition for the + % class itself in the final map + for i = numel(allPropsCell):-1:1 + superPropNames = allPropsCell{i}.keys; + for jProp = 1:numel(superPropNames) + allProps(superPropNames{jProp}) = allPropsCell{i}(superPropNames{jProp}); + end + end +end diff --git a/+file/Attribute.m b/code/+file/Attribute.m similarity index 100% rename from +file/Attribute.m rename to code/+file/Attribute.m diff --git a/+file/Dataset.m b/code/+file/Dataset.m similarity index 100% rename from +file/Dataset.m rename to code/+file/Dataset.m diff --git a/+file/Group.m b/code/+file/Group.m similarity index 100% rename from +file/Group.m rename to code/+file/Group.m diff --git a/+file/Link.m b/code/+file/Link.m similarity index 100% rename from +file/Link.m rename to code/+file/Link.m diff --git a/+file/addSpaces.m b/code/+file/addSpaces.m similarity index 100% rename from +file/addSpaces.m rename to code/+file/addSpaces.m diff --git a/+file/cloneNwbFileClass.m b/code/+file/cloneNwbFileClass.m similarity index 100% rename from +file/cloneNwbFileClass.m rename to code/+file/cloneNwbFileClass.m diff --git a/+file/fillClass.m b/code/+file/fillClass.m similarity index 89% rename from +file/fillClass.m rename to code/+file/fillClass.m index 58723bfd..de5d09e5 100644 --- a/+file/fillClass.m +++ b/code/+file/fillClass.m @@ -1,4 +1,4 @@ -function template = fillClass(name, namespace, processed, classprops, inherited) +function template = fillClass(name, namespace, processed, classprops, inherited, superClassProps) %name is the name of the scheme %namespace is the namespace context for this class @@ -87,7 +87,20 @@ %% return classfile string classDefinitionHeader = [... 'classdef ' name ' < ' depnm ' & ' classTag newline... %header, dependencies - '% ' upper(name) ' ' class.doc]; %name, docstr + '% ' upper(name) ' - ' class.doc]; %name, docstr + + allClassProps = file.internal.mergeProps(classprops, superClassProps); + allRequiredPropertyNames = file.internal.getRequiredPropertyNames(allClassProps); + if isempty(allRequiredPropertyNames) + allRequiredPropertyNames = {'None'}; + end + + % Add list of required properties in class docstring + classDefinitionHeader = [classDefinitionHeader, newline... + '%', newline, ... + '% Required Properties:', newline, ... + sprintf('%% %s', strjoin(allRequiredPropertyNames, ', '))]; + hiddenAndReadonly = intersect(hidden, readonly); hidden = setdiff(hidden, hiddenAndReadonly); readonly = setdiff(readonly, hiddenAndReadonly); @@ -126,7 +139,8 @@ depnm,... defaults,... %all defaults, regardless of inheritance classprops,... - namespace); + namespace, ... 
+ superClassProps); setterFcns = file.fillSetters(setdiff(nonInherited, union(readonly, hiddenAndReadonly))); validatorFcns = file.fillValidators(allProperties, classprops, namespace, namespace.getFullClassName(name), inherited); exporterFcns = file.fillExport(nonInherited, class, depnm); diff --git a/+file/fillConstructor.m b/code/+file/fillConstructor.m similarity index 65% rename from +file/fillConstructor.m rename to code/+file/fillConstructor.m index 269829d2..7c8d36d3 100644 --- a/+file/fillConstructor.m +++ b/code/+file/fillConstructor.m @@ -1,6 +1,11 @@ -function functionString = fillConstructor(name, parentname, defaults, props, namespace) +function functionString = fillConstructor(name, parentname, defaults, props, namespace, superClassProps) caps = upper(name); - functionBody = ['% ' caps ' Constructor for ' name]; + functionBody = ['% ' caps ' - Constructor for ' name]; + + docString = fillConstructorDocString(name, props, namespace, superClassProps); + if ~isempty(docString) + functionBody = [functionBody newline() docString]; + end bodyString = fillBody(parentname, defaults, props, namespace); if ~isempty(bodyString) @@ -23,7 +28,6 @@ ['function obj = ' name '(varargin)']... file.addSpaces(functionBody, 4)... 'end'}, newline()); - end function bodystr = fillBody(parentName, defaults, props, namespace) @@ -198,4 +202,116 @@ ' types.util.dynamictable.checkConfig(obj);', ... 'end',... }, newline); +end + +function docString = fillConstructorDocString(name, props, namespace, superClassProps) + + classVarName = name; classVarName(1) = lower(classVarName(1)); + fullClassName = sprintf('types.%s.%s', namespace.name, name); + fullClassNameUpper = sprintf('types.%s.%s', namespace.name, upper(name)); + + props = file.internal.mergeProps(props, superClassProps); + names = props.keys(); + + docString = [... + "%", ... + "% Syntax:", ... + sprintf("%% %s = %s() creates a %s object with unset property values.", classVarName, fullClassNameUpper, name), ... + "%", ... + ]; + + if ~isempty(names) + docString = [docString, ... + sprintf("%% %s = %s(Name, Value) creates a %s object where one or more property values are specified using name-value pairs.", classVarName, fullClassNameUpper, name), ... + "%", ... + "% Input Arguments (Name-Value Arguments):", ... + ]; + end + + for i = 1:numel(names) + propName = names{i}; + thisProp = props(propName); + try + if isprop(thisProp, 'readonly') && thisProp.readonly + continue + end + catch + % pass + end + + valueType = getTypeStr(thisProp); + try + description = thisProp.doc; + catch + description = 'No description'; + end + + docString = [docString, ... + sprintf("%% - %s (%s) - %s", propName, valueType, description), ... + "%"]; %#ok + end + + docString = [docString, ... + "% Output Arguments:", ... + sprintf("%% - %s (%s) - A %s object", classVarName, fullClassName, name), ... + "" + ]; + + docString = char( strjoin(docString, newline) ); +end + +% Todo: Mostly duplicate code from file.fillProps. 
Should consolidate +function typeStr = getTypeStr(prop) + if ischar(prop) + typeStr = prop; + elseif isstruct(prop) + columnNames = fieldnames(prop); + columnDocStr = cell(size(columnNames)); + for i=1:length(columnNames) + name = columnNames{i}; + columnDocStr{i} = getTypeStr(prop.(name)); + end + typeStr = ['Table with columns: (', strjoin(columnDocStr, ', '), ')']; + elseif isa(prop, 'file.Attribute') + if isa(prop.dtype, 'containers.Map') + assertValidRefType(prop.dtype('reftype')) + typeStr = sprintf('%s reference to %s', capitalize(prop.dtype('reftype')), prop.dtype('target_type')); + else + typeStr = prop.dtype; + end + elseif isa(prop, 'containers.Map') + assertValidRefType(prop('reftype')) + typeStr = sprintf('%s reference to %s', capitalize(prop('reftype')), prop('target_type')); + elseif isa(prop, 'file.interface.HasProps') + typeStrCell = cell(size(prop)); + for iProp = 1:length(typeStrCell) + anonProp = prop(iProp); + if isa(anonProp, 'file.Dataset') && isempty(anonProp.type) + typeStrCell{iProp} = getTypeStr(anonProp.dtype); + elseif isempty(anonProp.type) + typeStrCell{iProp} = 'types.untyped.Set'; + else + typeStrCell{iProp} = anonProp.type; + end + end + typeStr = strjoin(typeStrCell, '|'); + else + typeStr = prop.type; + end +end + +function assertValidRefType(referenceType) + arguments + referenceType (1,1) string + end + assert( ismember(referenceType, ["region", "object"]), ... + 'NWB:ClassGenerator:InvalidRefType', ... + 'Invalid reftype found while filling description for class properties.') +end + +function word = capitalize(word) + arguments + word (1,:) char + end + word(1) = upper(word(1)); end \ No newline at end of file diff --git a/+file/fillCustomConstraint.m b/code/+file/fillCustomConstraint.m similarity index 100% rename from +file/fillCustomConstraint.m rename to code/+file/fillCustomConstraint.m diff --git a/+file/fillDynamicTableMethods.m b/code/+file/fillDynamicTableMethods.m similarity index 100% rename from +file/fillDynamicTableMethods.m rename to code/+file/fillDynamicTableMethods.m diff --git a/+file/fillExport.m b/code/+file/fillExport.m similarity index 100% rename from +file/fillExport.m rename to code/+file/fillExport.m diff --git a/+file/fillProps.m b/code/+file/fillProps.m similarity index 100% rename from +file/fillProps.m rename to code/+file/fillProps.m diff --git a/+file/fillSetters.m b/code/+file/fillSetters.m similarity index 100% rename from +file/fillSetters.m rename to code/+file/fillSetters.m diff --git a/+file/fillValidators.m b/code/+file/fillValidators.m similarity index 100% rename from +file/fillValidators.m rename to code/+file/fillValidators.m diff --git a/+file/formatShape.m b/code/+file/formatShape.m similarity index 100% rename from +file/formatShape.m rename to code/+file/formatShape.m diff --git a/+file/isShapeScalar.m b/code/+file/isShapeScalar.m similarity index 100% rename from +file/isShapeScalar.m rename to code/+file/isShapeScalar.m diff --git a/+file/mapType.m b/code/+file/mapType.m similarity index 100% rename from +file/mapType.m rename to code/+file/mapType.m diff --git a/+file/processClass.m b/code/+file/processClass.m similarity index 100% rename from +file/processClass.m rename to code/+file/processClass.m diff --git a/+file/writeNamespace.m b/code/+file/writeNamespace.m similarity index 76% rename from +file/writeNamespace.m rename to code/+file/writeNamespace.m index 00ab7aa9..77d0da7c 100644 --- a/+file/writeNamespace.m +++ b/code/+file/writeNamespace.m @@ -15,11 +15,16 @@ function 
writeNamespace(namespaceName, saveDir) [processed, classprops, inherited] = file.processClass(className, Namespace, pregenerated); if ~isempty(processed) + superClassProps = cell(1, numel(processed)-1); + for iSuper = 2:numel(processed) + [~, superClassProps{iSuper-1}, ~] = file.processClass(processed(iSuper).type, Namespace, pregenerated); + end + fid = fopen(fullfile(classFileDir, [className '.m']), 'W'); % Create cleanup object to close to file in case the write operation fails. fileCleanupObj = onCleanup(@(id) fclose(fid)); fwrite(fid, file.fillClass(className, Namespace, processed, ... - classprops, inherited), 'char'); + classprops, inherited, superClassProps), 'char'); else % pass end diff --git a/+io/+space/+shape/Block.m b/code/+io/+space/+shape/Block.m similarity index 100% rename from +io/+space/+shape/Block.m rename to code/+io/+space/+shape/Block.m diff --git a/+io/+space/+shape/Point.m b/code/+io/+space/+shape/Point.m similarity index 100% rename from +io/+space/+shape/Point.m rename to code/+io/+space/+shape/Point.m diff --git a/+io/+space/Shape.m b/code/+io/+space/Shape.m similarity index 100% rename from +io/+space/Shape.m rename to code/+io/+space/Shape.m diff --git a/+io/+space/findShapes.m b/code/+io/+space/findShapes.m similarity index 100% rename from +io/+space/findShapes.m rename to code/+io/+space/findShapes.m diff --git a/+io/+space/getReadSpace.m b/code/+io/+space/getReadSpace.m similarity index 100% rename from +io/+space/getReadSpace.m rename to code/+io/+space/getReadSpace.m diff --git a/+io/+space/segmentSelection.m b/code/+io/+space/segmentSelection.m similarity index 100% rename from +io/+space/segmentSelection.m rename to code/+io/+space/segmentSelection.m diff --git a/+io/+spec/+internal/readEmbeddedSpecLocation.m b/code/+io/+spec/+internal/readEmbeddedSpecLocation.m similarity index 100% rename from +io/+spec/+internal/readEmbeddedSpecLocation.m rename to code/+io/+spec/+internal/readEmbeddedSpecLocation.m diff --git a/+io/+spec/getEmbeddedSpecLocation.m b/code/+io/+spec/getEmbeddedSpecLocation.m similarity index 100% rename from +io/+spec/getEmbeddedSpecLocation.m rename to code/+io/+spec/getEmbeddedSpecLocation.m diff --git a/+io/+spec/readEmbeddedSpecifications.m b/code/+io/+spec/readEmbeddedSpecifications.m similarity index 100% rename from +io/+spec/readEmbeddedSpecifications.m rename to code/+io/+spec/readEmbeddedSpecifications.m diff --git a/+io/+spec/writeEmbeddedSpecifications.m b/code/+io/+spec/writeEmbeddedSpecifications.m similarity index 100% rename from +io/+spec/writeEmbeddedSpecifications.m rename to code/+io/+spec/writeEmbeddedSpecifications.m diff --git a/+io/createParsedType.m b/code/+io/createParsedType.m similarity index 100% rename from +io/createParsedType.m rename to code/+io/createParsedType.m diff --git a/+io/getBaseType.m b/code/+io/getBaseType.m similarity index 100% rename from +io/getBaseType.m rename to code/+io/getBaseType.m diff --git a/+io/getMatType.m b/code/+io/getMatType.m similarity index 100% rename from +io/getMatType.m rename to code/+io/getMatType.m diff --git a/+io/getMatTypeSize.m b/code/+io/getMatTypeSize.m similarity index 100% rename from +io/getMatTypeSize.m rename to code/+io/getMatTypeSize.m diff --git a/+io/getRefData.m b/code/+io/getRefData.m similarity index 100% rename from +io/getRefData.m rename to code/+io/getRefData.m diff --git a/+io/isBool.m b/code/+io/isBool.m similarity index 100% rename from +io/isBool.m rename to code/+io/isBool.m diff --git a/+io/map2kwargs.m b/code/+io/map2kwargs.m 
similarity index 100% rename from +io/map2kwargs.m rename to code/+io/map2kwargs.m diff --git a/+io/mapData2H5.m b/code/+io/mapData2H5.m similarity index 100% rename from +io/mapData2H5.m rename to code/+io/mapData2H5.m diff --git a/+io/parseAttributes.m b/code/+io/parseAttributes.m similarity index 100% rename from +io/parseAttributes.m rename to code/+io/parseAttributes.m diff --git a/+io/parseCompound.m b/code/+io/parseCompound.m similarity index 100% rename from +io/parseCompound.m rename to code/+io/parseCompound.m diff --git a/+io/parseDataset.m b/code/+io/parseDataset.m similarity index 100% rename from +io/parseDataset.m rename to code/+io/parseDataset.m diff --git a/+io/parseGroup.m b/code/+io/parseGroup.m similarity index 100% rename from +io/parseGroup.m rename to code/+io/parseGroup.m diff --git a/+io/parseReference.m b/code/+io/parseReference.m similarity index 100% rename from +io/parseReference.m rename to code/+io/parseReference.m diff --git a/+io/pathParts.m b/code/+io/pathParts.m similarity index 100% rename from +io/pathParts.m rename to code/+io/pathParts.m diff --git a/+io/resolvePath.m b/code/+io/resolvePath.m similarity index 100% rename from +io/resolvePath.m rename to code/+io/resolvePath.m diff --git a/+io/timestamp2datetime.m b/code/+io/timestamp2datetime.m similarity index 100% rename from +io/timestamp2datetime.m rename to code/+io/timestamp2datetime.m diff --git a/+io/writeAttribute.m b/code/+io/writeAttribute.m similarity index 100% rename from +io/writeAttribute.m rename to code/+io/writeAttribute.m diff --git a/+io/writeCompound.m b/code/+io/writeCompound.m similarity index 100% rename from +io/writeCompound.m rename to code/+io/writeCompound.m diff --git a/+io/writeDataset.m b/code/+io/writeDataset.m similarity index 100% rename from +io/writeDataset.m rename to code/+io/writeDataset.m diff --git a/+io/writeGroup.m b/code/+io/writeGroup.m similarity index 100% rename from +io/writeGroup.m rename to code/+io/writeGroup.m diff --git a/+io/writeSoftLink.m b/code/+io/writeSoftLink.m similarity index 100% rename from +io/writeSoftLink.m rename to code/+io/writeSoftLink.m diff --git a/+matnwb/+common/findLatestSchemaVersion.m b/code/+matnwb/+common/findLatestSchemaVersion.m similarity index 100% rename from +matnwb/+common/findLatestSchemaVersion.m rename to code/+matnwb/+common/findLatestSchemaVersion.m diff --git a/+matnwb/+common/mustBeNwbFile.m b/code/+matnwb/+common/mustBeNwbFile.m similarity index 100% rename from +matnwb/+common/mustBeNwbFile.m rename to code/+matnwb/+common/mustBeNwbFile.m diff --git a/+matnwb/+common/mustBeValidSchemaVersion.m b/code/+matnwb/+common/mustBeValidSchemaVersion.m similarity index 100% rename from +matnwb/+common/mustBeValidSchemaVersion.m rename to code/+matnwb/+common/mustBeValidSchemaVersion.m diff --git a/+misc/cellPrettyPrint.m b/code/+misc/cellPrettyPrint.m similarity index 100% rename from +misc/cellPrettyPrint.m rename to code/+misc/cellPrettyPrint.m diff --git a/+misc/getMatnwbDir.m b/code/+misc/getMatnwbDir.m similarity index 100% rename from +misc/getMatnwbDir.m rename to code/+misc/getMatnwbDir.m diff --git a/+misc/getTutorialNwbFilePath.m b/code/+misc/getTutorialNwbFilePath.m similarity index 100% rename from +misc/getTutorialNwbFilePath.m rename to code/+misc/getTutorialNwbFilePath.m diff --git a/+misc/parseSkipInvalidName.m b/code/+misc/parseSkipInvalidName.m similarity index 100% rename from +misc/parseSkipInvalidName.m rename to code/+misc/parseSkipInvalidName.m diff --git a/+misc/str2validName.m 
b/code/+misc/str2validName.m similarity index 100% rename from +misc/str2validName.m rename to code/+misc/str2validName.m diff --git a/+schemes/Namespace.m b/code/+schemes/Namespace.m similarity index 100% rename from +schemes/Namespace.m rename to code/+schemes/Namespace.m diff --git a/+schemes/exportJson.m b/code/+schemes/exportJson.m similarity index 100% rename from +schemes/exportJson.m rename to code/+schemes/exportJson.m diff --git a/+schemes/getClasses.m b/code/+schemes/getClasses.m similarity index 100% rename from +schemes/getClasses.m rename to code/+schemes/getClasses.m diff --git a/+schemes/loadNamespace.m b/code/+schemes/loadNamespace.m similarity index 100% rename from +schemes/loadNamespace.m rename to code/+schemes/loadNamespace.m diff --git a/+spec/generate.m b/code/+spec/generate.m similarity index 100% rename from +spec/generate.m rename to code/+spec/generate.m diff --git a/+spec/getNamespaceInfo.m b/code/+spec/getNamespaceInfo.m similarity index 100% rename from +spec/getNamespaceInfo.m rename to code/+spec/getNamespaceInfo.m diff --git a/+spec/getSourceInfo.m b/code/+spec/getSourceInfo.m similarity index 100% rename from +spec/getSourceInfo.m rename to code/+spec/getSourceInfo.m diff --git a/+spec/loadCache.m b/code/+spec/loadCache.m similarity index 100% rename from +spec/loadCache.m rename to code/+spec/loadCache.m diff --git a/+spec/loadSchemaObject.m b/code/+spec/loadSchemaObject.m similarity index 100% rename from +spec/loadSchemaObject.m rename to code/+spec/loadSchemaObject.m diff --git a/+spec/saveCache.m b/code/+spec/saveCache.m similarity index 100% rename from +spec/saveCache.m rename to code/+spec/saveCache.m diff --git a/+spec/schema2matlab.m b/code/+spec/schema2matlab.m similarity index 100% rename from +spec/schema2matlab.m rename to code/+spec/schema2matlab.m diff --git a/code/+types/+core/AbstractFeatureSeries.m b/code/+types/+core/AbstractFeatureSeries.m new file mode 100644 index 00000000..5cae4d44 --- /dev/null +++ b/code/+types/+core/AbstractFeatureSeries.m @@ -0,0 +1,184 @@ +classdef AbstractFeatureSeries < types.core.TimeSeries & types.untyped.GroupClass +% ABSTRACTFEATURESERIES - Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical. +% +% Required Properties: +% data, features + + +% REQUIRED PROPERTIES +properties + features; % REQUIRED (char) Description of the features represented in TimeSeries::data. +end +% OPTIONAL PROPERTIES +properties + feature_units; % (char) Units of each feature. +end + +methods + function obj = AbstractFeatureSeries(varargin) + % ABSTRACTFEATURESERIES - Constructor for AbstractFeatureSeries + % + % Syntax: + % abstractFeatureSeries = types.core.ABSTRACTFEATURESERIES() creates a AbstractFeatureSeries object with unset property values. + % + % abstractFeatureSeries = types.core.ABSTRACTFEATURESERIES(Name, Value) creates a AbstractFeatureSeries object where one or more property values are specified using name-value pairs. 
+ % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (numeric) - Values of each feature at each time. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - data_unit (char) - Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is "see 'feature_units'". + % + % - description (char) - Description of the time series. + % + % - feature_units (char) - Units of each feature. + % + % - features (char) - Description of the features represented in TimeSeries::data. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. 
+ % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + % + % Output Arguments: + % - abstractFeatureSeries (types.core.AbstractFeatureSeries) - A AbstractFeatureSeries object + + varargin = [{'data_unit' 'see `feature_units`'} varargin]; + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + addParameter(p, 'feature_units',[]); + addParameter(p, 'features',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + obj.feature_units = p.Results.feature_units; + obj.features = p.Results.features; + if strcmp(class(obj), 'types.core.AbstractFeatureSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.feature_units(obj, val) + obj.feature_units = obj.validate_feature_units(val); + end + function set.features(obj, val) + obj.features = obj.validate_features(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf], [Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_unit(obj, val) + val = types.util.checkDtype('data_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_feature_units(obj, val) + val = types.util.checkDtype('feature_units', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_features(obj, val) + val = types.util.checkDtype('features', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.feature_units) + if startsWith(class(obj.feature_units), 'types.untyped.') + refs = obj.feature_units.export(fid, [fullpath '/feature_units'], refs); + elseif ~isempty(obj.feature_units) + io.writeDataset(fid, [fullpath '/feature_units'], obj.feature_units, 'forceArray'); + end + end + if startsWith(class(obj.features), 'types.untyped.') + refs = obj.features.export(fid, 
[fullpath '/features'], refs); + elseif ~isempty(obj.features) + io.writeDataset(fid, [fullpath '/features'], obj.features, 'forceArray'); + end + end +end + +end \ No newline at end of file diff --git a/code/+types/+core/AnnotationSeries.m b/code/+types/+core/AnnotationSeries.m new file mode 100644 index 00000000..dd44fbb3 --- /dev/null +++ b/code/+types/+core/AnnotationSeries.m @@ -0,0 +1,109 @@ +classdef AnnotationSeries < types.core.TimeSeries & types.untyped.GroupClass +% ANNOTATIONSERIES - Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way. +% +% Required Properties: +% data + + + +methods + function obj = AnnotationSeries(varargin) + % ANNOTATIONSERIES - Constructor for AnnotationSeries + % + % Syntax: + % annotationSeries = types.core.ANNOTATIONSERIES() creates a AnnotationSeries object with unset property values. + % + % annotationSeries = types.core.ANNOTATIONSERIES(Name, Value) creates a AnnotationSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (char) - Annotations made during an experiment. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - description (char) - Description of the time series. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + % + % Output Arguments: + % - annotationSeries (types.core.AnnotationSeries) - A AnnotationSeries object + + varargin = [{'data_resolution' types.util.correctType(-1, 'single') 'data_unit' 'n/a'} varargin]; + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_resolution',[]); + addParameter(p, 'data_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_resolution = p.Results.data_resolution; + obj.data_unit = p.Results.data_unit; + if strcmp(class(obj), 'types.core.AnnotationSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_resolution(obj, val) + if isequal(val, -1) + val = -1; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_resolution'' property of class ''AnnotationSeries'' because it is read-only.') + end + end + function val = validate_data_unit(obj, val) + if isequal(val, 'n/a') + val = 'n/a'; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''AnnotationSeries'' because it is read-only.') + end + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/code/+types/+core/BehavioralEpochs.m b/code/+types/+core/BehavioralEpochs.m new file mode 100644 index 00000000..93239c91 --- /dev/null +++ b/code/+types/+core/BehavioralEpochs.m @@ -0,0 +1,65 @@ +classdef BehavioralEpochs < types.core.NWBDataInterface & types.untyped.GroupClass +% BEHAVIORALEPOCHS - TimeSeries for storing behavioral epochs. The objective of this and the other two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries) is to provide generic hooks for software tools/scripts. This allows a tool/script to take the output one specific interface (e.g., UnitTimes) and plot that data relative to another data modality (e.g., behavioral events) without having to define all possible modalities in advance. 
Declaring one of these interfaces means that one or more TimeSeries of the specified type is published. These TimeSeries should reside in a group having the same name as the interface. For example, if a BehavioralTimeSeries interface is declared, the module will have one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular events. BehavioralTimeSeries is for continuous data. +% +% Required Properties: +% None + + +% OPTIONAL PROPERTIES +properties + intervalseries; % (IntervalSeries) IntervalSeries object containing start and stop times of epochs. +end + +methods + function obj = BehavioralEpochs(varargin) + % BEHAVIORALEPOCHS - Constructor for BehavioralEpochs + % + % Syntax: + % behavioralEpochs = types.core.BEHAVIORALEPOCHS() creates a BehavioralEpochs object with unset property values. + % + % behavioralEpochs = types.core.BEHAVIORALEPOCHS(Name, Value) creates a BehavioralEpochs object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - intervalseries (IntervalSeries) - IntervalSeries object containing start and stop times of epochs. + % + % Output Arguments: + % - behavioralEpochs (types.core.BehavioralEpochs) - A BehavioralEpochs object + + obj = obj@types.core.NWBDataInterface(varargin{:}); + [obj.intervalseries, ivarargin] = types.util.parseConstrained(obj,'intervalseries', 'types.core.IntervalSeries', varargin{:}); + varargin(ivarargin) = []; + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + misc.parseSkipInvalidName(p, varargin); + if strcmp(class(obj), 'types.core.BehavioralEpochs') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.intervalseries(obj, val) + obj.intervalseries = obj.validate_intervalseries(val); + end + %% VALIDATORS + + function val = validate_intervalseries(obj, val) + namedprops = struct(); + constrained = {'types.core.IntervalSeries'}; + types.util.checkSet('intervalseries', namedprops, constrained, val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.NWBDataInterface(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.intervalseries) + refs = obj.intervalseries.export(fid, fullpath, refs); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/BehavioralEvents.m b/code/+types/+core/BehavioralEvents.m similarity index 64% rename from +types/+core/BehavioralEvents.m rename to code/+types/+core/BehavioralEvents.m index a5d94e28..1536944c 100644 --- a/+types/+core/BehavioralEvents.m +++ b/code/+types/+core/BehavioralEvents.m @@ -1,5 +1,8 @@ classdef BehavioralEvents < types.core.NWBDataInterface & types.untyped.GroupClass -% BEHAVIORALEVENTS TimeSeries for storing behavioral events. See description of BehavioralEpochs for more details. +% BEHAVIORALEVENTS - TimeSeries for storing behavioral events. See description of BehavioralEpochs for more details. 
+% +% Required Properties: +% None % OPTIONAL PROPERTIES @@ -9,7 +12,19 @@ methods function obj = BehavioralEvents(varargin) - % BEHAVIORALEVENTS Constructor for BehavioralEvents + % BEHAVIORALEVENTS - Constructor for BehavioralEvents + % + % Syntax: + % behavioralEvents = types.core.BEHAVIORALEVENTS() creates a BehavioralEvents object with unset property values. + % + % behavioralEvents = types.core.BEHAVIORALEVENTS(Name, Value) creates a BehavioralEvents object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - timeseries (TimeSeries) - TimeSeries object containing behavioral events. + % + % Output Arguments: + % - behavioralEvents (types.core.BehavioralEvents) - A BehavioralEvents object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.timeseries, ivarargin] = types.util.parseConstrained(obj,'timeseries', 'types.core.TimeSeries', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/BehavioralTimeSeries.m b/code/+types/+core/BehavioralTimeSeries.m similarity index 63% rename from +types/+core/BehavioralTimeSeries.m rename to code/+types/+core/BehavioralTimeSeries.m index 4f697bae..c5689f52 100644 --- a/+types/+core/BehavioralTimeSeries.m +++ b/code/+types/+core/BehavioralTimeSeries.m @@ -1,5 +1,8 @@ classdef BehavioralTimeSeries < types.core.NWBDataInterface & types.untyped.GroupClass -% BEHAVIORALTIMESERIES TimeSeries for storing Behavioral time series data. See description of BehavioralEpochs for more details. +% BEHAVIORALTIMESERIES - TimeSeries for storing Behavioral time series data. See description of BehavioralEpochs for more details. +% +% Required Properties: +% None % OPTIONAL PROPERTIES @@ -9,7 +12,19 @@ methods function obj = BehavioralTimeSeries(varargin) - % BEHAVIORALTIMESERIES Constructor for BehavioralTimeSeries + % BEHAVIORALTIMESERIES - Constructor for BehavioralTimeSeries + % + % Syntax: + % behavioralTimeSeries = types.core.BEHAVIORALTIMESERIES() creates a BehavioralTimeSeries object with unset property values. + % + % behavioralTimeSeries = types.core.BEHAVIORALTIMESERIES(Name, Value) creates a BehavioralTimeSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - timeseries (TimeSeries) - TimeSeries object containing continuous behavioral data. + % + % Output Arguments: + % - behavioralTimeSeries (types.core.BehavioralTimeSeries) - A BehavioralTimeSeries object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.timeseries, ivarargin] = types.util.parseConstrained(obj,'timeseries', 'types.core.TimeSeries', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/ClusterWaveforms.m b/code/+types/+core/ClusterWaveforms.m similarity index 76% rename from +types/+core/ClusterWaveforms.m rename to code/+types/+core/ClusterWaveforms.m index a234c0f3..7612237e 100644 --- a/+types/+core/ClusterWaveforms.m +++ b/code/+types/+core/ClusterWaveforms.m @@ -1,5 +1,8 @@ classdef ClusterWaveforms < types.core.NWBDataInterface & types.untyped.GroupClass -% CLUSTERWAVEFORMS DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.
+% CLUSTERWAVEFORMS - DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one. +% +% Required Properties: +% waveform_filtering, waveform_mean, waveform_sd % REQUIRED PROPERTIES @@ -15,7 +18,25 @@ methods function obj = ClusterWaveforms(varargin) - % CLUSTERWAVEFORMS Constructor for ClusterWaveforms + % CLUSTERWAVEFORMS - Constructor for ClusterWaveforms + % + % Syntax: + % clusterWaveforms = types.core.CLUSTERWAVEFORMS() creates a ClusterWaveforms object with unset property values. + % + % clusterWaveforms = types.core.CLUSTERWAVEFORMS(Name, Value) creates a ClusterWaveforms object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - clustering_interface (Clustering) - Link to Clustering interface that was the source of the clustered data + % + % - waveform_filtering (char) - Filtering applied to data before generating mean/sd + % + % - waveform_mean (single) - The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled) + % + % - waveform_sd (single) - Stdev of waveforms for each cluster, using the same indices as in mean + % + % Output Arguments: + % - clusterWaveforms (types.core.ClusterWaveforms) - A ClusterWaveforms object + obj = obj@types.core.NWBDataInterface(varargin{:}); diff --git a/+types/+core/Clustering.m b/code/+types/+core/Clustering.m similarity index 81% rename from +types/+core/Clustering.m rename to code/+types/+core/Clustering.m index ab8dc279..8a18b0a6 100644 --- a/+types/+core/Clustering.m +++ b/code/+types/+core/Clustering.m @@ -1,5 +1,8 @@ classdef Clustering < types.core.NWBDataInterface & types.untyped.GroupClass -% CLUSTERING DEPRECATED Clustered spike data, whether from automatic clustering tools (e.g., klustakwik) or as a result of manual sorting. +% CLUSTERING - DEPRECATED Clustered spike data, whether from automatic clustering tools (e.g., klustakwik) or as a result of manual sorting. +% +% Required Properties: +% description, num, peak_over_rms, times % REQUIRED PROPERTIES @@ -12,7 +15,25 @@ methods function obj = Clustering(varargin) - % CLUSTERING Constructor for Clustering + % CLUSTERING - Constructor for Clustering + % + % Syntax: + % clustering = types.core.CLUSTERING() creates a Clustering object with unset property values. + % + % clustering = types.core.CLUSTERING(Name, Value) creates a Clustering object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - description (char) - Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc) + % + % - num (int32) - Cluster number of each event + % + % - peak_over_rms (single) - Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric). + % + % - times (double) - Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module. 
+ % + % Output Arguments: + % - clustering (types.core.Clustering) - A Clustering object + obj = obj@types.core.NWBDataInterface(varargin{:}); diff --git a/+types/+core/CompassDirection.m b/code/+types/+core/CompassDirection.m similarity index 59% rename from +types/+core/CompassDirection.m rename to code/+types/+core/CompassDirection.m index fb301492..9df7227b 100644 --- a/+types/+core/CompassDirection.m +++ b/code/+types/+core/CompassDirection.m @@ -1,5 +1,8 @@ classdef CompassDirection < types.core.NWBDataInterface & types.untyped.GroupClass -% COMPASSDIRECTION With a CompassDirection interface, a module publishes a SpatialSeries object representing a floating point value for theta. The SpatialSeries::reference_frame field should indicate what direction corresponds to 0 and which is the direction of rotation (this should be clockwise). The si_unit for the SpatialSeries should be radians or degrees. +% COMPASSDIRECTION - With a CompassDirection interface, a module publishes a SpatialSeries object representing a floating point value for theta. The SpatialSeries::reference_frame field should indicate what direction corresponds to 0 and which is the direction of rotation (this should be clockwise). The si_unit for the SpatialSeries should be radians or degrees. +% +% Required Properties: +% None % OPTIONAL PROPERTIES @@ -9,7 +12,19 @@ methods function obj = CompassDirection(varargin) - % COMPASSDIRECTION Constructor for CompassDirection + % COMPASSDIRECTION - Constructor for CompassDirection + % + % Syntax: + % compassDirection = types.core.COMPASSDIRECTION() creates a CompassDirection object with unset property values. + % + % compassDirection = types.core.COMPASSDIRECTION(Name, Value) creates a CompassDirection object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - spatialseries (SpatialSeries) - SpatialSeries object containing direction of gaze travel. + % + % Output Arguments: + % - compassDirection (types.core.CompassDirection) - A CompassDirection object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.spatialseries, ivarargin] = types.util.parseConstrained(obj,'spatialseries', 'types.core.SpatialSeries', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/CorrectedImageStack.m b/code/+types/+core/CorrectedImageStack.m similarity index 71% rename from +types/+core/CorrectedImageStack.m rename to code/+types/+core/CorrectedImageStack.m index 58195421..ce251f4f 100644 --- a/+types/+core/CorrectedImageStack.m +++ b/code/+types/+core/CorrectedImageStack.m @@ -1,5 +1,8 @@ classdef CorrectedImageStack < types.core.NWBDataInterface & types.untyped.GroupClass -% CORRECTEDIMAGESTACK Results from motion correction of an image stack. +% CORRECTEDIMAGESTACK - Results from motion correction of an image stack. +% +% Required Properties: +% corrected, xy_translation % REQUIRED PROPERTIES @@ -14,7 +17,23 @@ methods function obj = CorrectedImageStack(varargin) - % CORRECTEDIMAGESTACK Constructor for CorrectedImageStack + % CORRECTEDIMAGESTACK - Constructor for CorrectedImageStack + % + % Syntax: + % correctedImageStack = types.core.CORRECTEDIMAGESTACK() creates a CorrectedImageStack object with unset property values. + % + % correctedImageStack = types.core.CORRECTEDIMAGESTACK(Name, Value) creates a CorrectedImageStack object where one or more property values are specified using name-value pairs. 
+ % + % Input Arguments (Name-Value Arguments): + % - corrected (ImageSeries) - Image stack with frames shifted to the common coordinates. + % + % - original (ImageSeries) - Link to ImageSeries object that is being registered. + % + % - xy_translation (TimeSeries) - Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image. + % + % Output Arguments: + % - correctedImageStack (types.core.CorrectedImageStack) - A CorrectedImageStack object + obj = obj@types.core.NWBDataInterface(varargin{:}); diff --git a/+types/+core/CurrentClampSeries.m b/code/+types/+core/CurrentClampSeries.m similarity index 51% rename from +types/+core/CurrentClampSeries.m rename to code/+types/+core/CurrentClampSeries.m index fe27e0cb..47578edf 100644 --- a/+types/+core/CurrentClampSeries.m +++ b/code/+types/+core/CurrentClampSeries.m @@ -1,5 +1,8 @@ classdef CurrentClampSeries < types.core.PatchClampSeries & types.untyped.GroupClass -% CURRENTCLAMPSERIES Voltage data from an intracellular current-clamp recording. A corresponding CurrentClampStimulusSeries (stored separately as a stimulus) is used to store the current injected. +% CURRENTCLAMPSERIES - Voltage data from an intracellular current-clamp recording. A corresponding CurrentClampStimulusSeries (stored separately as a stimulus) is used to store the current injected. +% +% Required Properties: +% data % OPTIONAL PROPERTIES @@ -11,7 +14,55 @@ methods function obj = CurrentClampSeries(varargin) - % CURRENTCLAMPSERIES Constructor for CurrentClampSeries + % CURRENTCLAMPSERIES - Constructor for CurrentClampSeries + % + % Syntax: + % currentClampSeries = types.core.CURRENTCLAMPSERIES() creates a CurrentClampSeries object with unset property values. + % + % currentClampSeries = types.core.CURRENTCLAMPSERIES(Name, Value) creates a CurrentClampSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - bias_current (single) - Bias current, in amps. + % + % - bridge_balance (single) - Bridge balance, in ohms. + % + % - capacitance_compensation (single) - Capacitance compensation, in farads. + % + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (any) - Recorded voltage. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. 
It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - description (char) - Description of the time series. + % + % - electrode (IntracellularElectrode) - Link to IntracellularElectrode object that describes the electrode that was used to apply or record this data. + % + % - gain (single) - Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - stimulus_description (char) - Protocol/stimulus name for this patch-clamp dataset. + % + % - sweep_number (uint32) - Sweep number, allows to group different PatchClampSeries together. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + % + % Output Arguments: + % - currentClampSeries (types.core.CurrentClampSeries) - A CurrentClampSeries object + varargin = [{'data_unit' 'volts'} varargin]; obj = obj@types.core.PatchClampSeries(varargin{:}); diff --git a/code/+types/+core/CurrentClampStimulusSeries.m b/code/+types/+core/CurrentClampStimulusSeries.m new file mode 100644 index 00000000..adffb47f --- /dev/null +++ b/code/+types/+core/CurrentClampStimulusSeries.m @@ -0,0 +1,95 @@ +classdef CurrentClampStimulusSeries < types.core.PatchClampSeries & types.untyped.GroupClass +% CURRENTCLAMPSTIMULUSSERIES - Stimulus current applied during current clamp recording. +% +% Required Properties: +% data + + + +methods + function obj = CurrentClampStimulusSeries(varargin) + % CURRENTCLAMPSTIMULUSSERIES - Constructor for CurrentClampStimulusSeries + % + % Syntax: + % currentClampStimulusSeries = types.core.CURRENTCLAMPSTIMULUSSERIES() creates a CurrentClampStimulusSeries object with unset property values. 
+ % + % currentClampStimulusSeries = types.core.CURRENTCLAMPSTIMULUSSERIES(Name, Value) creates a CurrentClampStimulusSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (any) - Stimulus current applied. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - description (char) - Description of the time series. + % + % - electrode (IntracellularElectrode) - Link to IntracellularElectrode object that describes the electrode that was used to apply or record this data. + % + % - gain (single) - Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. 
+ % + % - stimulus_description (char) - Protocol/stimulus name for this patch-clamp dataset. + % + % - sweep_number (uint32) - Sweep number, allows to group different PatchClampSeries together. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + % + % Output Arguments: + % - currentClampStimulusSeries (types.core.CurrentClampStimulusSeries) - A CurrentClampStimulusSeries object + + varargin = [{'data_unit' 'amperes'} varargin]; + obj = obj@types.core.PatchClampSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + if strcmp(class(obj), 'types.core.CurrentClampStimulusSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + + end + function val = validate_data_unit(obj, val) + if isequal(val, 'amperes') + val = 'amperes'; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''CurrentClampStimulusSeries'' because it is read-only.') + end + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.PatchClampSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/DecompositionSeries.m b/code/+types/+core/DecompositionSeries.m similarity index 51% rename from +types/+core/DecompositionSeries.m rename to code/+types/+core/DecompositionSeries.m index 7e792986..120d011c 100644 --- a/+types/+core/DecompositionSeries.m +++ b/code/+types/+core/DecompositionSeries.m @@ -1,5 +1,8 @@ classdef DecompositionSeries < types.core.TimeSeries & types.untyped.GroupClass -% DECOMPOSITIONSERIES Spectral analysis of a time series, e.g. of an LFP or a speech signal. +% DECOMPOSITIONSERIES - Spectral analysis of a time series, e.g. of an LFP or a speech signal. +% +% Required Properties: +% bands, data, metric % REQUIRED PROPERTIES @@ -15,7 +18,51 @@ methods function obj = DecompositionSeries(varargin) - % DECOMPOSITIONSERIES Constructor for DecompositionSeries + % DECOMPOSITIONSERIES - Constructor for DecompositionSeries + % + % Syntax: + % decompositionSeries = types.core.DECOMPOSITIONSERIES() creates a DecompositionSeries object with unset property values. + % + % decompositionSeries = types.core.DECOMPOSITIONSERIES(Name, Value) creates a DecompositionSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - bands (DynamicTable) - Table for describing the bands that this series was generated from. There should be one row in this table for each band. + % + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. 
If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (numeric) - Data decomposed into frequency bands. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - data_unit (char) - Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'. + % + % - description (char) - Description of the time series. + % + % - metric (char) - The metric used, e.g. phase, amplitude, power. + % + % - source_channels (DynamicTableRegion) - DynamicTableRegion pointer to the channels that this decomposition series was generated from. + % + % - source_timeseries (TimeSeries) - Link to TimeSeries object that this data was calculated from. Metadata about electrodes and their position can be read from that ElectricalSeries so it is not necessary to store that information here. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
+ % + % Output Arguments: + % - decompositionSeries (types.core.DecompositionSeries) - A DecompositionSeries object + varargin = [{'data_unit' 'no unit'} varargin]; obj = obj@types.core.TimeSeries(varargin{:}); diff --git a/+types/+core/Device.m b/code/+types/+core/Device.m similarity index 81% rename from +types/+core/Device.m rename to code/+types/+core/Device.m index 426a9bd5..243bb76a 100644 --- a/+types/+core/Device.m +++ b/code/+types/+core/Device.m @@ -1,5 +1,8 @@ classdef Device < types.core.NWBContainer & types.untyped.GroupClass -% DEVICE Metadata about a data acquisition device, e.g., recording system, electrode, microscope. +% DEVICE - Metadata about a data acquisition device, e.g., recording system, electrode, microscope. +% +% Required Properties: +% None % OPTIONAL PROPERTIES @@ -13,7 +16,27 @@ methods function obj = Device(varargin) - % DEVICE Constructor for Device + % DEVICE - Constructor for Device + % + % Syntax: + % device = types.core.DEVICE() creates a Device object with unset property values. + % + % device = types.core.DEVICE(Name, Value) creates a Device object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - description (char) - Description of the device as free-form text. If there is any software/firmware associated with the device, the names and versions of those can be added to NWBFile.was_generated_by. + % + % - manufacturer (char) - The name of the manufacturer of the device, e.g., Imec, Plexon, Thorlabs. + % + % - model_name (char) - The model name of the device, e.g., Neuropixels 1.0, V-Probe, Bergamo III. + % + % - model_number (char) - The model number (or part/product number) of the device, e.g., PRB_1_4_0480_1, PLX-VP-32-15SE(75)-(260-80)(460-10)-300-(1)CON/32m-V, BERGAMO. + % + % - serial_number (char) - The serial number of the device. + % + % Output Arguments: + % - device (types.core.Device) - A Device object + obj = obj@types.core.NWBContainer(varargin{:}); diff --git a/+types/+core/DfOverF.m b/code/+types/+core/DfOverF.m similarity index 65% rename from +types/+core/DfOverF.m rename to code/+types/+core/DfOverF.m index 50748605..0aa24714 100644 --- a/+types/+core/DfOverF.m +++ b/code/+types/+core/DfOverF.m @@ -1,5 +1,8 @@ classdef DfOverF < types.core.NWBDataInterface & types.untyped.GroupClass -% DFOVERF dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes). +% DFOVERF - dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes). +% +% Required Properties: +% roiresponseseries % REQUIRED PROPERTIES @@ -9,7 +12,19 @@ methods function obj = DfOverF(varargin) - % DFOVERF Constructor for DfOverF + % DFOVERF - Constructor for DfOverF + % + % Syntax: + % dfOverF = types.core.DFOVERF() creates a DfOverF object with unset property values. + % + % dfOverF = types.core.DFOVERF(Name, Value) creates a DfOverF object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - roiresponseseries (RoiResponseSeries) - RoiResponseSeries object(s) containing dF/F for a ROI. 
+ % + % Output Arguments: + % - dfOverF (types.core.DfOverF) - A DfOverF object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.roiresponseseries, ivarargin] = types.util.parseConstrained(obj,'roiresponseseries', 'types.core.RoiResponseSeries', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/ElectricalSeries.m b/code/+types/+core/ElectricalSeries.m similarity index 54% rename from +types/+core/ElectricalSeries.m rename to code/+types/+core/ElectricalSeries.m index 9a81aa87..6c27bfeb 100644 --- a/+types/+core/ElectricalSeries.m +++ b/code/+types/+core/ElectricalSeries.m @@ -1,5 +1,8 @@ classdef ElectricalSeries < types.core.TimeSeries & types.untyped.GroupClass -% ELECTRICALSERIES A time series of acquired voltage data from extracellular recordings. The data field is an int or float array storing data in volts. The first dimension should always represent time. The second dimension, if present, should represent channels. +% ELECTRICALSERIES - A time series of acquired voltage data from extracellular recordings. The data field is an int or float array storing data in volts. The first dimension should always represent time. The second dimension, if present, should represent channels. +% +% Required Properties: +% data, electrodes % READONLY PROPERTIES @@ -18,7 +21,47 @@ methods function obj = ElectricalSeries(varargin) - % ELECTRICALSERIES Constructor for ElectricalSeries + % ELECTRICALSERIES - Constructor for ElectricalSeries + % + % Syntax: + % electricalSeries = types.core.ELECTRICALSERIES() creates a ElectricalSeries object with unset property values. + % + % electricalSeries = types.core.ELECTRICALSERIES(Name, Value) creates a ElectricalSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - channel_conversion (single) - Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels. + % + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (numeric) - Recorded voltage data. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. 
An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - description (char) - Description of the time series. + % + % - electrodes (DynamicTableRegion) - DynamicTableRegion pointer to the electrodes that this time series was generated from. + % + % - filtering (char) - Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be "High-pass 4-pole Bessel filter at 500 Hz". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be "Low-pass filter at 300 Hz". If a non-standard filter type is used, provide as much detail about the filter properties as possible. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
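+ %
+ % Example (illustrative sketch; 'voltageData' and 'electrodeTableRegion' are placeholders for a [time x channel] numeric array and a previously created types.hdmf_common.DynamicTableRegion pointing into the electrodes table):
+ %   electricalSeries = types.core.ElectricalSeries( ...
+ %       'data', voltageData, ...
+ %       'electrodes', electrodeTableRegion, ...
+ %       'starting_time', 0.0, ...
+ %       'starting_time_rate', 30000.0);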
+ % + % Output Arguments: + % - electricalSeries (types.core.ElectricalSeries) - A ElectricalSeries object + varargin = [{'channel_conversion_axis' types.util.correctType(1, 'int32') 'data_unit' 'volts'} varargin]; obj = obj@types.core.TimeSeries(varargin{:}); diff --git a/+types/+core/ElectrodeGroup.m b/code/+types/+core/ElectrodeGroup.m similarity index 81% rename from +types/+core/ElectrodeGroup.m rename to code/+types/+core/ElectrodeGroup.m index 96c8af02..eca53034 100644 --- a/+types/+core/ElectrodeGroup.m +++ b/code/+types/+core/ElectrodeGroup.m @@ -1,5 +1,8 @@ classdef ElectrodeGroup < types.core.NWBContainer & types.untyped.GroupClass -% ELECTRODEGROUP A physical grouping of electrodes, e.g. a shank of an array. +% ELECTRODEGROUP - A physical grouping of electrodes, e.g. a shank of an array. +% +% Required Properties: +% None % OPTIONAL PROPERTIES @@ -12,7 +15,25 @@ methods function obj = ElectrodeGroup(varargin) - % ELECTRODEGROUP Constructor for ElectrodeGroup + % ELECTRODEGROUP - Constructor for ElectrodeGroup + % + % Syntax: + % electrodeGroup = types.core.ELECTRODEGROUP() creates a ElectrodeGroup object with unset property values. + % + % electrodeGroup = types.core.ELECTRODEGROUP(Name, Value) creates a ElectrodeGroup object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - description (char) - Description of this electrode group. + % + % - device (Device) - Link to the device that was used to record from this electrode group. + % + % - location (char) - Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible. + % + % - position (Table with columns: (single, single, single)) - stereotaxic or common framework coordinates + % + % Output Arguments: + % - electrodeGroup (types.core.ElectrodeGroup) - A ElectrodeGroup object + obj = obj@types.core.NWBContainer(varargin{:}); diff --git a/+types/+core/EventDetection.m b/code/+types/+core/EventDetection.m similarity index 79% rename from +types/+core/EventDetection.m rename to code/+types/+core/EventDetection.m index 19c8f062..cd78d8d4 100644 --- a/+types/+core/EventDetection.m +++ b/code/+types/+core/EventDetection.m @@ -1,5 +1,8 @@ classdef EventDetection < types.core.NWBDataInterface & types.untyped.GroupClass -% EVENTDETECTION Detected spike events from voltage trace(s). +% EVENTDETECTION - Detected spike events from voltage trace(s). +% +% Required Properties: +% detection_method, source_idx, times % READONLY PROPERTIES @@ -19,7 +22,25 @@ methods function obj = EventDetection(varargin) - % EVENTDETECTION Constructor for EventDetection + % EVENTDETECTION - Constructor for EventDetection + % + % Syntax: + % eventDetection = types.core.EVENTDETECTION() creates a EventDetection object with unset property values. + % + % eventDetection = types.core.EVENTDETECTION(Name, Value) creates a EventDetection object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - detection_method (char) - Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values. + % + % - source_electricalseries (ElectricalSeries) - Link to the ElectricalSeries that this data was calculated from. Metadata about electrodes and their position can be read from that ElectricalSeries so it's not necessary to include that information here. 
+ % + % - source_idx (int32) - Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data. + % + % - times (double) - Timestamps of events, in seconds. + % + % Output Arguments: + % - eventDetection (types.core.EventDetection) - A EventDetection object + varargin = [{'times_unit' 'seconds'} varargin]; obj = obj@types.core.NWBDataInterface(varargin{:}); diff --git a/+types/+core/EventWaveform.m b/code/+types/+core/EventWaveform.m similarity index 64% rename from +types/+core/EventWaveform.m rename to code/+types/+core/EventWaveform.m index 3915db7c..3847832f 100644 --- a/+types/+core/EventWaveform.m +++ b/code/+types/+core/EventWaveform.m @@ -1,5 +1,8 @@ classdef EventWaveform < types.core.NWBDataInterface & types.untyped.GroupClass -% EVENTWAVEFORM DEPRECATED. Represents either the waveforms of detected events, as extracted from a raw data trace in /acquisition, or the event waveforms that were stored during experiment acquisition. +% EVENTWAVEFORM - DEPRECATED. Represents either the waveforms of detected events, as extracted from a raw data trace in /acquisition, or the event waveforms that were stored during experiment acquisition. +% +% Required Properties: +% None % OPTIONAL PROPERTIES @@ -9,7 +12,19 @@ methods function obj = EventWaveform(varargin) - % EVENTWAVEFORM Constructor for EventWaveform + % EVENTWAVEFORM - Constructor for EventWaveform + % + % Syntax: + % eventWaveform = types.core.EVENTWAVEFORM() creates a EventWaveform object with unset property values. + % + % eventWaveform = types.core.EVENTWAVEFORM(Name, Value) creates a EventWaveform object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - spikeeventseries (SpikeEventSeries) - SpikeEventSeries object(s) containing detected spike event waveforms. + % + % Output Arguments: + % - eventWaveform (types.core.EventWaveform) - A EventWaveform object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.spikeeventseries, ivarargin] = types.util.parseConstrained(obj,'spikeeventseries', 'types.core.SpikeEventSeries', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/ExperimentalConditionsTable.m b/code/+types/+core/ExperimentalConditionsTable.m similarity index 58% rename from +types/+core/ExperimentalConditionsTable.m rename to code/+types/+core/ExperimentalConditionsTable.m index 5dbc082f..ff3b62e0 100644 --- a/+types/+core/ExperimentalConditionsTable.m +++ b/code/+types/+core/ExperimentalConditionsTable.m @@ -1,5 +1,8 @@ classdef ExperimentalConditionsTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass -% EXPERIMENTALCONDITIONSTABLE A table for grouping different intracellular recording repetitions together that belong to the same experimental condition. +% EXPERIMENTALCONDITIONSTABLE - A table for grouping different intracellular recording repetitions together that belong to the same experimental condition. 
+% +% Required Properties: +% id, repetitions, repetitions_index % REQUIRED PROPERTIES @@ -10,7 +13,29 @@ methods function obj = ExperimentalConditionsTable(varargin) - % EXPERIMENTALCONDITIONSTABLE Constructor for ExperimentalConditionsTable + % EXPERIMENTALCONDITIONSTABLE - Constructor for ExperimentalConditionsTable + % + % Syntax: + % experimentalConditionsTable = types.core.EXPERIMENTALCONDITIONSTABLE() creates a ExperimentalConditionsTable object with unset property values. + % + % experimentalConditionsTable = types.core.EXPERIMENTALCONDITIONSTABLE(Name, Value) creates a ExperimentalConditionsTable object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - description (char) - Description of what is in this dynamic table. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - repetitions (DynamicTableRegion) - A reference to one or more rows in the RepetitionsTable table. + % + % - repetitions_index (VectorIndex) - Index dataset for the repetitions column. + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % Output Arguments: + % - experimentalConditionsTable (types.core.ExperimentalConditionsTable) - A ExperimentalConditionsTable object + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); diff --git a/+types/+core/EyeTracking.m b/code/+types/+core/EyeTracking.m similarity index 68% rename from +types/+core/EyeTracking.m rename to code/+types/+core/EyeTracking.m index b27553a5..5fb4cb3f 100644 --- a/+types/+core/EyeTracking.m +++ b/code/+types/+core/EyeTracking.m @@ -1,5 +1,8 @@ classdef EyeTracking < types.core.NWBDataInterface & types.untyped.GroupClass -% EYETRACKING Eye-tracking data, representing direction of gaze. +% EYETRACKING - Eye-tracking data, representing direction of gaze. +% +% Required Properties: +% None % OPTIONAL PROPERTIES @@ -9,7 +12,19 @@ methods function obj = EyeTracking(varargin) - % EYETRACKING Constructor for EyeTracking + % EYETRACKING - Constructor for EyeTracking + % + % Syntax: + % eyeTracking = types.core.EYETRACKING() creates a EyeTracking object with unset property values. + % + % eyeTracking = types.core.EYETRACKING(Name, Value) creates a EyeTracking object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - spatialseries (SpatialSeries) - SpatialSeries object containing data measuring direction of gaze. + % + % Output Arguments: + % - eyeTracking (types.core.EyeTracking) - A EyeTracking object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.spatialseries, ivarargin] = types.util.parseConstrained(obj,'spatialseries', 'types.core.SpatialSeries', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/FeatureExtraction.m b/code/+types/+core/FeatureExtraction.m similarity index 79% rename from +types/+core/FeatureExtraction.m rename to code/+types/+core/FeatureExtraction.m index cab26ebd..13342b54 100644 --- a/+types/+core/FeatureExtraction.m +++ b/code/+types/+core/FeatureExtraction.m @@ -1,5 +1,8 @@ classdef FeatureExtraction < types.core.NWBDataInterface & types.untyped.GroupClass -% FEATUREEXTRACTION Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source. 
+% FEATUREEXTRACTION - Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source. +% +% Required Properties: +% description, electrodes, features, times % REQUIRED PROPERTIES @@ -12,7 +15,25 @@ methods function obj = FeatureExtraction(varargin) - % FEATUREEXTRACTION Constructor for FeatureExtraction + % FEATUREEXTRACTION - Constructor for FeatureExtraction + % + % Syntax: + % featureExtraction = types.core.FEATUREEXTRACTION() creates a FeatureExtraction object with unset property values. + % + % featureExtraction = types.core.FEATUREEXTRACTION(Name, Value) creates a FeatureExtraction object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - description (char) - Description of features (eg, ''PC1'') for each of the extracted features. + % + % - electrodes (DynamicTableRegion) - DynamicTableRegion pointer to the electrodes that this time series was generated from. + % + % - features (single) - Multi-dimensional array of features extracted from each event. + % + % - times (double) - Times of events that features correspond to (can be a link). + % + % Output Arguments: + % - featureExtraction (types.core.FeatureExtraction) - A FeatureExtraction object + obj = obj@types.core.NWBDataInterface(varargin{:}); diff --git a/+types/+core/FilteredEphys.m b/code/+types/+core/FilteredEphys.m similarity index 50% rename from +types/+core/FilteredEphys.m rename to code/+types/+core/FilteredEphys.m index 53c81128..02cd1e5e 100644 --- a/+types/+core/FilteredEphys.m +++ b/code/+types/+core/FilteredEphys.m @@ -1,5 +1,8 @@ classdef FilteredEphys < types.core.NWBDataInterface & types.untyped.GroupClass -% FILTEREDEPHYS Electrophysiology data from one or more channels that has been subjected to filtering. Examples of filtered data include Theta and Gamma (LFP has its own interface). FilteredEphys modules publish an ElectricalSeries for each filtered channel or set of channels. The name of each ElectricalSeries is arbitrary but should be informative. The source of the filtered data, whether this is from analysis of another time series or as acquired by hardware, should be noted in each's TimeSeries::description field. There is no assumed 1::1 correspondence between filtered ephys signals and electrodes, as a single signal can apply to many nearby electrodes, and one electrode may have different filtered (e.g., theta and/or gamma) signals represented. Filter properties should be noted in the ElectricalSeries 'filtering' attribute. +% FILTEREDEPHYS - Electrophysiology data from one or more channels that has been subjected to filtering. Examples of filtered data include Theta and Gamma (LFP has its own interface). FilteredEphys modules publish an ElectricalSeries for each filtered channel or set of channels. The name of each ElectricalSeries is arbitrary but should be informative. The source of the filtered data, whether this is from analysis of another time series or as acquired by hardware, should be noted in each's TimeSeries::description field. There is no assumed 1::1 correspondence between filtered ephys signals and electrodes, as a single signal can apply to many nearby electrodes, and one electrode may have different filtered (e.g., theta and/or gamma) signals represented. Filter properties should be noted in the ElectricalSeries 'filtering' attribute. 
+% +% Required Properties: +% electricalseries % REQUIRED PROPERTIES @@ -9,7 +12,19 @@ methods function obj = FilteredEphys(varargin) - % FILTEREDEPHYS Constructor for FilteredEphys + % FILTEREDEPHYS - Constructor for FilteredEphys + % + % Syntax: + % filteredEphys = types.core.FILTEREDEPHYS() creates a FilteredEphys object with unset property values. + % + % filteredEphys = types.core.FILTEREDEPHYS(Name, Value) creates a FilteredEphys object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - electricalseries (ElectricalSeries) - ElectricalSeries object(s) containing filtered electrophysiology data. + % + % Output Arguments: + % - filteredEphys (types.core.FilteredEphys) - A FilteredEphys object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.electricalseries, ivarargin] = types.util.parseConstrained(obj,'electricalseries', 'types.core.ElectricalSeries', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/Fluorescence.m b/code/+types/+core/Fluorescence.m similarity index 64% rename from +types/+core/Fluorescence.m rename to code/+types/+core/Fluorescence.m index 904cb778..2c6f372f 100644 --- a/+types/+core/Fluorescence.m +++ b/code/+types/+core/Fluorescence.m @@ -1,5 +1,8 @@ classdef Fluorescence < types.core.NWBDataInterface & types.untyped.GroupClass -% FLUORESCENCE Fluorescence information about a region of interest (ROI). Storage hierarchy of fluorescence should be the same as for segmentation (ie, same names for ROIs and for image planes). +% FLUORESCENCE - Fluorescence information about a region of interest (ROI). Storage hierarchy of fluorescence should be the same as for segmentation (ie, same names for ROIs and for image planes). +% +% Required Properties: +% roiresponseseries % REQUIRED PROPERTIES @@ -9,7 +12,19 @@ methods function obj = Fluorescence(varargin) - % FLUORESCENCE Constructor for Fluorescence + % FLUORESCENCE - Constructor for Fluorescence + % + % Syntax: + % fluorescence = types.core.FLUORESCENCE() creates a Fluorescence object with unset property values. + % + % fluorescence = types.core.FLUORESCENCE(Name, Value) creates a Fluorescence object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - roiresponseseries (RoiResponseSeries) - RoiResponseSeries object(s) containing fluorescence data for a ROI. + % + % Output Arguments: + % - fluorescence (types.core.Fluorescence) - A Fluorescence object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.roiresponseseries, ivarargin] = types.util.parseConstrained(obj,'roiresponseseries', 'types.core.RoiResponseSeries', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/GrayscaleImage.m b/code/+types/+core/GrayscaleImage.m similarity index 54% rename from +types/+core/GrayscaleImage.m rename to code/+types/+core/GrayscaleImage.m index 0f773147..ee85bdf1 100644 --- a/+types/+core/GrayscaleImage.m +++ b/code/+types/+core/GrayscaleImage.m @@ -1,11 +1,30 @@ classdef GrayscaleImage < types.core.Image & types.untyped.DatasetClass -% GRAYSCALEIMAGE A grayscale image. +% GRAYSCALEIMAGE - A grayscale image. +% +% Required Properties: +% data methods function obj = GrayscaleImage(varargin) - % GRAYSCALEIMAGE Constructor for GrayscaleImage + % GRAYSCALEIMAGE - Constructor for GrayscaleImage + % + % Syntax: + % grayscaleImage = types.core.GRAYSCALEIMAGE() creates a GrayscaleImage object with unset property values. 
+ % + % grayscaleImage = types.core.GRAYSCALEIMAGE(Name, Value) creates a GrayscaleImage object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (numeric) - No description + % + % - description (char) - Description of the image. + % + % - resolution (single) - Pixel resolution of the image, in pixels per centimeter. + % + % Output Arguments: + % - grayscaleImage (types.core.GrayscaleImage) - A GrayscaleImage object + obj = obj@types.core.Image(varargin{:}); diff --git a/code/+types/+core/IZeroClampSeries.m b/code/+types/+core/IZeroClampSeries.m new file mode 100644 index 00000000..1002c42d --- /dev/null +++ b/code/+types/+core/IZeroClampSeries.m @@ -0,0 +1,115 @@ +classdef IZeroClampSeries < types.core.CurrentClampSeries & types.untyped.GroupClass +% IZEROCLAMPSERIES - Voltage data from an intracellular recording when all current and amplifier settings are off (i.e., CurrentClampSeries fields will be zero). There is no CurrentClampStimulusSeries associated with an IZero series because the amplifier is disconnected and no stimulus can reach the cell. +% +% Required Properties: +% bias_current, bridge_balance, capacitance_compensation, data + + + +methods + function obj = IZeroClampSeries(varargin) + % IZEROCLAMPSERIES - Constructor for IZeroClampSeries + % + % Syntax: + % iZeroClampSeries = types.core.IZEROCLAMPSERIES() creates a IZeroClampSeries object with unset property values. + % + % iZeroClampSeries = types.core.IZEROCLAMPSERIES(Name, Value) creates a IZeroClampSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (any) - Recorded voltage. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - description (char) - Description of the time series. + % + % - electrode (IntracellularElectrode) - Link to IntracellularElectrode object that describes the electrode that was used to apply or record this data. + % + % - gain (single) - Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - sweep_number (uint32) - Sweep number, allows to group different PatchClampSeries together. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
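+ %
+ % Example (illustrative sketch; 'voltageData' and 'icElectrode' are placeholders for recorded voltage samples and a previously created types.core.IntracellularElectrode, the gain value is arbitrary, and further metadata may be needed for a valid file):
+ %   iZeroClampSeries = types.core.IZeroClampSeries( ...
+ %       'data', voltageData, ...
+ %       'electrode', icElectrode, ...
+ %       'gain', 0.02, ...
+ %       'starting_time', 0.0, ...
+ %       'starting_time_rate', 20000.0);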
+ % + % Output Arguments: + % - iZeroClampSeries (types.core.IZeroClampSeries) - A IZeroClampSeries object + + varargin = [{'bias_current' types.util.correctType(0, 'single') 'bridge_balance' types.util.correctType(0, 'single') 'capacitance_compensation' types.util.correctType(0, 'single') 'stimulus_description' 'N/A'} varargin]; + obj = obj@types.core.CurrentClampSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'bias_current',[]); + addParameter(p, 'bridge_balance',[]); + addParameter(p, 'capacitance_compensation',[]); + addParameter(p, 'stimulus_description',[]); + misc.parseSkipInvalidName(p, varargin); + obj.bias_current = p.Results.bias_current; + obj.bridge_balance = p.Results.bridge_balance; + obj.capacitance_compensation = p.Results.capacitance_compensation; + obj.stimulus_description = p.Results.stimulus_description; + if strcmp(class(obj), 'types.core.IZeroClampSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_bias_current(obj, val) + if isequal(val, 0) + val = 0; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''bias_current'' property of class ''IZeroClampSeries'' because it is read-only.') + end + end + function val = validate_bridge_balance(obj, val) + if isequal(val, 0) + val = 0; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''bridge_balance'' property of class ''IZeroClampSeries'' because it is read-only.') + end + end + function val = validate_capacitance_compensation(obj, val) + if isequal(val, 0) + val = 0; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''capacitance_compensation'' property of class ''IZeroClampSeries'' because it is read-only.') + end + end + function val = validate_stimulus_description(obj, val) + if isequal(val, 'N/A') + val = 'N/A'; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''stimulus_description'' property of class ''IZeroClampSeries'' because it is read-only.') + end + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.CurrentClampSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/Image.m b/code/+types/+core/Image.m similarity index 76% rename from +types/+core/Image.m rename to code/+types/+core/Image.m index 93a0ab7d..f46bc05c 100644 --- a/+types/+core/Image.m +++ b/code/+types/+core/Image.m @@ -1,5 +1,8 @@ classdef Image < types.core.NWBData & types.untyped.DatasetClass -% IMAGE An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). +% IMAGE - An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)). +% +% Required Properties: +% data % OPTIONAL PROPERTIES @@ -10,7 +13,23 @@ methods function obj = Image(varargin) - % IMAGE Constructor for Image + % IMAGE - Constructor for Image + % + % Syntax: + % image = types.core.IMAGE() creates a Image object with unset property values. + % + % image = types.core.IMAGE(Name, Value) creates a Image object where one or more property values are specified using name-value pairs. 
+ % + % Input Arguments (Name-Value Arguments): + % - data (numeric) - No description + % + % - description (char) - Description of the image. + % + % - resolution (single) - Pixel resolution of the image, in pixels per centimeter. + % + % Output Arguments: + % - image (types.core.Image) - A Image object + obj = obj@types.core.NWBData(varargin{:}); diff --git a/code/+types/+core/ImageMaskSeries.m b/code/+types/+core/ImageMaskSeries.m new file mode 100644 index 00000000..71d06e35 --- /dev/null +++ b/code/+types/+core/ImageMaskSeries.m @@ -0,0 +1,107 @@ +classdef ImageMaskSeries < types.core.ImageSeries & types.untyped.GroupClass +% IMAGEMASKSERIES - DEPRECATED. An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed. +% +% Required Properties: +% data + + +% OPTIONAL PROPERTIES +properties + masked_imageseries; % ImageSeries +end + +methods + function obj = ImageMaskSeries(varargin) + % IMAGEMASKSERIES - Constructor for ImageMaskSeries + % + % Syntax: + % imageMaskSeries = types.core.IMAGEMASKSERIES() creates a ImageMaskSeries object with unset property values. + % + % imageMaskSeries = types.core.IMAGEMASKSERIES(Name, Value) creates a ImageMaskSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (numeric) - Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - data_unit (char) - Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + % + % - description (char) - Description of the time series. + % + % - device (Device) - Link to the Device object that was used to capture these images. + % + % - dimension (int32) - Number of pixels on x, y, (and z) axes. + % + % - external_file (char) - Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. + % + % - external_file_starting_frame (int32) - Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. + % + % - format (char) - Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + % + % - masked_imageseries (ImageSeries) - Link to ImageSeries object that this image mask is applied to. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. 
+ % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + % + % Output Arguments: + % - imageMaskSeries (types.core.ImageMaskSeries) - A ImageMaskSeries object + + obj = obj@types.core.ImageSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'masked_imageseries',[]); + misc.parseSkipInvalidName(p, varargin); + obj.masked_imageseries = p.Results.masked_imageseries; + if strcmp(class(obj), 'types.core.ImageMaskSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.masked_imageseries(obj, val) + obj.masked_imageseries = obj.validate_masked_imageseries(val); + end + %% VALIDATORS + + function val = validate_masked_imageseries(obj, val) + if isa(val, 'types.untyped.SoftLink') + if isprop(val, 'target') + types.util.checkDtype('masked_imageseries', 'types.core.ImageSeries', val.target); + end + else + val = types.util.checkDtype('masked_imageseries', 'types.core.ImageSeries', val); + if ~isempty(val) + val = types.untyped.SoftLink(val); + end + end + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.ImageSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.masked_imageseries.export(fid, [fullpath '/masked_imageseries'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/ImageReferences.m b/code/+types/+core/ImageReferences.m similarity index 59% rename from +types/+core/ImageReferences.m rename to code/+types/+core/ImageReferences.m index e5139fbf..fc0cfa16 100644 --- a/+types/+core/ImageReferences.m +++ b/code/+types/+core/ImageReferences.m @@ -1,11 +1,26 @@ classdef ImageReferences < types.core.NWBData & types.untyped.DatasetClass -% IMAGEREFERENCES Ordered dataset of references to Image objects. +% IMAGEREFERENCES - Ordered dataset of references to Image objects. +% +% Required Properties: +% data methods function obj = ImageReferences(varargin) - % IMAGEREFERENCES Constructor for ImageReferences + % IMAGEREFERENCES - Constructor for ImageReferences + % + % Syntax: + % imageReferences = types.core.IMAGEREFERENCES() creates a ImageReferences object with unset property values. + % + % imageReferences = types.core.IMAGEREFERENCES(Name, Value) creates a ImageReferences object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (Object reference to Image) - No description + % + % Output Arguments: + % - imageReferences (types.core.ImageReferences) - A ImageReferences object + obj = obj@types.core.NWBData(varargin{:}); diff --git a/+types/+core/ImageSegmentation.m b/code/+types/+core/ImageSegmentation.m similarity index 55% rename from +types/+core/ImageSegmentation.m rename to code/+types/+core/ImageSegmentation.m index 75d2c312..e4873310 100644 --- a/+types/+core/ImageSegmentation.m +++ b/code/+types/+core/ImageSegmentation.m @@ -1,5 +1,8 @@ classdef ImageSegmentation < types.core.NWBDataInterface & types.untyped.GroupClass -% IMAGESEGMENTATION Stores pixels in an image that represent different regions of interest (ROIs) or masks. All segmentation for a given imaging plane is stored together, with storage for multiple imaging planes (masks) supported. 
Each ROI is stored in its own subgroup, with the ROI group containing both a 2D mask and a list of pixels that make up this mask. Segments can also be used for masking neuropil. If segmentation is allowed to change with time, a new imaging plane (or module) is required and ROI names should remain consistent between them. +% IMAGESEGMENTATION - Stores pixels in an image that represent different regions of interest (ROIs) or masks. All segmentation for a given imaging plane is stored together, with storage for multiple imaging planes (masks) supported. Each ROI is stored in its own subgroup, with the ROI group containing both a 2D mask and a list of pixels that make up this mask. Segments can also be used for masking neuropil. If segmentation is allowed to change with time, a new imaging plane (or module) is required and ROI names should remain consistent between them. +% +% Required Properties: +% planesegmentation % REQUIRED PROPERTIES @@ -9,7 +12,19 @@ methods function obj = ImageSegmentation(varargin) - % IMAGESEGMENTATION Constructor for ImageSegmentation + % IMAGESEGMENTATION - Constructor for ImageSegmentation + % + % Syntax: + % imageSegmentation = types.core.IMAGESEGMENTATION() creates a ImageSegmentation object with unset property values. + % + % imageSegmentation = types.core.IMAGESEGMENTATION(Name, Value) creates a ImageSegmentation object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - planesegmentation (PlaneSegmentation) - Results from image segmentation of a specific imaging plane. + % + % Output Arguments: + % - imageSegmentation (types.core.ImageSegmentation) - A ImageSegmentation object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.planesegmentation, ivarargin] = types.util.parseConstrained(obj,'planesegmentation', 'types.core.PlaneSegmentation', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/ImageSeries.m b/code/+types/+core/ImageSeries.m similarity index 57% rename from +types/+core/ImageSeries.m rename to code/+types/+core/ImageSeries.m index 2d01e1c3..afb0d73a 100644 --- a/+types/+core/ImageSeries.m +++ b/code/+types/+core/ImageSeries.m @@ -1,5 +1,8 @@ classdef ImageSeries < types.core.TimeSeries & types.untyped.GroupClass -% IMAGESERIES General image data that is common between acquisition and stimulus time series. Sometimes the image data is stored in the file in a raw format while other times it will be stored as a series of external image files in the host file system. The data field will either be binary data, if the data is stored in the NWB file, or empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z]. +% IMAGESERIES - General image data that is common between acquisition and stimulus time series. Sometimes the image data is stored in the file in a raw format while other times it will be stored as a series of external image files in the host file system. The data field will either be binary data, if the data is stored in the NWB file, or empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z]. +% +% Required Properties: +% data % OPTIONAL PROPERTIES @@ -13,7 +16,53 @@ methods function obj = ImageSeries(varargin) - % IMAGESERIES Constructor for ImageSeries + % IMAGESERIES - Constructor for ImageSeries + % + % Syntax: + % imageSeries = types.core.IMAGESERIES() creates a ImageSeries object with unset property values. 
+ % + % imageSeries = types.core.IMAGESERIES(Name, Value) creates a ImageSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (numeric) - Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - data_unit (char) - Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + % + % - description (char) - Description of the time series. + % + % - device (Device) - Link to the Device object that was used to capture these images. + % + % - dimension (int32) - Number of pixels on x, y, (and z) axes. + % + % - external_file (char) - Paths to one or more external file(s). 
The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. + % + % - external_file_starting_frame (int32) - Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. + % + % - format (char) - Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + % + % Output Arguments: + % - imageSeries (types.core.ImageSeries) - A ImageSeries object + obj = obj@types.core.TimeSeries(varargin{:}); diff --git a/+types/+core/Images.m b/code/+types/+core/Images.m similarity index 72% rename from +types/+core/Images.m rename to code/+types/+core/Images.m index b07572ad..6d88c6e5 100644 --- a/+types/+core/Images.m +++ b/code/+types/+core/Images.m @@ -1,5 +1,8 @@ classdef Images < types.core.NWBDataInterface & types.untyped.GroupClass -% IMAGES A collection of images with an optional way to specify the order of the images using the "order_of_images" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. +% IMAGES - A collection of images with an optional way to specify the order of the images using the "order_of_images" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries. +% +% Required Properties: +% image % REQUIRED PROPERTIES @@ -14,7 +17,23 @@ methods function obj = Images(varargin) - % IMAGES Constructor for Images + % IMAGES - Constructor for Images + % + % Syntax: + % images = types.core.IMAGES() creates a Images object with unset property values. + % + % images = types.core.IMAGES(Name, Value) creates a Images object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - description (char) - Description of this collection of images. + % + % - image (Image) - Images stored in this collection. 
+ % + % - order_of_images (ImageReferences) - Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images. + % + % Output Arguments: + % - images (types.core.Images) - A Images object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.image, ivarargin] = types.util.parseConstrained(obj,'image', 'types.core.Image', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/ImagingPlane.m b/code/+types/+core/ImagingPlane.m similarity index 82% rename from +types/+core/ImagingPlane.m rename to code/+types/+core/ImagingPlane.m index e60f420d..d29715be 100644 --- a/+types/+core/ImagingPlane.m +++ b/code/+types/+core/ImagingPlane.m @@ -1,5 +1,8 @@ classdef ImagingPlane < types.core.NWBContainer & types.untyped.GroupClass -% IMAGINGPLANE An imaging plane and its metadata. +% IMAGINGPLANE - An imaging plane and its metadata. +% +% Required Properties: +% excitation_lambda, indicator, location, opticalchannel % REQUIRED PROPERTIES @@ -26,7 +29,47 @@ methods function obj = ImagingPlane(varargin) - % IMAGINGPLANE Constructor for ImagingPlane + % IMAGINGPLANE - Constructor for ImagingPlane + % + % Syntax: + % imagingPlane = types.core.IMAGINGPLANE() creates a ImagingPlane object with unset property values. + % + % imagingPlane = types.core.IMAGINGPLANE(Name, Value) creates a ImagingPlane object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - description (char) - Description of the imaging plane. + % + % - device (Device) - Link to the Device object that was used to record from this electrode. + % + % - excitation_lambda (single) - Excitation wavelength, in nm. + % + % - grid_spacing (single) - Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid. + % + % - grid_spacing_unit (char) - Measurement units for grid_spacing. The default value is 'meters'. + % + % - imaging_rate (single) - Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead. + % + % - indicator (char) - Calcium indicator. + % + % - location (char) - Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. + % + % - manifold (single) - DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing. + % + % - manifold_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000. + % + % - manifold_unit (char) - Base unit of measurement for working with the data. The default value is 'meters'. 
+ % + % - opticalchannel (OpticalChannel) - An optical channel used to record from an imaging plane. + % + % - origin_coords (single) - Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma). + % + % - origin_coords_unit (char) - Measurement units for origin_coords. The default value is 'meters'. + % + % - reference_frame (char) - Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral)." + % + % Output Arguments: + % - imagingPlane (types.core.ImagingPlane) - A ImagingPlane object + varargin = [{'grid_spacing_unit' 'meters' 'manifold_conversion' types.util.correctType(1, 'single') 'manifold_unit' 'meters' 'origin_coords_unit' 'meters'} varargin]; obj = obj@types.core.NWBContainer(varargin{:}); [obj.opticalchannel, ivarargin] = types.util.parseConstrained(obj,'opticalchannel', 'types.core.OpticalChannel', varargin{:}); diff --git a/+types/+core/ImagingRetinotopy.m b/code/+types/+core/ImagingRetinotopy.m similarity index 89% rename from +types/+core/ImagingRetinotopy.m rename to code/+types/+core/ImagingRetinotopy.m index 24dbf26d..68bddb5d 100644 --- a/+types/+core/ImagingRetinotopy.m +++ b/code/+types/+core/ImagingRetinotopy.m @@ -1,5 +1,8 @@ classdef ImagingRetinotopy < types.core.NWBDataInterface & types.untyped.GroupClass -% IMAGINGRETINOTOPY DEPRECATED. Intrinsic signal optical imaging or widefield imaging for measuring retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) of responses to specific stimuli and a combined polarity map from which to identify visual areas. This group does not store the raw responses imaged during retinotopic mapping or the stimuli presented, but rather the resulting phase and power maps after applying a Fourier transform on the averaged responses. Note: for data consistency, all images and arrays are stored in the format [row][column] and [row, col], which equates to [y][x]. Field of view and dimension arrays may appear backward (i.e., y before x). +% IMAGINGRETINOTOPY - DEPRECATED. Intrinsic signal optical imaging or widefield imaging for measuring retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) of responses to specific stimuli and a combined polarity map from which to identify visual areas. 
This group does not store the raw responses imaged during retinotopic mapping or the stimuli presented, but rather the resulting phase and power maps after applying a Fourier transform on the averaged responses. Note: for data consistency, all images and arrays are stored in the format [row][column] and [row, col], which equates to [y][x]. Field of view and dimension arrays may appear backward (i.e., y before x). +% +% Required Properties: +% axis_1_phase_map, axis_2_phase_map, axis_descriptions, vasculature_image % REQUIRED PROPERTIES @@ -42,7 +45,79 @@ methods function obj = ImagingRetinotopy(varargin) - % IMAGINGRETINOTOPY Constructor for ImagingRetinotopy + % IMAGINGRETINOTOPY - Constructor for ImagingRetinotopy + % + % Syntax: + % imagingRetinotopy = types.core.IMAGINGRETINOTOPY() creates a ImagingRetinotopy object with unset property values. + % + % imagingRetinotopy = types.core.IMAGINGRETINOTOPY(Name, Value) creates a ImagingRetinotopy object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - axis_1_phase_map (single) - Phase response to stimulus on the first measured axis. + % + % - axis_1_phase_map_dimension (int32) - Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + % + % - axis_1_phase_map_field_of_view (single) - Size of viewing area, in meters. + % + % - axis_1_phase_map_unit (char) - Unit that axis data is stored in (e.g., degrees). + % + % - axis_1_power_map (single) - Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. + % + % - axis_1_power_map_dimension (int32) - Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + % + % - axis_1_power_map_field_of_view (single) - Size of viewing area, in meters. + % + % - axis_1_power_map_unit (char) - Unit that axis data is stored in (e.g., degrees). + % + % - axis_2_phase_map (single) - Phase response to stimulus on the second measured axis. + % + % - axis_2_phase_map_dimension (int32) - Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + % + % - axis_2_phase_map_field_of_view (single) - Size of viewing area, in meters. + % + % - axis_2_phase_map_unit (char) - Unit that axis data is stored in (e.g., degrees). + % + % - axis_2_power_map (single) - Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power. + % + % - axis_2_power_map_dimension (int32) - Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + % + % - axis_2_power_map_field_of_view (single) - Size of viewing area, in meters. + % + % - axis_2_power_map_unit (char) - Unit that axis data is stored in (e.g., degrees). + % + % - axis_descriptions (char) - Two-element array describing the contents of the two response axis fields. Description should be something like ['altitude', 'azimuth'] or '['radius', 'theta']. + % + % - focal_depth_image (uint16) - Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns]. + % + % - focal_depth_image_bits_per_pixel (int32) - Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. + % + % - focal_depth_image_dimension (int32) - Number of rows and columns in the image. 
NOTE: row, column representation is equivalent to height, width. + % + % - focal_depth_image_field_of_view (single) - Size of viewing area, in meters. + % + % - focal_depth_image_focal_depth (single) - Focal depth offset, in meters. + % + % - focal_depth_image_format (char) - Format of image. Right now only 'raw' is supported. + % + % - sign_map (single) - Sine of the angle between the direction of the gradient in axis_1 and axis_2. + % + % - sign_map_dimension (int32) - Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + % + % - sign_map_field_of_view (single) - Size of viewing area, in meters. + % + % - vasculature_image (uint16) - Gray-scale anatomical image of cortical surface. Array structure: [rows][columns] + % + % - vasculature_image_bits_per_pixel (int32) - Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value + % + % - vasculature_image_dimension (int32) - Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. + % + % - vasculature_image_field_of_view (single) - Size of viewing area, in meters. + % + % - vasculature_image_format (char) - Format of image. Right now only 'raw' is supported. + % + % Output Arguments: + % - imagingRetinotopy (types.core.ImagingRetinotopy) - A ImagingRetinotopy object + obj = obj@types.core.NWBDataInterface(varargin{:}); diff --git a/+types/+core/IndexSeries.m b/code/+types/+core/IndexSeries.m similarity index 59% rename from +types/+core/IndexSeries.m rename to code/+types/+core/IndexSeries.m index 30e53cf6..6a7da599 100644 --- a/+types/+core/IndexSeries.m +++ b/code/+types/+core/IndexSeries.m @@ -1,5 +1,8 @@ classdef IndexSeries < types.core.TimeSeries & types.untyped.GroupClass -% INDEXSERIES Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed. +% INDEXSERIES - Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed. +% +% Required Properties: +% data % OPTIONAL PROPERTIES @@ -10,7 +13,45 @@ methods function obj = IndexSeries(varargin) - % INDEXSERIES Constructor for IndexSeries + % INDEXSERIES - Constructor for IndexSeries + % + % Syntax: + % indexSeries = types.core.INDEXSERIES() creates a IndexSeries object with unset property values. + % + % indexSeries = types.core.INDEXSERIES(Name, Value) creates a IndexSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (uint32) - Index of the image (using zero-indexing) in the linked Images object. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - This field is unused by IndexSeries. + % + % - data_offset (single) - This field is unused by IndexSeries. + % + % - data_resolution (single) - This field is unused by IndexSeries. + % + % - description (char) - Description of the time series. + % + % - indexed_images (Images) - Link to Images object containing an ordered set of images that are indexed. The Images object must contain a 'ordered_images' dataset specifying the order of the images in the Images type. + % + % - indexed_timeseries (ImageSeries) - Link to ImageSeries object containing images that are indexed. Use of this link is discouraged and will be deprecated. Link to an Images type instead. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + % + % Output Arguments: + % - indexSeries (types.core.IndexSeries) - A IndexSeries object + varargin = [{'data_unit' 'N/A'} varargin]; obj = obj@types.core.TimeSeries(varargin{:}); diff --git a/code/+types/+core/IntervalSeries.m b/code/+types/+core/IntervalSeries.m new file mode 100644 index 00000000..93cea135 --- /dev/null +++ b/code/+types/+core/IntervalSeries.m @@ -0,0 +1,109 @@ +classdef IntervalSeries < types.core.TimeSeries & types.untyped.GroupClass +% INTERVALSERIES - Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way. 
+% +% Required Properties: +% data + + + +methods + function obj = IntervalSeries(varargin) + % INTERVALSERIES - Constructor for IntervalSeries + % + % Syntax: + % intervalSeries = types.core.INTERVALSERIES() creates a IntervalSeries object with unset property values. + % + % intervalSeries = types.core.INTERVALSERIES(Name, Value) creates a IntervalSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (int8) - Use values >0 if interval started, <0 if interval ended. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - description (char) - Description of the time series. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
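[Aside, not part of the generated file: an illustrative sketch of constructing an IntervalSeries; the times and description are made up.]

interval_series = types.core.IntervalSeries( ...
    'data', int8([1, -1, 1, -1]), ...              % >0 marks an interval start, <0 an interval end
    'timestamps', [10.0, 25.0, 130.0, 145.0], ...  % seconds; alternating start/stop times
    'description', 'epochs when the hypothetical stimulus was on');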
+ % + % Output Arguments: + % - intervalSeries (types.core.IntervalSeries) - A IntervalSeries object + + varargin = [{'data_resolution' types.util.correctType(-1, 'single') 'data_unit' 'n/a'} varargin]; + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_resolution',[]); + addParameter(p, 'data_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_resolution = p.Results.data_resolution; + obj.data_unit = p.Results.data_unit; + if strcmp(class(obj), 'types.core.IntervalSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'int8', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_resolution(obj, val) + if isequal(val, -1) + val = -1; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_resolution'' property of class ''IntervalSeries'' because it is read-only.') + end + end + function val = validate_data_unit(obj, val) + if isequal(val, 'n/a') + val = 'n/a'; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''IntervalSeries'' because it is read-only.') + end + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/IntracellularElectrode.m b/code/+types/+core/IntracellularElectrode.m similarity index 86% rename from +types/+core/IntracellularElectrode.m rename to code/+types/+core/IntracellularElectrode.m index a0426703..ac04e265 100644 --- a/+types/+core/IntracellularElectrode.m +++ b/code/+types/+core/IntracellularElectrode.m @@ -1,5 +1,8 @@ classdef IntracellularElectrode < types.core.NWBContainer & types.untyped.GroupClass -% INTRACELLULARELECTRODE An intracellular electrode and its metadata. +% INTRACELLULARELECTRODE - An intracellular electrode and its metadata. +% +% Required Properties: +% description % REQUIRED PROPERTIES @@ -20,7 +23,35 @@ methods function obj = IntracellularElectrode(varargin) - % INTRACELLULARELECTRODE Constructor for IntracellularElectrode + % INTRACELLULARELECTRODE - Constructor for IntracellularElectrode + % + % Syntax: + % intracellularElectrode = types.core.INTRACELLULARELECTRODE() creates a IntracellularElectrode object with unset property values. + % + % intracellularElectrode = types.core.INTRACELLULARELECTRODE(Name, Value) creates a IntracellularElectrode object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - cell_id (char) - unique ID of the cell + % + % - description (char) - Description of electrode (e.g., whole-cell, sharp, etc.). + % + % - device (Device) - Device that was used to record from this electrode. + % + % - filtering (char) - Electrode specific filtering. 
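[Aside, not part of the generated file: a hedged usage sketch for IntracellularElectrode, assuming an existing NwbFile `nwb` and Device `device`; the description and cell id are placeholders.]

electrode = types.core.IntracellularElectrode( ...
    'description', 'whole-cell patch electrode (placeholder)', ...
    'device', types.untyped.SoftLink(device), ...
    'cell_id', 'cell_0001');
nwb.general_intracellular_ephys.set('elec0', electrode);  % store under a chosen name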
+ % + % - initial_access_resistance (char) - Initial access resistance. + % + % - location (char) - Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. + % + % - resistance (char) - Electrode resistance, in ohms. + % + % - seal (char) - Information about seal used for recording. + % + % - slice (char) - Information about slice used for recording. + % + % Output Arguments: + % - intracellularElectrode (types.core.IntracellularElectrode) - A IntracellularElectrode object + obj = obj@types.core.NWBContainer(varargin{:}); diff --git a/+types/+core/IntracellularElectrodesTable.m b/code/+types/+core/IntracellularElectrodesTable.m similarity index 63% rename from +types/+core/IntracellularElectrodesTable.m rename to code/+types/+core/IntracellularElectrodesTable.m index ce75faef..037f7a0f 100644 --- a/+types/+core/IntracellularElectrodesTable.m +++ b/code/+types/+core/IntracellularElectrodesTable.m @@ -1,5 +1,8 @@ classdef IntracellularElectrodesTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass -% INTRACELLULARELECTRODESTABLE Table for storing intracellular electrode related metadata. +% INTRACELLULARELECTRODESTABLE - Table for storing intracellular electrode related metadata. +% +% Required Properties: +% electrode, id % REQUIRED PROPERTIES @@ -9,7 +12,25 @@ methods function obj = IntracellularElectrodesTable(varargin) - % INTRACELLULARELECTRODESTABLE Constructor for IntracellularElectrodesTable + % INTRACELLULARELECTRODESTABLE - Constructor for IntracellularElectrodesTable + % + % Syntax: + % intracellularElectrodesTable = types.core.INTRACELLULARELECTRODESTABLE() creates a IntracellularElectrodesTable object with unset property values. + % + % intracellularElectrodesTable = types.core.INTRACELLULARELECTRODESTABLE(Name, Value) creates a IntracellularElectrodesTable object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - electrode (VectorData) - Column for storing the reference to the intracellular electrode. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % Output Arguments: + % - intracellularElectrodesTable (types.core.IntracellularElectrodesTable) - A IntracellularElectrodesTable object + varargin = [{'description' 'Table for storing intracellular electrode related metadata.'} varargin]; obj = obj@types.hdmf_common.DynamicTable(varargin{:}); diff --git a/+types/+core/IntracellularRecordingsTable.m b/code/+types/+core/IntracellularRecordingsTable.m similarity index 53% rename from +types/+core/IntracellularRecordingsTable.m rename to code/+types/+core/IntracellularRecordingsTable.m index 1fc8d5c9..fd36d444 100644 --- a/+types/+core/IntracellularRecordingsTable.m +++ b/code/+types/+core/IntracellularRecordingsTable.m @@ -1,5 +1,8 @@ classdef IntracellularRecordingsTable < types.hdmf_common.AlignedDynamicTable & types.untyped.GroupClass -% INTRACELLULARRECORDINGSTABLE A table to group together a stimulus and response from a single electrode and a single simultaneous recording. 
Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. In this case, both the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. +% INTRACELLULARRECORDINGSTABLE - A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. In this case, both the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. +% +% Required Properties: +% electrodes, id, responses, stimuli % REQUIRED PROPERTIES @@ -11,7 +14,33 @@ methods function obj = IntracellularRecordingsTable(varargin) - % INTRACELLULARRECORDINGSTABLE Constructor for IntracellularRecordingsTable + % INTRACELLULARRECORDINGSTABLE - Constructor for IntracellularRecordingsTable + % + % Syntax: + % intracellularRecordingsTable = types.core.INTRACELLULARRECORDINGSTABLE() creates a IntracellularRecordingsTable object with unset property values. + % + % intracellularRecordingsTable = types.core.INTRACELLULARRECORDINGSTABLE(Name, Value) creates a IntracellularRecordingsTable object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - categories (char) - The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group. + % + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - dynamictable (DynamicTable) - A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable. + % + % - electrodes (IntracellularElectrodesTable) - Table for storing intracellular electrode related metadata. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. 
+ % + % - responses (IntracellularResponsesTable) - Table for storing intracellular response related metadata. + % + % - stimuli (IntracellularStimuliTable) - Table for storing intracellular stimulus related metadata. + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % Output Arguments: + % - intracellularRecordingsTable (types.core.IntracellularRecordingsTable) - A IntracellularRecordingsTable object + varargin = [{'description' 'A table to group together a stimulus and response from a single electrode and a single simultaneous recording and for storing metadata about the intracellular recording.'} varargin]; obj = obj@types.hdmf_common.AlignedDynamicTable(varargin{:}); diff --git a/+types/+core/IntracellularResponsesTable.m b/code/+types/+core/IntracellularResponsesTable.m similarity index 63% rename from +types/+core/IntracellularResponsesTable.m rename to code/+types/+core/IntracellularResponsesTable.m index 1b4979b7..b85e938f 100644 --- a/+types/+core/IntracellularResponsesTable.m +++ b/code/+types/+core/IntracellularResponsesTable.m @@ -1,5 +1,8 @@ classdef IntracellularResponsesTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass -% INTRACELLULARRESPONSESTABLE Table for storing intracellular response related metadata. +% INTRACELLULARRESPONSESTABLE - Table for storing intracellular response related metadata. +% +% Required Properties: +% id, response % REQUIRED PROPERTIES @@ -9,7 +12,25 @@ methods function obj = IntracellularResponsesTable(varargin) - % INTRACELLULARRESPONSESTABLE Constructor for IntracellularResponsesTable + % INTRACELLULARRESPONSESTABLE - Constructor for IntracellularResponsesTable + % + % Syntax: + % intracellularResponsesTable = types.core.INTRACELLULARRESPONSESTABLE() creates a IntracellularResponsesTable object with unset property values. + % + % intracellularResponsesTable = types.core.INTRACELLULARRESPONSESTABLE(Name, Value) creates a IntracellularResponsesTable object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - response (TimeSeriesReferenceVectorData) - Column storing the reference to the recorded response for the recording (rows) + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % Output Arguments: + % - intracellularResponsesTable (types.core.IntracellularResponsesTable) - A IntracellularResponsesTable object + varargin = [{'description' 'Table for storing intracellular response related metadata.'} varargin]; obj = obj@types.hdmf_common.DynamicTable(varargin{:}); diff --git a/+types/+core/IntracellularStimuliTable.m b/code/+types/+core/IntracellularStimuliTable.m similarity index 67% rename from +types/+core/IntracellularStimuliTable.m rename to code/+types/+core/IntracellularStimuliTable.m index b0cf39e9..ab1d83e1 100644 --- a/+types/+core/IntracellularStimuliTable.m +++ b/code/+types/+core/IntracellularStimuliTable.m @@ -1,5 +1,8 @@ classdef IntracellularStimuliTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass -% INTRACELLULARSTIMULITABLE Table for storing intracellular stimulus related metadata. +% INTRACELLULARSTIMULITABLE - Table for storing intracellular stimulus related metadata. 
+% +% Required Properties: +% id, stimulus % REQUIRED PROPERTIES @@ -13,7 +16,27 @@ methods function obj = IntracellularStimuliTable(varargin) - % INTRACELLULARSTIMULITABLE Constructor for IntracellularStimuliTable + % INTRACELLULARSTIMULITABLE - Constructor for IntracellularStimuliTable + % + % Syntax: + % intracellularStimuliTable = types.core.INTRACELLULARSTIMULITABLE() creates a IntracellularStimuliTable object with unset property values. + % + % intracellularStimuliTable = types.core.INTRACELLULARSTIMULITABLE(Name, Value) creates a IntracellularStimuliTable object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - stimulus (TimeSeriesReferenceVectorData) - Column storing the reference to the recorded stimulus for the recording (rows). + % + % - stimulus_template (TimeSeriesReferenceVectorData) - Column storing the reference to the stimulus template for the recording (rows). + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % Output Arguments: + % - intracellularStimuliTable (types.core.IntracellularStimuliTable) - A IntracellularStimuliTable object + varargin = [{'description' 'Table for storing intracellular stimulus related metadata.'} varargin]; obj = obj@types.hdmf_common.DynamicTable(varargin{:}); diff --git a/+types/+core/LFP.m b/code/+types/+core/LFP.m similarity index 65% rename from +types/+core/LFP.m rename to code/+types/+core/LFP.m index 56eaf392..e0e3c19b 100644 --- a/+types/+core/LFP.m +++ b/code/+types/+core/LFP.m @@ -1,5 +1,8 @@ classdef LFP < types.core.NWBDataInterface & types.untyped.GroupClass -% LFP LFP data from one or more channels. The electrode map in each published ElectricalSeries will identify which channels are providing LFP data. Filter properties should be noted in the ElectricalSeries 'filtering' attribute. +% LFP - LFP data from one or more channels. The electrode map in each published ElectricalSeries will identify which channels are providing LFP data. Filter properties should be noted in the ElectricalSeries 'filtering' attribute. +% +% Required Properties: +% electricalseries % REQUIRED PROPERTIES @@ -9,7 +12,19 @@ methods function obj = LFP(varargin) - % LFP Constructor for LFP + % LFP - Constructor for LFP + % + % Syntax: + % lFP = types.core.LFP() creates a LFP object with unset property values. + % + % lFP = types.core.LFP(Name, Value) creates a LFP object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - electricalseries (ElectricalSeries) - ElectricalSeries object(s) containing LFP data for one or more channels. 
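[Aside, not part of the generated file: a minimal sketch of wrapping an existing ElectricalSeries in an LFP container and adding it to a processing module; `electrical_series` and `nwb` are assumed to already exist.]

lfp = types.core.LFP('electricalseries', electrical_series);
ecephys_module = types.core.ProcessingModule( ...
    'description', 'extracellular electrophysiology');
ecephys_module.nwbdatainterface.set('LFP', lfp);   % place the LFP container in the module
nwb.processing.set('ecephys', ecephys_module);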
+ % + % Output Arguments: + % - lFP (types.core.LFP) - A LFP object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.electricalseries, ivarargin] = types.util.parseConstrained(obj,'electricalseries', 'types.core.ElectricalSeries', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/LabMetaData.m b/code/+types/+core/LabMetaData.m similarity index 64% rename from +types/+core/LabMetaData.m rename to code/+types/+core/LabMetaData.m index 80933fc3..6f9cb0ae 100644 --- a/+types/+core/LabMetaData.m +++ b/code/+types/+core/LabMetaData.m @@ -1,11 +1,21 @@ classdef LabMetaData < types.core.NWBContainer & types.untyped.GroupClass -% LABMETADATA Lab-specific meta-data. +% LABMETADATA - Lab-specific meta-data. +% +% Required Properties: +% None methods function obj = LabMetaData(varargin) - % LABMETADATA Constructor for LabMetaData + % LABMETADATA - Constructor for LabMetaData + % + % Syntax: + % labMetaData = types.core.LABMETADATA() creates a LabMetaData object with unset property values. + % + % Output Arguments: + % - labMetaData (types.core.LabMetaData) - A LabMetaData object + obj = obj@types.core.NWBContainer(varargin{:}); if strcmp(class(obj), 'types.core.LabMetaData') cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); diff --git a/+types/+core/MotionCorrection.m b/code/+types/+core/MotionCorrection.m similarity index 62% rename from +types/+core/MotionCorrection.m rename to code/+types/+core/MotionCorrection.m index 75b428da..9a67b4b7 100644 --- a/+types/+core/MotionCorrection.m +++ b/code/+types/+core/MotionCorrection.m @@ -1,5 +1,8 @@ classdef MotionCorrection < types.core.NWBDataInterface & types.untyped.GroupClass -% MOTIONCORRECTION An image stack where all frames are shifted (registered) to a common coordinate system, to account for movement and drift between frames. Note: each frame at each point in time is assumed to be 2-D (has only x & y dimensions). +% MOTIONCORRECTION - An image stack where all frames are shifted (registered) to a common coordinate system, to account for movement and drift between frames. Note: each frame at each point in time is assumed to be 2-D (has only x & y dimensions). +% +% Required Properties: +% correctedimagestack % REQUIRED PROPERTIES @@ -9,7 +12,19 @@ methods function obj = MotionCorrection(varargin) - % MOTIONCORRECTION Constructor for MotionCorrection + % MOTIONCORRECTION - Constructor for MotionCorrection + % + % Syntax: + % motionCorrection = types.core.MOTIONCORRECTION() creates a MotionCorrection object with unset property values. + % + % motionCorrection = types.core.MOTIONCORRECTION(Name, Value) creates a MotionCorrection object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - correctedimagestack (CorrectedImageStack) - Results from motion correction of an image stack. 
+ % + % Output Arguments: + % - motionCorrection (types.core.MotionCorrection) - A MotionCorrection object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.correctedimagestack, ivarargin] = types.util.parseConstrained(obj,'correctedimagestack', 'types.core.CorrectedImageStack', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/NWBContainer.m b/code/+types/+core/NWBContainer.m similarity index 58% rename from +types/+core/NWBContainer.m rename to code/+types/+core/NWBContainer.m index 7d53229c..02b6d2b1 100644 --- a/+types/+core/NWBContainer.m +++ b/code/+types/+core/NWBContainer.m @@ -1,11 +1,21 @@ classdef NWBContainer < types.hdmf_common.Container & types.untyped.GroupClass -% NWBCONTAINER An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. +% NWBCONTAINER - An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers. +% +% Required Properties: +% None methods function obj = NWBContainer(varargin) - % NWBCONTAINER Constructor for NWBContainer + % NWBCONTAINER - Constructor for NWBContainer + % + % Syntax: + % nWBContainer = types.core.NWBCONTAINER() creates a NWBContainer object with unset property values. + % + % Output Arguments: + % - nWBContainer (types.core.NWBContainer) - A NWBContainer object + obj = obj@types.hdmf_common.Container(varargin{:}); if strcmp(class(obj), 'types.core.NWBContainer') cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); diff --git a/+types/+core/NWBData.m b/code/+types/+core/NWBData.m similarity index 61% rename from +types/+core/NWBData.m rename to code/+types/+core/NWBData.m index 8a1ef897..8a01aa52 100644 --- a/+types/+core/NWBData.m +++ b/code/+types/+core/NWBData.m @@ -1,11 +1,26 @@ classdef NWBData < types.hdmf_common.Data & types.untyped.DatasetClass -% NWBDATA An abstract data type for a dataset. +% NWBDATA - An abstract data type for a dataset. +% +% Required Properties: +% data methods function obj = NWBData(varargin) - % NWBDATA Constructor for NWBData + % NWBDATA - Constructor for NWBData + % + % Syntax: + % nWBData = types.core.NWBDATA() creates a NWBData object with unset property values. + % + % nWBData = types.core.NWBDATA(Name, Value) creates a NWBData object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (any) - No description + % + % Output Arguments: + % - nWBData (types.core.NWBData) - A NWBData object + obj = obj@types.hdmf_common.Data(varargin{:}); diff --git a/+types/+core/NWBDataInterface.m b/code/+types/+core/NWBDataInterface.m similarity index 58% rename from +types/+core/NWBDataInterface.m rename to code/+types/+core/NWBDataInterface.m index 182ffe34..1012dc86 100644 --- a/+types/+core/NWBDataInterface.m +++ b/code/+types/+core/NWBDataInterface.m @@ -1,11 +1,21 @@ classdef NWBDataInterface < types.core.NWBContainer & types.untyped.GroupClass -% NWBDATAINTERFACE An abstract data type for a generic container storing collections of data, as opposed to metadata. +% NWBDATAINTERFACE - An abstract data type for a generic container storing collections of data, as opposed to metadata. 
+% +% Required Properties: +% None methods function obj = NWBDataInterface(varargin) - % NWBDATAINTERFACE Constructor for NWBDataInterface + % NWBDATAINTERFACE - Constructor for NWBDataInterface + % + % Syntax: + % nWBDataInterface = types.core.NWBDATAINTERFACE() creates a NWBDataInterface object with unset property values. + % + % Output Arguments: + % - nWBDataInterface (types.core.NWBDataInterface) - A NWBDataInterface object + obj = obj@types.core.NWBContainer(varargin{:}); if strcmp(class(obj), 'types.core.NWBDataInterface') cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); diff --git a/+types/+core/NWBFile.m b/code/+types/+core/NWBFile.m similarity index 85% rename from +types/+core/NWBFile.m rename to code/+types/+core/NWBFile.m index 2b05d58e..5896f6e8 100644 --- a/+types/+core/NWBFile.m +++ b/code/+types/+core/NWBFile.m @@ -1,5 +1,8 @@ classdef NWBFile < types.core.NWBContainer & types.untyped.GroupClass -% NWBFILE An NWB file storing cellular-based neurophysiology data from a single experimental session. +% NWBFILE - An NWB file storing cellular-based neurophysiology data from a single experimental session. +% +% Required Properties: +% file_create_date, identifier, session_description, session_start_time, timestamps_reference_time % READONLY PROPERTIES @@ -64,7 +67,115 @@ methods function obj = NWBFile(varargin) - % NWBFILE Constructor for NWBFile + % NWBFILE - Constructor for NWBFile + % + % Syntax: + % nWBFile = types.core.NWBFILE() creates a NWBFile object with unset property values. + % + % nWBFile = types.core.NWBFILE(Name, Value) creates a NWBFile object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - acquisition (DynamicTable|NWBDataInterface) - Tabular data that is relevant to acquisition + % + % - analysis (DynamicTable|NWBContainer) - Tabular data that is relevant to data stored in analysis + % + % - file_create_date (datetime) - A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array. + % + % - general (LabMetaData) - Place-holder than can be extended so that lab-specific meta-data can be placed in /general. + % + % - general_data_collection (char) - Notes about data collection and analysis. + % + % - general_devices (Device) - Data acquisition devices. + % + % - general_experiment_description (char) - General description of the experiment. + % + % - general_experimenter (char) - Name of person(s) who performed the experiment. Can also specify roles of different people involved. + % + % - general_extracellular_ephys (ElectrodeGroup) - Physical group of electrodes. + % + % - general_extracellular_ephys_electrodes (DynamicTable) - A table of all electrodes (i.e. channels) used for recording. + % + % - general_institution (char) - Institution(s) where experiment was performed. + % + % - general_intracellular_ephys (IntracellularElectrode) - An intracellular electrode. 
+ % + % - general_intracellular_ephys_experimental_conditions (ExperimentalConditionsTable) - A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions. + % + % - general_intracellular_ephys_filtering (char) - [DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries. + % + % - general_intracellular_ephys_intracellular_recordings (IntracellularRecordingsTable) - A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response are recorded as as part of an experiment. In this case both, the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. + % + % - general_intracellular_ephys_repetitions (RepetitionsTable) - A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence. + % + % - general_intracellular_ephys_sequential_recordings (SequentialRecordingsTable) - A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where the a sequence of stimuli of the same type with varying parameters have been presented in a sequence. + % + % - general_intracellular_ephys_simultaneous_recordings (SimultaneousRecordingsTable) - A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes + % + % - general_intracellular_ephys_sweep_table (SweepTable) - [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata. + % + % - general_keywords (char) - Terms to search over. + % + % - general_lab (char) - Laboratory where experiment was performed. + % + % - general_notes (char) - Notes about the experiment. + % + % - general_optogenetics (OptogeneticStimulusSite) - An optogenetic stimulation site. + % + % - general_optophysiology (ImagingPlane) - An imaging plane. + % + % - general_pharmacology (char) - Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc. + % + % - general_protocol (char) - Experimental protocol, if applicable. e.g., include IACUC protocol number. + % + % - general_related_publications (char) - Publication information. 
PMID, DOI, URL, etc. + % + % - general_session_id (char) - Lab-specific ID for the session. + % + % - general_slices (char) - Description of slices, including information about preparation thickness, orientation, temperature, and bath solution. + % + % - general_source_script (char) - Script file or link to public source code used to create this NWB file. + % + % - general_source_script_file_name (char) - Name of script file. + % + % - general_stimulus (char) - Notes about stimuli, such as how and where they were presented. + % + % - general_subject (Subject) - Information about the animal or person from which the data was measured. + % + % - general_surgery (char) - Narrative description about surgery/surgeries, including date(s) and who performed surgery. + % + % - general_virus (char) - Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc. + % + % - general_was_generated_by (char) - Name and version of software package(s) used to generate data contained in this NWB File. For each software package or library, include the name of the software as the first value and the version as the second value. + % + % - identifier (char) - A unique text identifier for the file. For example, concatenated lab name, file creation date/time and experimentalist, or a hash of these and/or other values. The goal is that the string should be unique to all other files. + % + % - intervals (TimeIntervals) - Optional additional table(s) for describing other experimental time intervals. + % + % - intervals_epochs (TimeIntervals) - Divisions in time marking experimental stages or sub-divisions of a single recording session. + % + % - intervals_invalid_times (TimeIntervals) - Time intervals that should be removed from analysis. + % + % - intervals_trials (TimeIntervals) - Repeated experimental events that have a logical grouping. + % + % - processing (ProcessingModule) - Intermediate analysis of acquired data. + % + % - scratch (DynamicTable|NWBContainer|ScratchData) - No description + % + % - session_description (char) - A description of the experimental session and data in the file. + % + % - session_start_time (datetime) - Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds. + % + % - stimulus_presentation (DynamicTable|NWBDataInterface|TimeSeries) - DynamicTable objects containing data of presented stimuli. + % + % - stimulus_templates (Images|TimeSeries) - Images objects containing images of presented stimuli. + % + % - timestamps_reference_time (datetime) - Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero). + % + % - units (Units) - Data about sorted spike units. 
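[Aside, not part of the generated file: in practice, files are usually created through MatNWB's user-facing NwbFile subclass rather than types.core.NWBFile directly. A minimal, illustrative sketch with placeholder values:]

nwb = NwbFile( ...
    'session_description', 'mouse in open field (placeholder)', ...
    'identifier', 'Mouse5_Day3', ...
    'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
    'general_experimenter', 'Last, First', ...
    'general_session_id', 'session_1234');
nwbExport(nwb, 'mouse5_day3.nwb');   % write the file to disk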
+ % + % Output Arguments: + % - nWBFile (types.core.NWBFile) - A NWBFile object + varargin = [{'nwb_version' '2.8.0'} varargin]; obj = obj@types.core.NWBContainer(varargin{:}); diff --git a/+types/+core/OnePhotonSeries.m b/code/+types/+core/OnePhotonSeries.m similarity index 51% rename from +types/+core/OnePhotonSeries.m rename to code/+types/+core/OnePhotonSeries.m index 3d5b28b2..cd0b7f8a 100644 --- a/+types/+core/OnePhotonSeries.m +++ b/code/+types/+core/OnePhotonSeries.m @@ -1,5 +1,8 @@ classdef OnePhotonSeries < types.core.ImageSeries & types.untyped.GroupClass -% ONEPHOTONSERIES Image stack recorded over time from 1-photon microscope. +% ONEPHOTONSERIES - Image stack recorded over time from 1-photon microscope. +% +% Required Properties: +% data % OPTIONAL PROPERTIES @@ -15,7 +18,67 @@ methods function obj = OnePhotonSeries(varargin) - % ONEPHOTONSERIES Constructor for OnePhotonSeries + % ONEPHOTONSERIES - Constructor for OnePhotonSeries + % + % Syntax: + % onePhotonSeries = types.core.ONEPHOTONSERIES() creates a OnePhotonSeries object with unset property values. + % + % onePhotonSeries = types.core.ONEPHOTONSERIES(Name, Value) creates a OnePhotonSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - binning (uint8) - Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc. + % + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (numeric) - Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. 
+ % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - data_unit (char) - Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + % + % - description (char) - Description of the time series. + % + % - device (Device) - Link to the Device object that was used to capture these images. + % + % - dimension (int32) - Number of pixels on x, y, (and z) axes. + % + % - exposure_time (single) - Exposure time of the sample; often the inverse of the frequency. + % + % - external_file (char) - Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. + % + % - external_file_starting_frame (int32) - Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. + % + % - format (char) - Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + % + % - imaging_plane (ImagingPlane) - Link to ImagingPlane object from which this TimeSeries data was generated. + % + % - intensity (single) - Intensity of the excitation in mW/mm^2, if known. + % + % - pmt_gain (single) - Photomultiplier gain. + % + % - power (single) - Power of the excitation in mW, if known. + % + % - scan_line_rate (single) - Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. 
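[Aside, not part of the generated file: an illustrative sketch that links a OnePhotonSeries to an existing `imaging_plane` and adds it to an assumed `nwb` file; the data array, unit, and rates are placeholders.]

one_photon_series = types.core.OnePhotonSeries( ...
    'data', rand(64, 64, 1000), ...                        % placeholder pixel data across frames
    'data_unit', 'n.a.', ...
    'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
    'starting_time', 0.0, ...
    'starting_time_rate', 30.0, ...
    'exposure_time', 0.01);
nwb.acquisition.set('OnePhotonSeries', one_photon_series);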
+ % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + % + % Output Arguments: + % - onePhotonSeries (types.core.OnePhotonSeries) - A OnePhotonSeries object + obj = obj@types.core.ImageSeries(varargin{:}); diff --git a/+types/+core/OpticalChannel.m b/code/+types/+core/OpticalChannel.m similarity index 78% rename from +types/+core/OpticalChannel.m rename to code/+types/+core/OpticalChannel.m index 038b43a0..92496701 100644 --- a/+types/+core/OpticalChannel.m +++ b/code/+types/+core/OpticalChannel.m @@ -1,5 +1,8 @@ classdef OpticalChannel < types.core.NWBContainer & types.untyped.GroupClass -% OPTICALCHANNEL An optical channel used to record from an imaging plane. +% OPTICALCHANNEL - An optical channel used to record from an imaging plane. +% +% Required Properties: +% description, emission_lambda % REQUIRED PROPERTIES @@ -10,7 +13,21 @@ methods function obj = OpticalChannel(varargin) - % OPTICALCHANNEL Constructor for OpticalChannel + % OPTICALCHANNEL - Constructor for OpticalChannel + % + % Syntax: + % opticalChannel = types.core.OPTICALCHANNEL() creates a OpticalChannel object with unset property values. + % + % opticalChannel = types.core.OPTICALCHANNEL(Name, Value) creates a OpticalChannel object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - description (char) - Description or other notes about the channel. + % + % - emission_lambda (single) - Emission wavelength for channel, in nm. + % + % Output Arguments: + % - opticalChannel (types.core.OpticalChannel) - A OpticalChannel object + obj = obj@types.core.NWBContainer(varargin{:}); diff --git a/code/+types/+core/OpticalSeries.m b/code/+types/+core/OpticalSeries.m new file mode 100644 index 00000000..fc441c1b --- /dev/null +++ b/code/+types/+core/OpticalSeries.m @@ -0,0 +1,205 @@ +classdef OpticalSeries < types.core.ImageSeries & types.untyped.GroupClass +% OPTICALSERIES - Image data that is presented or recorded. A stimulus template movie will be stored only as an image. When the image is presented as stimulus, additional data is required, such as field of view (e.g., how much of the visual field the image covers, or how what is the area of the target being imaged). If the OpticalSeries represents acquired imaging data, orientation is also important. +% +% Required Properties: +% data + + +% OPTIONAL PROPERTIES +properties + distance; % (single) Distance from camera/monitor to target/eye. + field_of_view; % (single) Width, height and depth of image, or imaged area, in meters. + orientation; % (char) Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference. +end + +methods + function obj = OpticalSeries(varargin) + % OPTICALSERIES - Constructor for OpticalSeries + % + % Syntax: + % opticalSeries = types.core.OPTICALSERIES() creates a OpticalSeries object with unset property values. + % + % opticalSeries = types.core.OPTICALSERIES(Name, Value) creates a OpticalSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (numeric) - Images presented to subject, either grayscale or RGB + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - data_unit (char) - Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + % + % - description (char) - Description of the time series. + % + % - device (Device) - Link to the Device object that was used to capture these images. + % + % - dimension (int32) - Number of pixels on x, y, (and z) axes. + % + % - distance (single) - Distance from camera/monitor to target/eye. + % + % - external_file (char) - Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. 
+ % + % - external_file_starting_frame (int32) - Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. + % + % - field_of_view (single) - Width, height and depth of image, or imaged area, in meters. + % + % - format (char) - Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + % + % - orientation (char) - Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
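For orientation, a minimal usage sketch of the OpticalSeries constructor documented above; the grayscale stimulus movie, unit string, and all numeric values are illustrative assumptions and are not part of this changeset:

    % Hypothetical presented-stimulus movie (grayscale frames).
    % Note: MatNWB stores arrays with dimensions in the reverse of the NWB spec order.
    stimulusMovie = types.core.OpticalSeries( ...
        'data', rand(64, 48, 120), ...             % e.g. [x, y, frame] in MATLAB order
        'data_unit', 'normalized luminance', ...   % hypothetical unit
        'distance', 0.7, ...                       % meters from monitor to eye
        'field_of_view', [0.25, 0.15], ...         % display width and height, in meters
        'orientation', 'first data dimension is left-to-right on the monitor', ...
        'starting_time', 0.0, ...
        'starting_time_rate', 60.0);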
+ % + % Output Arguments: + % - opticalSeries (types.core.OpticalSeries) - A OpticalSeries object + + obj = obj@types.core.ImageSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'distance',[]); + addParameter(p, 'field_of_view',[]); + addParameter(p, 'orientation',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.distance = p.Results.distance; + obj.field_of_view = p.Results.field_of_view; + obj.orientation = p.Results.orientation; + if strcmp(class(obj), 'types.core.OpticalSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.distance(obj, val) + obj.distance = obj.validate_distance(val); + end + function set.field_of_view(obj, val) + obj.field_of_view = obj.validate_field_of_view(val); + end + function set.orientation(obj, val) + obj.orientation = obj.validate_orientation(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[3,Inf,Inf,Inf], [Inf,Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_distance(obj, val) + val = types.util.checkDtype('distance', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_field_of_view(obj, val) + val = types.util.checkDtype('field_of_view', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[3], [2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_orientation(obj, val) + val = types.util.checkDtype('orientation', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.ImageSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.distance) + if startsWith(class(obj.distance), 'types.untyped.') + refs = obj.distance.export(fid, [fullpath '/distance'], refs); + elseif ~isempty(obj.distance) + io.writeDataset(fid, [fullpath '/distance'], obj.distance); + end + end + if ~isempty(obj.field_of_view) + if startsWith(class(obj.field_of_view), 'types.untyped.') + refs = obj.field_of_view.export(fid, [fullpath '/field_of_view'], refs); + elseif ~isempty(obj.field_of_view) + io.writeDataset(fid, [fullpath '/field_of_view'], obj.field_of_view, 
'forceArray'); + end + end + if ~isempty(obj.orientation) + if startsWith(class(obj.orientation), 'types.untyped.') + refs = obj.orientation.export(fid, [fullpath '/orientation'], refs); + elseif ~isempty(obj.orientation) + io.writeDataset(fid, [fullpath '/orientation'], obj.orientation); + end + end + end +end + +end \ No newline at end of file diff --git a/code/+types/+core/OptogeneticSeries.m b/code/+types/+core/OptogeneticSeries.m new file mode 100644 index 00000000..07d5e6ea --- /dev/null +++ b/code/+types/+core/OptogeneticSeries.m @@ -0,0 +1,125 @@ +classdef OptogeneticSeries < types.core.TimeSeries & types.untyped.GroupClass +% OPTOGENETICSERIES - An optogenetic stimulus. +% +% Required Properties: +% data + + +% OPTIONAL PROPERTIES +properties + site; % OptogeneticStimulusSite +end + +methods + function obj = OptogeneticSeries(varargin) + % OPTOGENETICSERIES - Constructor for OptogeneticSeries + % + % Syntax: + % optogeneticSeries = types.core.OPTOGENETICSERIES() creates a OptogeneticSeries object with unset property values. + % + % optogeneticSeries = types.core.OPTOGENETICSERIES(Name, Value) creates a OptogeneticSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (numeric) - Applied power for optogenetic stimulus, in watts. Shape can be 1D or 2D. 2D data is meant to be used in an extension of OptogeneticSeries that defines what the second dimension represents. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. 
+ % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - description (char) - Description of the time series. + % + % - site (OptogeneticStimulusSite) - Link to OptogeneticStimulusSite object that describes the site to which this stimulus was applied. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + % + % Output Arguments: + % - optogeneticSeries (types.core.OptogeneticSeries) - A OptogeneticSeries object + + varargin = [{'data_unit' 'watts'} varargin]; + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + addParameter(p, 'site',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + obj.site = p.Results.site; + if strcmp(class(obj), 'types.core.OptogeneticSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.site(obj, val) + obj.site = obj.validate_site(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf], [Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_unit(obj, val) + if isequal(val, 'watts') + val = 'watts'; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''OptogeneticSeries'' because it is read-only.') + end + end + function val = validate_site(obj, val) + if isa(val, 'types.untyped.SoftLink') + if isprop(val, 'target') + types.util.checkDtype('site', 'types.core.OptogeneticStimulusSite', val.target); + end + else + val = types.util.checkDtype('site', 'types.core.OptogeneticStimulusSite', val); + if ~isempty(val) + val = types.untyped.SoftLink(val); + end + end + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.site.export(fid, [fullpath '/site'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/OptogeneticStimulusSite.m 
b/code/+types/+core/OptogeneticStimulusSite.m similarity index 80% rename from +types/+core/OptogeneticStimulusSite.m rename to code/+types/+core/OptogeneticStimulusSite.m index 7a2f6c88..6e44c313 100644 --- a/+types/+core/OptogeneticStimulusSite.m +++ b/code/+types/+core/OptogeneticStimulusSite.m @@ -1,5 +1,8 @@ classdef OptogeneticStimulusSite < types.core.NWBContainer & types.untyped.GroupClass -% OPTOGENETICSTIMULUSSITE A site of optogenetic stimulation. +% OPTOGENETICSTIMULUSSITE - A site of optogenetic stimulation. +% +% Required Properties: +% description, excitation_lambda, location % REQUIRED PROPERTIES @@ -15,7 +18,25 @@ methods function obj = OptogeneticStimulusSite(varargin) - % OPTOGENETICSTIMULUSSITE Constructor for OptogeneticStimulusSite + % OPTOGENETICSTIMULUSSITE - Constructor for OptogeneticStimulusSite + % + % Syntax: + % optogeneticStimulusSite = types.core.OPTOGENETICSTIMULUSSITE() creates a OptogeneticStimulusSite object with unset property values. + % + % optogeneticStimulusSite = types.core.OPTOGENETICSTIMULUSSITE(Name, Value) creates a OptogeneticStimulusSite object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - description (char) - Description of stimulation site. + % + % - device (Device) - Device that generated the stimulus. + % + % - excitation_lambda (single) - Excitation wavelength, in nm. + % + % - location (char) - Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible. + % + % Output Arguments: + % - optogeneticStimulusSite (types.core.OptogeneticStimulusSite) - A OptogeneticStimulusSite object + obj = obj@types.core.NWBContainer(varargin{:}); diff --git a/+types/+core/PatchClampSeries.m b/code/+types/+core/PatchClampSeries.m similarity index 55% rename from +types/+core/PatchClampSeries.m rename to code/+types/+core/PatchClampSeries.m index cbe61d51..5f8220e5 100644 --- a/+types/+core/PatchClampSeries.m +++ b/code/+types/+core/PatchClampSeries.m @@ -1,5 +1,8 @@ classdef PatchClampSeries < types.core.TimeSeries & types.untyped.GroupClass -% PATCHCLAMPSERIES An abstract base class for patch-clamp data - stimulus or response, current or voltage. +% PATCHCLAMPSERIES - An abstract base class for patch-clamp data - stimulus or response, current or voltage. +% +% Required Properties: +% data % OPTIONAL PROPERTIES @@ -12,7 +15,51 @@ methods function obj = PatchClampSeries(varargin) - % PATCHCLAMPSERIES Constructor for PatchClampSeries + % PATCHCLAMPSERIES - Constructor for PatchClampSeries + % + % Syntax: + % patchClampSeries = types.core.PATCHCLAMPSERIES() creates a PatchClampSeries object with unset property values. + % + % patchClampSeries = types.core.PATCHCLAMPSERIES(Name, Value) creates a PatchClampSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. 
+ % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (numeric) - Recorded voltage or current. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - data_unit (char) - Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + % + % - description (char) - Description of the time series. + % + % - electrode (IntracellularElectrode) - Link to IntracellularElectrode object that describes the electrode that was used to apply or record this data. + % + % - gain (single) - Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - stimulus_description (char) - Protocol/stimulus name for this patch-clamp dataset. + % + % - sweep_number (uint32) - Sweep number, allows to group different PatchClampSeries together. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
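As a rough sketch of the PatchClampSeries interface documented above: in practice one of the concrete subtypes (e.g. CurrentClampSeries or VoltageClampSeries) would be used, and the electrode/device setup here is purely illustrative, not part of this changeset:

    % Hypothetical intracellular recording; link targets are wrapped into
    % SoftLinks by the generated validators when objects are passed directly.
    device = types.core.Device();
    electrode = types.core.IntracellularElectrode( ...
        'description', 'example patch pipette', ...
        'device', device);
    pcs = types.core.PatchClampSeries( ...
        'data', randn(2000, 1), ...          % 1-D trace, num_times samples
        'data_unit', 'volts', ...
        'electrode', electrode, ...
        'gain', 0.02, ...                    % Volt/Volt for current clamp
        'stimulus_description', 'N/A', ...
        'starting_time', 0.0, ...
        'starting_time_rate', 20000.0);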
+ % + % Output Arguments: + % - patchClampSeries (types.core.PatchClampSeries) - A PatchClampSeries object + obj = obj@types.core.TimeSeries(varargin{:}); diff --git a/+types/+core/PlaneSegmentation.m b/code/+types/+core/PlaneSegmentation.m similarity index 72% rename from +types/+core/PlaneSegmentation.m rename to code/+types/+core/PlaneSegmentation.m index 492ae7ea..58f06e95 100644 --- a/+types/+core/PlaneSegmentation.m +++ b/code/+types/+core/PlaneSegmentation.m @@ -1,5 +1,8 @@ classdef PlaneSegmentation < types.hdmf_common.DynamicTable & types.untyped.GroupClass -% PLANESEGMENTATION Results from image segmentation of a specific imaging plane. +% PLANESEGMENTATION - Results from image segmentation of a specific imaging plane. +% +% Required Properties: +% id % OPTIONAL PROPERTIES @@ -15,7 +18,39 @@ methods function obj = PlaneSegmentation(varargin) - % PLANESEGMENTATION Constructor for PlaneSegmentation + % PLANESEGMENTATION - Constructor for PlaneSegmentation + % + % Syntax: + % planeSegmentation = types.core.PLANESEGMENTATION() creates a PlaneSegmentation object with unset property values. + % + % planeSegmentation = types.core.PLANESEGMENTATION(Name, Value) creates a PlaneSegmentation object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - description (char) - Description of what is in this dynamic table. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - image_mask (VectorData) - ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero. + % + % - imaging_plane (ImagingPlane) - Link to ImagingPlane object from which this data was generated. + % + % - pixel_mask (VectorData) - Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + % + % - pixel_mask_index (VectorIndex) - Index into pixel_mask. + % + % - reference_images (ImageSeries) - One or more image stacks that the masks apply to (can be one-element stack). + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % - voxel_mask (VectorData) - Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation + % + % - voxel_mask_index (VectorIndex) - Index into voxel_mask. + % + % Output Arguments: + % - planeSegmentation (types.core.PlaneSegmentation) - A PlaneSegmentation object + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); diff --git a/+types/+core/Position.m b/code/+types/+core/Position.m similarity index 68% rename from +types/+core/Position.m rename to code/+types/+core/Position.m index 58b4ebaf..389ca1ef 100644 --- a/+types/+core/Position.m +++ b/code/+types/+core/Position.m @@ -1,5 +1,8 @@ classdef Position < types.core.NWBDataInterface & types.untyped.GroupClass -% POSITION Position data, whether along the x, x/y or x/y/z axis. +% POSITION - Position data, whether along the x, x/y or x/y/z axis. 
+% +% Required Properties: +% spatialseries % REQUIRED PROPERTIES @@ -9,7 +12,19 @@ methods function obj = Position(varargin) - % POSITION Constructor for Position + % POSITION - Constructor for Position + % + % Syntax: + % position = types.core.POSITION() creates a Position object with unset property values. + % + % position = types.core.POSITION(Name, Value) creates a Position object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - spatialseries (SpatialSeries) - SpatialSeries object containing position data. + % + % Output Arguments: + % - position (types.core.Position) - A Position object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.spatialseries, ivarargin] = types.util.parseConstrained(obj,'spatialseries', 'types.core.SpatialSeries', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/ProcessingModule.m b/code/+types/+core/ProcessingModule.m similarity index 77% rename from +types/+core/ProcessingModule.m rename to code/+types/+core/ProcessingModule.m index c925acde..accc72fe 100644 --- a/+types/+core/ProcessingModule.m +++ b/code/+types/+core/ProcessingModule.m @@ -1,5 +1,8 @@ classdef ProcessingModule < types.core.NWBContainer & types.untyped.GroupClass -% PROCESSINGMODULE A collection of processed data. +% PROCESSINGMODULE - A collection of processed data. +% +% Required Properties: +% None % OPTIONAL PROPERTIES @@ -11,7 +14,23 @@ methods function obj = ProcessingModule(varargin) - % PROCESSINGMODULE Constructor for ProcessingModule + % PROCESSINGMODULE - Constructor for ProcessingModule + % + % Syntax: + % processingModule = types.core.PROCESSINGMODULE() creates a ProcessingModule object with unset property values. + % + % processingModule = types.core.PROCESSINGMODULE(Name, Value) creates a ProcessingModule object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - description (char) - Description of this collection of processed data. + % + % - dynamictable (DynamicTable) - Tables stored in this collection. + % + % - nwbdatainterface (NWBDataInterface) - Data objects stored in this collection. + % + % Output Arguments: + % - processingModule (types.core.ProcessingModule) - A ProcessingModule object + obj = obj@types.core.NWBContainer(varargin{:}); [obj.dynamictable, ivarargin] = types.util.parseConstrained(obj,'dynamictable', 'types.hdmf_common.DynamicTable', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/PupilTracking.m b/code/+types/+core/PupilTracking.m similarity index 66% rename from +types/+core/PupilTracking.m rename to code/+types/+core/PupilTracking.m index f7018341..0a4f149a 100644 --- a/+types/+core/PupilTracking.m +++ b/code/+types/+core/PupilTracking.m @@ -1,5 +1,8 @@ classdef PupilTracking < types.core.NWBDataInterface & types.untyped.GroupClass -% PUPILTRACKING Eye-tracking data, representing pupil size. +% PUPILTRACKING - Eye-tracking data, representing pupil size. +% +% Required Properties: +% timeseries % REQUIRED PROPERTIES @@ -9,7 +12,19 @@ methods function obj = PupilTracking(varargin) - % PUPILTRACKING Constructor for PupilTracking + % PUPILTRACKING - Constructor for PupilTracking + % + % Syntax: + % pupilTracking = types.core.PUPILTRACKING() creates a PupilTracking object with unset property values. + % + % pupilTracking = types.core.PUPILTRACKING(Name, Value) creates a PupilTracking object where one or more property values are specified using name-value pairs. 
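A minimal sketch of how the ProcessingModule documented above is typically used; the module name, the Position object, and the nwb file variable are illustrative assumptions, not part of this changeset:

    % Hypothetical processing module for behavioral data.
    behaviorModule = types.core.ProcessingModule( ...
        'description', 'processed behavioral data');
    % Data interfaces (e.g. a Position or PupilTracking object) are added by name:
    %   behaviorModule.nwbdatainterface.set('Position', positionObject);
    % and the module itself is usually stored under /processing of an NWB file:
    %   nwb.processing.set('behavior', behaviorModule);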
+ % + % Input Arguments (Name-Value Arguments): + % - timeseries (TimeSeries) - TimeSeries object containing time series data on pupil size. + % + % Output Arguments: + % - pupilTracking (types.core.PupilTracking) - A PupilTracking object + obj = obj@types.core.NWBDataInterface(varargin{:}); [obj.timeseries, ivarargin] = types.util.parseConstrained(obj,'timeseries', 'types.core.TimeSeries', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+core/RGBAImage.m b/code/+types/+core/RGBAImage.m similarity index 55% rename from +types/+core/RGBAImage.m rename to code/+types/+core/RGBAImage.m index 3ae47ea7..4d1953ba 100644 --- a/+types/+core/RGBAImage.m +++ b/code/+types/+core/RGBAImage.m @@ -1,11 +1,30 @@ classdef RGBAImage < types.core.Image & types.untyped.DatasetClass -% RGBAIMAGE A color image with transparency. +% RGBAIMAGE - A color image with transparency. +% +% Required Properties: +% data methods function obj = RGBAImage(varargin) - % RGBAIMAGE Constructor for RGBAImage + % RGBAIMAGE - Constructor for RGBAImage + % + % Syntax: + % rGBAImage = types.core.RGBAIMAGE() creates a RGBAImage object with unset property values. + % + % rGBAImage = types.core.RGBAIMAGE(Name, Value) creates a RGBAImage object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (numeric) - No description + % + % - description (char) - Description of the image. + % + % - resolution (single) - Pixel resolution of the image, in pixels per centimeter. + % + % Output Arguments: + % - rGBAImage (types.core.RGBAImage) - A RGBAImage object + obj = obj@types.core.Image(varargin{:}); diff --git a/+types/+core/RGBImage.m b/code/+types/+core/RGBImage.m similarity index 56% rename from +types/+core/RGBImage.m rename to code/+types/+core/RGBImage.m index 5aa72807..d5c5da6e 100644 --- a/+types/+core/RGBImage.m +++ b/code/+types/+core/RGBImage.m @@ -1,11 +1,30 @@ classdef RGBImage < types.core.Image & types.untyped.DatasetClass -% RGBIMAGE A color image. +% RGBIMAGE - A color image. +% +% Required Properties: +% data methods function obj = RGBImage(varargin) - % RGBIMAGE Constructor for RGBImage + % RGBIMAGE - Constructor for RGBImage + % + % Syntax: + % rGBImage = types.core.RGBIMAGE() creates a RGBImage object with unset property values. + % + % rGBImage = types.core.RGBIMAGE(Name, Value) creates a RGBImage object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (numeric) - No description + % + % - description (char) - Description of the image. + % + % - resolution (single) - Pixel resolution of the image, in pixels per centimeter. + % + % Output Arguments: + % - rGBImage (types.core.RGBImage) - A RGBImage object + obj = obj@types.core.Image(varargin{:}); diff --git a/+types/+core/RepetitionsTable.m b/code/+types/+core/RepetitionsTable.m similarity index 59% rename from +types/+core/RepetitionsTable.m rename to code/+types/+core/RepetitionsTable.m index a7d3b163..814ec47f 100644 --- a/+types/+core/RepetitionsTable.m +++ b/code/+types/+core/RepetitionsTable.m @@ -1,5 +1,8 @@ classdef RepetitionsTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass -% REPETITIONSTABLE A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence. 
+% REPETITIONSTABLE - A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence. +% +% Required Properties: +% id, sequential_recordings, sequential_recordings_index % REQUIRED PROPERTIES @@ -10,7 +13,29 @@ methods function obj = RepetitionsTable(varargin) - % REPETITIONSTABLE Constructor for RepetitionsTable + % REPETITIONSTABLE - Constructor for RepetitionsTable + % + % Syntax: + % repetitionsTable = types.core.REPETITIONSTABLE() creates a RepetitionsTable object with unset property values. + % + % repetitionsTable = types.core.REPETITIONSTABLE(Name, Value) creates a RepetitionsTable object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - description (char) - Description of what is in this dynamic table. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - sequential_recordings (DynamicTableRegion) - A reference to one or more rows in the SequentialRecordingsTable table. + % + % - sequential_recordings_index (VectorIndex) - Index dataset for the sequential_recordings column. + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % Output Arguments: + % - repetitionsTable (types.core.RepetitionsTable) - A RepetitionsTable object + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); diff --git a/code/+types/+core/RoiResponseSeries.m b/code/+types/+core/RoiResponseSeries.m new file mode 100644 index 00000000..b76999f9 --- /dev/null +++ b/code/+types/+core/RoiResponseSeries.m @@ -0,0 +1,108 @@ +classdef RoiResponseSeries < types.core.TimeSeries & types.untyped.GroupClass +% ROIRESPONSESERIES - ROI responses over an imaging plane. The first dimension represents time. The second dimension, if present, represents ROIs. +% +% Required Properties: +% data, rois + + +% REQUIRED PROPERTIES +properties + rois; % REQUIRED (DynamicTableRegion) DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. +end + +methods + function obj = RoiResponseSeries(varargin) + % ROIRESPONSESERIES - Constructor for RoiResponseSeries + % + % Syntax: + % roiResponseSeries = types.core.ROIRESPONSESERIES() creates a RoiResponseSeries object with unset property values. + % + % roiResponseSeries = types.core.ROIRESPONSESERIES(Name, Value) creates a RoiResponseSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. 
+ % + % - data (numeric) - Signals from ROIs. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - data_unit (char) - Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + % + % - description (char) - Description of the time series. + % + % - rois (DynamicTableRegion) - DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
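A minimal usage sketch for the RoiResponseSeries constructor documented above; the plane-segmentation path, ROI count, unit, and sampling rate are illustrative assumptions, not part of this changeset:

    % Hypothetical fluorescence traces for 10 ROIs.
    nRois = 10;
    roiRegion = types.hdmf_common.DynamicTableRegion( ...
        'table', types.untyped.ObjectView('/processing/ophys/ImageSegmentation/PlaneSegmentation'), ...
        'description', 'all ROIs', ...
        'data', (0:nRois-1)');               % 0-based row indices into the ROI table
    roiResponse = types.core.RoiResponseSeries( ...
        'rois', roiRegion, ...
        'data', rand(nRois, 100), ...        % [ROI x time]; MatNWB reverses the spec's dimension order
        'data_unit', 'lumens', ...
        'starting_time', 0.0, ...
        'starting_time_rate', 30.0);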
+ % + % Output Arguments: + % - roiResponseSeries (types.core.RoiResponseSeries) - A RoiResponseSeries object + + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'rois',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.rois = p.Results.rois; + if strcmp(class(obj), 'types.core.RoiResponseSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.rois(obj, val) + obj.rois = obj.validate_rois(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf], [Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_rois(obj, val) + val = types.util.checkDtype('rois', 'types.hdmf_common.DynamicTableRegion', val); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + refs = obj.rois.export(fid, [fullpath '/rois'], refs); + end +end + +end \ No newline at end of file diff --git a/+types/+core/ScratchData.m b/code/+types/+core/ScratchData.m similarity index 71% rename from +types/+core/ScratchData.m rename to code/+types/+core/ScratchData.m index 0d7923b3..c2bd4715 100644 --- a/+types/+core/ScratchData.m +++ b/code/+types/+core/ScratchData.m @@ -1,5 +1,8 @@ classdef ScratchData < types.core.NWBData & types.untyped.DatasetClass -% SCRATCHDATA Any one-off datasets +% SCRATCHDATA - Any one-off datasets +% +% Required Properties: +% data % OPTIONAL PROPERTIES @@ -9,7 +12,21 @@ methods function obj = ScratchData(varargin) - % SCRATCHDATA Constructor for ScratchData + % SCRATCHDATA - Constructor for ScratchData + % + % Syntax: + % scratchData = types.core.SCRATCHDATA() creates a ScratchData object with unset property values. + % + % scratchData = types.core.SCRATCHDATA(Name, Value) creates a ScratchData object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (any) - No description + % + % - notes (char) - Any notes the user has about the dataset being stored + % + % Output Arguments: + % - scratchData (types.core.ScratchData) - A ScratchData object + obj = obj@types.core.NWBData(varargin{:}); diff --git a/+types/+core/SequentialRecordingsTable.m b/code/+types/+core/SequentialRecordingsTable.m similarity index 61% rename from +types/+core/SequentialRecordingsTable.m rename to code/+types/+core/SequentialRecordingsTable.m index edcb85ab..96e3c970 100644 --- a/+types/+core/SequentialRecordingsTable.m +++ b/code/+types/+core/SequentialRecordingsTable.m @@ -1,5 +1,8 @@ classdef SequentialRecordingsTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass -% SEQUENTIALRECORDINGSTABLE A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. 
This is typically used to group together sequential recordings where a sequence of stimuli of the same type with varying parameters have been presented in a sequence. +% SEQUENTIALRECORDINGSTABLE - A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where a sequence of stimuli of the same type with varying parameters have been presented in a sequence. +% +% Required Properties: +% id, simultaneous_recordings, simultaneous_recordings_index, stimulus_type % REQUIRED PROPERTIES @@ -11,7 +14,31 @@ methods function obj = SequentialRecordingsTable(varargin) - % SEQUENTIALRECORDINGSTABLE Constructor for SequentialRecordingsTable + % SEQUENTIALRECORDINGSTABLE - Constructor for SequentialRecordingsTable + % + % Syntax: + % sequentialRecordingsTable = types.core.SEQUENTIALRECORDINGSTABLE() creates a SequentialRecordingsTable object with unset property values. + % + % sequentialRecordingsTable = types.core.SEQUENTIALRECORDINGSTABLE(Name, Value) creates a SequentialRecordingsTable object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - description (char) - Description of what is in this dynamic table. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - simultaneous_recordings (DynamicTableRegion) - A reference to one or more rows in the SimultaneousRecordingsTable table. + % + % - simultaneous_recordings_index (VectorIndex) - Index dataset for the simultaneous_recordings column. + % + % - stimulus_type (VectorData) - The type of stimulus used for the sequential recording. + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % Output Arguments: + % - sequentialRecordingsTable (types.core.SequentialRecordingsTable) - A SequentialRecordingsTable object + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); diff --git a/+types/+core/SimultaneousRecordingsTable.m b/code/+types/+core/SimultaneousRecordingsTable.m similarity index 57% rename from +types/+core/SimultaneousRecordingsTable.m rename to code/+types/+core/SimultaneousRecordingsTable.m index c4c1fddc..9b1e325d 100644 --- a/+types/+core/SimultaneousRecordingsTable.m +++ b/code/+types/+core/SimultaneousRecordingsTable.m @@ -1,5 +1,8 @@ classdef SimultaneousRecordingsTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass -% SIMULTANEOUSRECORDINGSTABLE A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes. +% SIMULTANEOUSRECORDINGSTABLE - A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes. +% +% Required Properties: +% id, recordings, recordings_index % REQUIRED PROPERTIES @@ -10,7 +13,29 @@ methods function obj = SimultaneousRecordingsTable(varargin) - % SIMULTANEOUSRECORDINGSTABLE Constructor for SimultaneousRecordingsTable + % SIMULTANEOUSRECORDINGSTABLE - Constructor for SimultaneousRecordingsTable + % + % Syntax: + % simultaneousRecordingsTable = types.core.SIMULTANEOUSRECORDINGSTABLE() creates a SimultaneousRecordingsTable object with unset property values. 
+ % + % simultaneousRecordingsTable = types.core.SIMULTANEOUSRECORDINGSTABLE(Name, Value) creates a SimultaneousRecordingsTable object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - description (char) - Description of what is in this dynamic table. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - recordings (DynamicTableRegion) - A reference to one or more rows in the IntracellularRecordingsTable table. + % + % - recordings_index (VectorIndex) - Index dataset for the recordings column. + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % Output Arguments: + % - simultaneousRecordingsTable (types.core.SimultaneousRecordingsTable) - A SimultaneousRecordingsTable object + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); diff --git a/code/+types/+core/SpatialSeries.m b/code/+types/+core/SpatialSeries.m new file mode 100644 index 00000000..25dd6b5b --- /dev/null +++ b/code/+types/+core/SpatialSeries.m @@ -0,0 +1,150 @@ +classdef SpatialSeries < types.core.TimeSeries & types.untyped.GroupClass +% SPATIALSERIES - Direction, e.g., of gaze or travel, or position. The TimeSeries::data field is a 2D array storing position or direction relative to some reference frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries has a text dataset reference_frame that indicates the zero-position, or the zero-axes for direction. For example, if representing gaze direction, 'straight-ahead' might be a specific pixel on the monitor, or some other point in space. For position data, the 0,0 point might be the top-left corner of an enclosure, as viewed from the tracking camera. The unit of data will indicate how to interpret SpatialSeries values. +% +% Required Properties: +% data + + +% OPTIONAL PROPERTIES +properties + reference_frame; % (char) Description defining what exactly 'straight-ahead' means. +end + +methods + function obj = SpatialSeries(varargin) + % SPATIALSERIES - Constructor for SpatialSeries + % + % Syntax: + % spatialSeries = types.core.SPATIALSERIES() creates a SpatialSeries object with unset property values. + % + % spatialSeries = types.core.SPATIALSERIES(Name, Value) creates a SpatialSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (numeric) - 1-D or 2-D array storing position or direction relative to some reference frame. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". 
For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - data_unit (char) - Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + % + % - description (char) - Description of the time series. + % + % - reference_frame (char) - Description defining what exactly 'straight-ahead' means. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
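A minimal usage sketch for the SpatialSeries constructor documented above; the arena description, sampling rate, and synthetic trajectory are illustrative assumptions, not part of this changeset:

    % Hypothetical (x, y) position sampled at 50 Hz.
    positionData = [linspace(0, 10, 50); linspace(0, 8, 50)];   % [2 x num_samples] in MATLAB order
    spatialSeries = types.core.SpatialSeries( ...
        'description', 'Animal position in the open field.', ...
        'data', positionData, ...
        'reference_frame', '(0,0) is the bottom-left corner of the arena', ...
        'starting_time', 0.0, ...
        'starting_time_rate', 50.0);
    % data_unit defaults to 'meters' (prepended to varargin by the generated constructor).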
+ % + % Output Arguments: + % - spatialSeries (types.core.SpatialSeries) - A SpatialSeries object + + varargin = [{'data_unit' 'meters'} varargin]; + obj = obj@types.core.TimeSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + addParameter(p, 'reference_frame',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + obj.reference_frame = p.Results.reference_frame; + if strcmp(class(obj), 'types.core.SpatialSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.reference_frame(obj, val) + obj.reference_frame = obj.validate_reference_frame(val); + end + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[3,Inf], [2,Inf], [1,Inf], [Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_unit(obj, val) + val = types.util.checkDtype('data_unit', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_reference_frame(obj, val) + val = types.util.checkDtype('reference_frame', 'char', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.TimeSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.reference_frame) + if startsWith(class(obj.reference_frame), 'types.untyped.') + refs = obj.reference_frame.export(fid, [fullpath '/reference_frame'], refs); + elseif ~isempty(obj.reference_frame) + io.writeDataset(fid, [fullpath '/reference_frame'], obj.reference_frame); + end + end + end +end + +end \ No newline at end of file diff --git a/code/+types/+core/SpikeEventSeries.m b/code/+types/+core/SpikeEventSeries.m new file mode 100644 index 00000000..44e8a075 --- /dev/null +++ b/code/+types/+core/SpikeEventSeries.m @@ -0,0 +1,146 @@ +classdef SpikeEventSeries < types.core.ElectricalSeries & types.untyped.GroupClass +% SPIKEEVENTSERIES - Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode). 
+% +% Required Properties: +% data, electrodes, timestamps + + + +methods + function obj = SpikeEventSeries(varargin) + % SPIKEEVENTSERIES - Constructor for SpikeEventSeries + % + % Syntax: + % spikeEventSeries = types.core.SPIKEEVENTSERIES() creates a SpikeEventSeries object with unset property values. + % + % spikeEventSeries = types.core.SPIKEEVENTSERIES(Name, Value) creates a SpikeEventSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - channel_conversion (single) - Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels. + % + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (numeric) - Spike waveforms. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. 
Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - description (char) - Description of the time series. + % + % - electrodes (DynamicTableRegion) - DynamicTableRegion pointer to the electrodes that this time series was generated from. + % + % - filtering (char) - Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be "High-pass 4-pole Bessel filter at 500 Hz". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be "Low-pass filter at 300 Hz". If a non-standard filter type is used, provide as much detail about the filter properties as possible. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here. 
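+            %
+            % Example (a minimal sketch; the electrodes-table path and the snippet
+            % values below are illustrative placeholders):
+            %   electrodeRegion = types.hdmf_common.DynamicTableRegion( ...
+            %       'table', types.untyped.ObjectView('/general/extracellular_ephys/electrodes'), ...
+            %       'description', 'channels these snippets were recorded from', ...
+            %       'data', (0:3)');
+            %   spikeEvents = types.core.SpikeEventSeries( ...
+            %       'data', rand(32, 4, 10), ...  % samples x channels x events (schema dimension order is reversed in MATLAB)
+            %       'electrodes', electrodeRegion, ...
+            %       'timestamps', sort(rand(10, 1)), ...
+            %       'description', 'snippets around threshold crossings');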
+ % + % Output Arguments: + % - spikeEventSeries (types.core.SpikeEventSeries) - A SpikeEventSeries object + + varargin = [{'data_unit' 'volts' 'timestamps_interval' types.util.correctType(1, 'int32') 'timestamps_unit' 'seconds'} varargin]; + obj = obj@types.core.ElectricalSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + addParameter(p, 'timestamps',[]); + addParameter(p, 'timestamps_interval',[]); + addParameter(p, 'timestamps_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + obj.timestamps = p.Results.timestamps; + obj.timestamps_interval = p.Results.timestamps_interval; + obj.timestamps_unit = p.Results.timestamps_unit; + if strcmp(class(obj), 'types.core.SpikeEventSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + val = types.util.checkDtype('data', 'numeric', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf,Inf,Inf], [Inf,Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_data_unit(obj, val) + if isequal(val, 'volts') + val = 'volts'; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''SpikeEventSeries'' because it is read-only.') + end + end + function val = validate_timestamps(obj, val) + val = types.util.checkDtype('timestamps', 'double', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[Inf]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_timestamps_interval(obj, val) + if isequal(val, 1) + val = 1; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''timestamps_interval'' property of class ''SpikeEventSeries'' because it is read-only.') + end + end + function val = validate_timestamps_unit(obj, val) + if isequal(val, 'seconds') + val = 'seconds'; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''timestamps_unit'' property of class ''SpikeEventSeries'' because it is read-only.') + end + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.ElectricalSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/Subject.m b/code/+types/+core/Subject.m similarity index 88% rename from +types/+core/Subject.m rename to code/+types/+core/Subject.m index bfd4e1ae..4d6927c9 100644 --- a/+types/+core/Subject.m +++ b/code/+types/+core/Subject.m @@ -1,5 +1,8 @@ classdef Subject < types.core.NWBContainer & types.untyped.GroupClass -% SUBJECT Information about the animal or person from which the data was measured. +% SUBJECT - Information about the animal or person from which the data was measured. 
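+%
+% Example (an illustrative sketch; 'nwb' is assumed to be an existing NwbFile object):
+%   subject = types.core.Subject( ...
+%       'subject_id', 'mouse001', ...
+%       'species', 'Mus musculus', ...
+%       'sex', 'F', ...
+%       'age', 'P90D', ...
+%       'description', 'wild-type mouse');
+%   nwb.general_subject = subject;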
+% +% Required Properties: +% None % OPTIONAL PROPERTIES @@ -18,7 +21,37 @@ methods function obj = Subject(varargin) - % SUBJECT Constructor for Subject + % SUBJECT - Constructor for Subject + % + % Syntax: + % subject = types.core.SUBJECT() creates a Subject object with unset property values. + % + % subject = types.core.SUBJECT(Name, Value) creates a Subject object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - age (char) - Age of subject. Can be supplied instead of 'date_of_birth'. + % + % - age_reference (char) - Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied. + % + % - date_of_birth (datetime) - Date of birth of subject. Can be supplied instead of 'age'. + % + % - description (char) - Description of subject and where subject came from (e.g., breeder, if animal). + % + % - genotype (char) - Genetic strain. If absent, assume Wild Type (WT). + % + % - sex (char) - Gender of subject. + % + % - species (char) - Species of subject. + % + % - strain (char) - Strain of subject. + % + % - subject_id (char) - ID of animal/person used/participating in experiment (lab convention). + % + % - weight (char) - Weight at time of experiment, at time of surgery and at other important times. + % + % Output Arguments: + % - subject (types.core.Subject) - A Subject object + varargin = [{'age_reference' 'birth'} varargin]; obj = obj@types.core.NWBContainer(varargin{:}); diff --git a/+types/+core/SweepTable.m b/code/+types/+core/SweepTable.m similarity index 60% rename from +types/+core/SweepTable.m rename to code/+types/+core/SweepTable.m index be7b8b81..4746b27c 100644 --- a/+types/+core/SweepTable.m +++ b/code/+types/+core/SweepTable.m @@ -1,5 +1,8 @@ classdef SweepTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass -% SWEEPTABLE [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions tables provide enhanced support for experiment metadata. +% SWEEPTABLE - [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions tables provide enhanced support for experiment metadata. +% +% Required Properties: +% id, series, series_index, sweep_number % REQUIRED PROPERTIES @@ -11,7 +14,31 @@ methods function obj = SweepTable(varargin) - % SWEEPTABLE Constructor for SweepTable + % SWEEPTABLE - Constructor for SweepTable + % + % Syntax: + % sweepTable = types.core.SWEEPTABLE() creates a SweepTable object with unset property values. + % + % sweepTable = types.core.SWEEPTABLE(Name, Value) creates a SweepTable object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - description (char) - Description of what is in this dynamic table. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - series (VectorData) - The PatchClampSeries with the sweep number in that row. + % + % - series_index (VectorIndex) - Index for series. 
+ % + % - sweep_number (VectorData) - Sweep number of the PatchClampSeries in that row. + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % Output Arguments: + % - sweepTable (types.core.SweepTable) - A SweepTable object + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); diff --git a/+types/+core/TimeIntervals.m b/code/+types/+core/TimeIntervals.m similarity index 71% rename from +types/+core/TimeIntervals.m rename to code/+types/+core/TimeIntervals.m index 74555b63..49a50c5d 100644 --- a/+types/+core/TimeIntervals.m +++ b/code/+types/+core/TimeIntervals.m @@ -1,5 +1,8 @@ classdef TimeIntervals < types.hdmf_common.DynamicTable & types.untyped.GroupClass -% TIMEINTERVALS A container for aggregating epoch data and the TimeSeries that each epoch applies to. +% TIMEINTERVALS - A container for aggregating epoch data and the TimeSeries that each epoch applies to. +% +% Required Properties: +% id, start_time, stop_time % REQUIRED PROPERTIES @@ -17,7 +20,37 @@ methods function obj = TimeIntervals(varargin) - % TIMEINTERVALS Constructor for TimeIntervals + % TIMEINTERVALS - Constructor for TimeIntervals + % + % Syntax: + % timeIntervals = types.core.TIMEINTERVALS() creates a TimeIntervals object with unset property values. + % + % timeIntervals = types.core.TIMEINTERVALS(Name, Value) creates a TimeIntervals object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - description (char) - Description of what is in this dynamic table. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - start_time (VectorData) - Start time of epoch, in seconds. + % + % - stop_time (VectorData) - Stop time of epoch, in seconds. + % + % - tags (VectorData) - User-defined tags that identify or categorize events. + % + % - tags_index (VectorIndex) - Index for tags. + % + % - timeseries (TimeSeriesReferenceVectorData) - An index into a TimeSeries object. + % + % - timeseries_index (VectorIndex) - Index for timeseries. + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % Output Arguments: + % - timeIntervals (types.core.TimeIntervals) - A TimeIntervals object + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); diff --git a/+types/+core/TimeSeries.m b/code/+types/+core/TimeSeries.m similarity index 82% rename from +types/+core/TimeSeries.m rename to code/+types/+core/TimeSeries.m index 2fad2f65..f1f369b3 100644 --- a/+types/+core/TimeSeries.m +++ b/code/+types/+core/TimeSeries.m @@ -1,5 +1,8 @@ classdef TimeSeries < types.core.NWBDataInterface & types.untyped.GroupClass -% TIMESERIES General purpose time series. +% TIMESERIES - General purpose time series. +% +% Required Properties: +% data % READONLY PROPERTIES @@ -30,7 +33,43 @@ methods function obj = TimeSeries(varargin) - % TIMESERIES Constructor for TimeSeries + % TIMESERIES - Constructor for TimeSeries + % + % Syntax: + % timeSeries = types.core.TIMESERIES() creates a TimeSeries object with unset property values. + % + % timeSeries = types.core.TIMESERIES(Name, Value) creates a TimeSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. 
This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (any) - Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - data_unit (char) - Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + % + % - description (char) - Description of the time series. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. 
+ % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + % + % Output Arguments: + % - timeSeries (types.core.TimeSeries) - A TimeSeries object + varargin = [{'comments' 'no comments' 'data_conversion' types.util.correctType(1, 'single') 'data_offset' types.util.correctType(0, 'single') 'data_resolution' types.util.correctType(-1, 'single') 'description' 'no description' 'starting_time_unit' 'seconds' 'timestamps_interval' types.util.correctType(1, 'int32') 'timestamps_unit' 'seconds'} varargin]; obj = obj@types.core.NWBDataInterface(varargin{:}); diff --git a/+types/+core/TimeSeriesReferenceVectorData.m b/code/+types/+core/TimeSeriesReferenceVectorData.m similarity index 56% rename from +types/+core/TimeSeriesReferenceVectorData.m rename to code/+types/+core/TimeSeriesReferenceVectorData.m index 719c12d2..04cb7695 100644 --- a/+types/+core/TimeSeriesReferenceVectorData.m +++ b/code/+types/+core/TimeSeriesReferenceVectorData.m @@ -1,11 +1,28 @@ classdef TimeSeriesReferenceVectorData < types.hdmf_common.VectorData & types.untyped.DatasetClass -% TIMESERIESREFERENCEVECTORDATA Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. +% TIMESERIESREFERENCEVECTORDATA - Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries. +% +% Required Properties: +% data methods function obj = TimeSeriesReferenceVectorData(varargin) - % TIMESERIESREFERENCEVECTORDATA Constructor for TimeSeriesReferenceVectorData + % TIMESERIESREFERENCEVECTORDATA - Constructor for TimeSeriesReferenceVectorData + % + % Syntax: + % timeSeriesReferenceVectorData = types.core.TIMESERIESREFERENCEVECTORDATA() creates a TimeSeriesReferenceVectorData object with unset property values. + % + % timeSeriesReferenceVectorData = types.core.TIMESERIESREFERENCEVECTORDATA(Name, Value) creates a TimeSeriesReferenceVectorData object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (Table with columns: (int32, int32, Object reference to TimeSeries)) - No description + % + % - description (char) - Description of what these vectors represent. + % + % Output Arguments: + % - timeSeriesReferenceVectorData (types.core.TimeSeriesReferenceVectorData) - A TimeSeriesReferenceVectorData object + obj = obj@types.hdmf_common.VectorData(varargin{:}); diff --git a/code/+types/+core/TwoPhotonSeries.m b/code/+types/+core/TwoPhotonSeries.m new file mode 100644 index 00000000..59427e31 --- /dev/null +++ b/code/+types/+core/TwoPhotonSeries.m @@ -0,0 +1,198 @@ +classdef TwoPhotonSeries < types.core.ImageSeries & types.untyped.GroupClass +% TWOPHOTONSERIES - Image stack recorded over time from 2-photon microscope. +% +% Required Properties: +% data + + +% OPTIONAL PROPERTIES +properties + field_of_view; % (single) Width, height and depth of image, or imaged area, in meters. + imaging_plane; % ImagingPlane + pmt_gain; % (single) Photomultiplier gain. + scan_line_rate; % (single) Lines imaged per second. 
This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. +end + +methods + function obj = TwoPhotonSeries(varargin) + % TWOPHOTONSERIES - Constructor for TwoPhotonSeries + % + % Syntax: + % twoPhotonSeries = types.core.TWOPHOTONSERIES() creates a TwoPhotonSeries object with unset property values. + % + % twoPhotonSeries = types.core.TWOPHOTONSERIES(Name, Value) creates a TwoPhotonSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (numeric) - Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - data_unit (char) - Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. 
To access the data in these units, multiply 'data' by 'conversion' and add 'offset'. + % + % - description (char) - Description of the time series. + % + % - device (Device) - Link to the Device object that was used to capture these images. + % + % - dimension (int32) - Number of pixels on x, y, (and z) axes. + % + % - external_file (char) - Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file. + % + % - external_file_starting_frame (int32) - Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. + % + % - field_of_view (single) - Width, height and depth of image, or imaged area, in meters. + % + % - format (char) - Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed. + % + % - imaging_plane (ImagingPlane) - Link to ImagingPlane object from which this TimeSeries data was generated. + % + % - pmt_gain (single) - Photomultiplier gain. + % + % - scan_line_rate (single) - Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
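+            %
+            % Example (a minimal sketch; the imaging plane path and the frame data
+            % below are illustrative placeholders):
+            %   tps = types.core.TwoPhotonSeries( ...
+            %       'imaging_plane', types.untyped.SoftLink('/general/optophysiology/imaging_plane_1'), ...
+            %       'data', rand(64, 64, 1000), ...  % width x height x frames (schema dimension order is reversed in MATLAB)
+            %       'data_unit', 'lumens', ...
+            %       'scan_line_rate', 15000, ...
+            %       'starting_time', 0.0, ...
+            %       'starting_time_rate', 30.0);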
+ % + % Output Arguments: + % - twoPhotonSeries (types.core.TwoPhotonSeries) - A TwoPhotonSeries object + + obj = obj@types.core.ImageSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'field_of_view',[]); + addParameter(p, 'imaging_plane',[]); + addParameter(p, 'pmt_gain',[]); + addParameter(p, 'scan_line_rate',[]); + misc.parseSkipInvalidName(p, varargin); + obj.field_of_view = p.Results.field_of_view; + obj.imaging_plane = p.Results.imaging_plane; + obj.pmt_gain = p.Results.pmt_gain; + obj.scan_line_rate = p.Results.scan_line_rate; + if strcmp(class(obj), 'types.core.TwoPhotonSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + function set.field_of_view(obj, val) + obj.field_of_view = obj.validate_field_of_view(val); + end + function set.imaging_plane(obj, val) + obj.imaging_plane = obj.validate_imaging_plane(val); + end + function set.pmt_gain(obj, val) + obj.pmt_gain = obj.validate_pmt_gain(val); + end + function set.scan_line_rate(obj, val) + obj.scan_line_rate = obj.validate_scan_line_rate(val); + end + %% VALIDATORS + + function val = validate_field_of_view(obj, val) + val = types.util.checkDtype('field_of_view', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[3], [2]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_imaging_plane(obj, val) + if isa(val, 'types.untyped.SoftLink') + if isprop(val, 'target') + types.util.checkDtype('imaging_plane', 'types.core.ImagingPlane', val.target); + end + else + val = types.util.checkDtype('imaging_plane', 'types.core.ImagingPlane', val); + if ~isempty(val) + val = types.untyped.SoftLink(val); + end + end + end + function val = validate_pmt_gain(obj, val) + val = types.util.checkDtype('pmt_gain', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + function val = validate_scan_line_rate(obj, val) + val = types.util.checkDtype('scan_line_rate', 'single', val); + if isa(val, 'types.untyped.DataStub') + if 1 == val.ndims + valsz = [val.dims 1]; + else + valsz = val.dims; + end + elseif istable(val) + valsz = [height(val) 1]; + elseif ischar(val) + valsz = [size(val, 1) 1]; + else + valsz = size(val); + end + validshapes = {[1]}; + types.util.checkDims(valsz, validshapes); + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.ImageSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + if ~isempty(obj.field_of_view) + if startsWith(class(obj.field_of_view), 'types.untyped.') + refs = obj.field_of_view.export(fid, [fullpath '/field_of_view'], refs); + elseif ~isempty(obj.field_of_view) + io.writeDataset(fid, [fullpath '/field_of_view'], obj.field_of_view, 'forceArray'); + end + end + refs = obj.imaging_plane.export(fid, [fullpath '/imaging_plane'], refs); + if ~isempty(obj.pmt_gain) + io.writeAttribute(fid, [fullpath '/pmt_gain'], 
obj.pmt_gain); + end + if ~isempty(obj.scan_line_rate) + io.writeAttribute(fid, [fullpath '/scan_line_rate'], obj.scan_line_rate); + end + end +end + +end \ No newline at end of file diff --git a/+types/+core/Units.m b/code/+types/+core/Units.m similarity index 70% rename from +types/+core/Units.m rename to code/+types/+core/Units.m index 75f2f708..e42631d0 100644 --- a/+types/+core/Units.m +++ b/code/+types/+core/Units.m @@ -1,5 +1,8 @@ classdef Units < types.hdmf_common.DynamicTable & types.untyped.GroupClass -% UNITS Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times. +% UNITS - Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times. +% +% Required Properties: +% id % OPTIONAL PROPERTIES @@ -20,7 +23,49 @@ methods function obj = Units(varargin) - % UNITS Constructor for Units + % UNITS - Constructor for Units + % + % Syntax: + % units = types.core.UNITS() creates a Units object with unset property values. + % + % units = types.core.UNITS(Name, Value) creates a Units object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - description (char) - Description of what is in this dynamic table. + % + % - electrode_group (VectorData) - Electrode group that each spike unit came from. + % + % - electrodes (DynamicTableRegion) - Electrode that each spike unit came from, specified using a DynamicTableRegion. + % + % - electrodes_index (VectorIndex) - Index into electrodes. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - obs_intervals (VectorData) - Observation intervals for each unit. + % + % - obs_intervals_index (VectorIndex) - Index into the obs_intervals dataset. + % + % - spike_times (VectorData) - Spike times for each unit in seconds. + % + % - spike_times_index (VectorIndex) - Index into the spike_times dataset. + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % - waveform_mean (VectorData) - Spike waveform mean for each spike unit. + % + % - waveform_sd (VectorData) - Spike waveform standard deviation for each spike unit. + % + % - waveforms (VectorData) - Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. 
See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be the same as the order of the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same. + % + % - waveforms_index (VectorIndex) - Index into the 'waveforms' dataset. One value for every spike event. See 'waveforms' for more detail. + % + % - waveforms_index_index (VectorIndex) - Index into the 'waveforms_index' dataset. One value for every unit (row in the table). See 'waveforms' for more detail. + % + % Output Arguments: + % - units (types.core.Units) - A Units object + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); diff --git a/+types/+core/VoltageClampSeries.m b/code/+types/+core/VoltageClampSeries.m similarity index 74% rename from +types/+core/VoltageClampSeries.m rename to code/+types/+core/VoltageClampSeries.m index 50984fc5..2d2e3f50 100644 --- a/+types/+core/VoltageClampSeries.m +++ b/code/+types/+core/VoltageClampSeries.m @@ -1,5 +1,8 @@ classdef VoltageClampSeries < types.core.PatchClampSeries & types.untyped.GroupClass -% VOLTAGECLAMPSERIES Current data from an intracellular voltage-clamp recording. A corresponding VoltageClampStimulusSeries (stored separately as a stimulus) is used to store the voltage injected. +% VOLTAGECLAMPSERIES - Current data from an intracellular voltage-clamp recording. A corresponding VoltageClampStimulusSeries (stored separately as a stimulus) is used to store the voltage injected. +% +% Required Properties: +% data % READONLY PROPERTIES @@ -25,7 +28,63 @@ methods function obj = VoltageClampSeries(varargin) - % VOLTAGECLAMPSERIES Constructor for VoltageClampSeries + % VOLTAGECLAMPSERIES - Constructor for VoltageClampSeries + % + % Syntax: + % voltageClampSeries = types.core.VOLTAGECLAMPSERIES() creates a VoltageClampSeries object with unset property values. + % + % voltageClampSeries = types.core.VOLTAGECLAMPSERIES(Name, Value) creates a VoltageClampSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - capacitance_fast (single) - Fast capacitance, in farads. + % + % - capacitance_slow (single) - Slow capacitance, in farads. + % + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (any) - Recorded current. 
+ % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - description (char) - Description of the time series. + % + % - electrode (IntracellularElectrode) - Link to IntracellularElectrode object that describes the electrode that was used to apply or record this data. + % + % - gain (single) - Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). + % + % - resistance_comp_bandwidth (single) - Resistance compensation bandwidth, in hertz. + % + % - resistance_comp_correction (single) - Resistance compensation correction, in percent. + % + % - resistance_comp_prediction (single) - Resistance compensation prediction, in percent. + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - stimulus_description (char) - Protocol/stimulus name for this patch-clamp dataset. + % + % - sweep_number (uint32) - Sweep number, allows to group different PatchClampSeries together. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. + % + % - whole_cell_capacitance_comp (single) - Whole cell capacitance compensation, in farads. + % + % - whole_cell_series_resistance_comp (single) - Whole cell series resistance compensation, in ohms. 
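+            %
+            % Example (a minimal sketch; 'currentTrace' and the electrode path are
+            % illustrative placeholders):
+            %   vcs = types.core.VoltageClampSeries( ...
+            %       'data', currentTrace, ...            % data_unit defaults to 'amperes'
+            %       'electrode', types.untyped.SoftLink('/general/intracellular_ephys/electrode_0'), ...
+            %       'gain', 0.02, ...
+            %       'stimulus_description', 'voltage step protocol', ...
+            %       'starting_time', 0.0, ...
+            %       'starting_time_rate', 20000);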
+ % + % Output Arguments: + % - voltageClampSeries (types.core.VoltageClampSeries) - A VoltageClampSeries object + varargin = [{'capacitance_fast_unit' 'farads' 'capacitance_slow_unit' 'farads' 'data_unit' 'amperes' 'resistance_comp_bandwidth_unit' 'hertz' 'resistance_comp_correction_unit' 'percent' 'resistance_comp_prediction_unit' 'percent' 'whole_cell_capacitance_comp_unit' 'farads' 'whole_cell_series_resistance_comp_unit' 'ohms'} varargin]; obj = obj@types.core.PatchClampSeries(varargin{:}); diff --git a/code/+types/+core/VoltageClampStimulusSeries.m b/code/+types/+core/VoltageClampStimulusSeries.m new file mode 100644 index 00000000..cf0925a7 --- /dev/null +++ b/code/+types/+core/VoltageClampStimulusSeries.m @@ -0,0 +1,95 @@ +classdef VoltageClampStimulusSeries < types.core.PatchClampSeries & types.untyped.GroupClass +% VOLTAGECLAMPSTIMULUSSERIES - Stimulus voltage applied during a voltage clamp recording. +% +% Required Properties: +% data + + + +methods + function obj = VoltageClampStimulusSeries(varargin) + % VOLTAGECLAMPSTIMULUSSERIES - Constructor for VoltageClampStimulusSeries + % + % Syntax: + % voltageClampStimulusSeries = types.core.VOLTAGECLAMPSTIMULUSSERIES() creates a VoltageClampStimulusSeries object with unset property values. + % + % voltageClampStimulusSeries = types.core.VOLTAGECLAMPSTIMULUSSERIES(Name, Value) creates a VoltageClampStimulusSeries object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - comments (char) - Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string. + % + % - control (uint8) - Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data. + % + % - control_description (char) - Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0. + % + % - data (any) - Stimulus voltage applied. + % + % - data_continuity (char) - Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable. + % + % - data_conversion (single) - Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. 
if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. + % + % - data_offset (single) - Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units. + % + % - data_resolution (single) - Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. + % + % - description (char) - Description of the time series. + % + % - electrode (IntracellularElectrode) - Link to IntracellularElectrode object that describes the electrode that was used to apply or record this data. + % + % - gain (single) - Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). + % + % - starting_time (double) - Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute. + % + % - starting_time_rate (single) - Sampling rate, in Hz. + % + % - stimulus_description (char) - Protocol/stimulus name for this patch-clamp dataset. + % + % - sweep_number (uint32) - Sweep number, allows to group different PatchClampSeries together. + % + % - timestamps (double) - Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
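+            %
+            % Example (a minimal sketch; 'stimTrace', the electrode path, and the
+            % NwbFile variable 'nwb' are illustrative placeholders):
+            %   vcss = types.core.VoltageClampStimulusSeries( ...
+            %       'data', stimTrace, ...               % data_unit is fixed to 'volts'
+            %       'electrode', types.untyped.SoftLink('/general/intracellular_ephys/electrode_0'), ...
+            %       'gain', 0.02, ...
+            %       'stimulus_description', 'voltage step protocol', ...
+            %       'starting_time', 0.0, ...
+            %       'starting_time_rate', 20000);
+            %   nwb.stimulus_presentation.set('voltage_clamp_stimulus', vcss);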
+ % + % Output Arguments: + % - voltageClampStimulusSeries (types.core.VoltageClampStimulusSeries) - A VoltageClampStimulusSeries object + + varargin = [{'data_unit' 'volts'} varargin]; + obj = obj@types.core.PatchClampSeries(varargin{:}); + + + p = inputParser; + p.KeepUnmatched = true; + p.PartialMatching = false; + p.StructExpand = false; + addParameter(p, 'data',[]); + addParameter(p, 'data_unit',[]); + misc.parseSkipInvalidName(p, varargin); + obj.data = p.Results.data; + obj.data_unit = p.Results.data_unit; + if strcmp(class(obj), 'types.core.VoltageClampStimulusSeries') + cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); + types.util.checkUnset(obj, unique(cellStringArguments)); + end + end + %% SETTERS + + %% VALIDATORS + + function val = validate_data(obj, val) + + end + function val = validate_data_unit(obj, val) + if isequal(val, 'volts') + val = 'volts'; + else + error('NWB:Type:ReadOnlyProperty', 'Unable to set the ''data_unit'' property of class ''VoltageClampStimulusSeries'' because it is read-only.') + end + end + %% EXPORT + function refs = export(obj, fid, fullpath, refs) + refs = export@types.core.PatchClampSeries(obj, fid, fullpath, refs); + if any(strcmp(refs, fullpath)) + return; + end + end +end + +end \ No newline at end of file diff --git a/+types/+hdmf_common/AlignedDynamicTable.m b/code/+types/+hdmf_common/AlignedDynamicTable.m similarity index 57% rename from +types/+hdmf_common/AlignedDynamicTable.m rename to code/+types/+hdmf_common/AlignedDynamicTable.m index 1754bee8..f3232e15 100644 --- a/+types/+hdmf_common/AlignedDynamicTable.m +++ b/code/+types/+hdmf_common/AlignedDynamicTable.m @@ -1,5 +1,8 @@ classdef AlignedDynamicTable < types.hdmf_common.DynamicTable & types.untyped.GroupClass -% ALIGNEDDYNAMICTABLE DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. +% ALIGNEDDYNAMICTABLE - DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group. +% +% Required Properties: +% id % OPTIONAL PROPERTIES @@ -10,7 +13,29 @@ methods function obj = AlignedDynamicTable(varargin) - % ALIGNEDDYNAMICTABLE Constructor for AlignedDynamicTable + % ALIGNEDDYNAMICTABLE - Constructor for AlignedDynamicTable + % + % Syntax: + % alignedDynamicTable = types.hdmf_common.ALIGNEDDYNAMICTABLE() creates a AlignedDynamicTable object with unset property values. + % + % alignedDynamicTable = types.hdmf_common.ALIGNEDDYNAMICTABLE(Name, Value) creates a AlignedDynamicTable object where one or more property values are specified using name-value pairs. 
+ % + % Input Arguments (Name-Value Arguments): + % - categories (char) - The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group. + % + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - description (char) - Description of what is in this dynamic table. + % + % - dynamictable (DynamicTable) - A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. + % + % Output Arguments: + % - alignedDynamicTable (types.hdmf_common.AlignedDynamicTable) - A AlignedDynamicTable object + obj = obj@types.hdmf_common.DynamicTable(varargin{:}); [obj.dynamictable, ivarargin] = types.util.parseConstrained(obj,'dynamictable', 'types.hdmf_common.DynamicTable', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+hdmf_common/CSRMatrix.m b/code/+types/+hdmf_common/CSRMatrix.m similarity index 79% rename from +types/+hdmf_common/CSRMatrix.m rename to code/+types/+hdmf_common/CSRMatrix.m index 64e9b4d5..3b8e1c6c 100644 --- a/+types/+hdmf_common/CSRMatrix.m +++ b/code/+types/+hdmf_common/CSRMatrix.m @@ -1,5 +1,8 @@ classdef CSRMatrix < types.hdmf_common.Container & types.untyped.GroupClass -% CSRMATRIX A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. +% CSRMATRIX - A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]]. +% +% Required Properties: +% data, indices, indptr % REQUIRED PROPERTIES @@ -15,7 +18,25 @@ methods function obj = CSRMatrix(varargin) - % CSRMATRIX Constructor for CSRMatrix + % CSRMATRIX - Constructor for CSRMatrix + % + % Syntax: + % cSRMatrix = types.hdmf_common.CSRMATRIX() creates a CSRMatrix object with unset property values. + % + % cSRMatrix = types.hdmf_common.CSRMATRIX(Name, Value) creates a CSRMatrix object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (any) - The non-zero values in the matrix. + % + % - indices (uint) - The column indices. + % + % - indptr (uint) - The row index pointer. + % + % - shape (uint) - The shape (number of rows, number of columns) of this sparse matrix. 
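+            %
+            % Example (a minimal sketch of a 3x3 matrix with non-zero entries
+            % (1,1)=1, (2,3)=2 and (3,2)=3, stored with 0-based CSR indices):
+            %   csr = types.hdmf_common.CSRMatrix( ...
+            %       'data', [1; 2; 3], ...
+            %       'indices', uint64([0; 2; 1]), ...
+            %       'indptr', uint64([0; 1; 2; 3]), ...
+            %       'shape', uint64([3, 3]));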
+ % + % Output Arguments: + % - cSRMatrix (types.hdmf_common.CSRMatrix) - A CSRMatrix object + obj = obj@types.hdmf_common.Container(varargin{:}); diff --git a/+types/+hdmf_common/Container.m b/code/+types/+hdmf_common/Container.m similarity index 58% rename from +types/+hdmf_common/Container.m rename to code/+types/+hdmf_common/Container.m index b0075373..80468457 100644 --- a/+types/+hdmf_common/Container.m +++ b/code/+types/+hdmf_common/Container.m @@ -1,11 +1,21 @@ classdef Container < types.untyped.MetaClass & types.untyped.GroupClass -% CONTAINER An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. +% CONTAINER - An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers. +% +% Required Properties: +% None methods function obj = Container(varargin) - % CONTAINER Constructor for Container + % CONTAINER - Constructor for Container + % + % Syntax: + % container = types.hdmf_common.CONTAINER() creates a Container object with unset property values. + % + % Output Arguments: + % - container (types.hdmf_common.Container) - A Container object + obj = obj@types.untyped.MetaClass(varargin{:}); if strcmp(class(obj), 'types.hdmf_common.Container') cellStringArguments = convertContainedStringsToChars(varargin(1:2:end)); diff --git a/+types/+hdmf_common/Data.m b/code/+types/+hdmf_common/Data.m similarity index 65% rename from +types/+hdmf_common/Data.m rename to code/+types/+hdmf_common/Data.m index a53a8e72..40c20f04 100644 --- a/+types/+hdmf_common/Data.m +++ b/code/+types/+hdmf_common/Data.m @@ -1,5 +1,8 @@ classdef Data < types.untyped.MetaClass & types.untyped.DatasetClass -% DATA An abstract data type for a dataset. +% DATA - An abstract data type for a dataset. +% +% Required Properties: +% data % REQUIRED PROPERTIES @@ -9,7 +12,19 @@ methods function obj = Data(varargin) - % DATA Constructor for Data + % DATA - Constructor for Data + % + % Syntax: + % data = types.hdmf_common.DATA() creates a Data object with unset property values. + % + % data = types.hdmf_common.DATA(Name, Value) creates a Data object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (any) - No description + % + % Output Arguments: + % - data (types.hdmf_common.Data) - A Data object + obj = obj@types.untyped.MetaClass(varargin{:}); diff --git a/+types/+hdmf_common/DynamicTable.m b/code/+types/+hdmf_common/DynamicTable.m similarity index 64% rename from +types/+hdmf_common/DynamicTable.m rename to code/+types/+hdmf_common/DynamicTable.m index 80bc4d01..6abca75a 100644 --- a/+types/+hdmf_common/DynamicTable.m +++ b/code/+types/+hdmf_common/DynamicTable.m @@ -1,5 +1,8 @@ classdef DynamicTable < types.hdmf_common.Container & types.untyped.GroupClass -% DYNAMICTABLE A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. 
Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. +% DYNAMICTABLE - A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement if left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable. +% +% Required Properties: +% id % REQUIRED PROPERTIES @@ -15,7 +18,25 @@ methods function obj = DynamicTable(varargin) - % DYNAMICTABLE Constructor for DynamicTable + % DYNAMICTABLE - Constructor for DynamicTable + % + % Syntax: + % dynamicTable = types.hdmf_common.DYNAMICTABLE() creates a DynamicTable object with unset property values. + % + % dynamicTable = types.hdmf_common.DYNAMICTABLE(Name, Value) creates a DynamicTable object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - colnames (char) - The names of the columns in this table. This should be used to specify an order to the columns. + % + % - description (char) - Description of what is in this dynamic table. + % + % - id (ElementIdentifiers) - Array of unique identifiers for the rows of this dynamic table. + % + % - vectordata (VectorData) - Vector columns, including index columns, of this dynamic table. 
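+            %
+            % Example (a minimal sketch; the column name 'stim_label' and its values
+            % are illustrative placeholders):
+            %   tbl = types.hdmf_common.DynamicTable( ...
+            %       'description', 'trial metadata', ...
+            %       'colnames', {'stim_label'}, ...
+            %       'id', types.hdmf_common.ElementIdentifiers('data', int64([0; 1; 2])), ...
+            %       'stim_label', types.hdmf_common.VectorData( ...
+            %           'data', {'A'; 'B'; 'A'}, ...
+            %           'description', 'stimulus label for each trial'));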
+ % + % Output Arguments: + % - dynamicTable (types.hdmf_common.DynamicTable) - A DynamicTable object + obj = obj@types.hdmf_common.Container(varargin{:}); [obj.vectordata, ivarargin] = types.util.parseConstrained(obj,'vectordata', 'types.hdmf_common.VectorData', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+hdmf_common/DynamicTableRegion.m b/code/+types/+hdmf_common/DynamicTableRegion.m similarity index 53% rename from +types/+hdmf_common/DynamicTableRegion.m rename to code/+types/+hdmf_common/DynamicTableRegion.m index 506b6b02..6a20ed2a 100644 --- a/+types/+hdmf_common/DynamicTableRegion.m +++ b/code/+types/+hdmf_common/DynamicTableRegion.m @@ -1,5 +1,8 @@ classdef DynamicTableRegion < types.hdmf_common.VectorData & types.untyped.DatasetClass -% DYNAMICTABLEREGION DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. +% DYNAMICTABLEREGION - DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`. +% +% Required Properties: +% data % OPTIONAL PROPERTIES @@ -9,7 +12,27 @@ methods function obj = DynamicTableRegion(varargin) - % DYNAMICTABLEREGION Constructor for DynamicTableRegion + % DYNAMICTABLEREGION - Constructor for DynamicTableRegion + % + % Syntax: + % dynamicTableRegion = types.hdmf_common.DYNAMICTABLEREGION() creates a DynamicTableRegion object with unset property values. + % + % dynamicTableRegion = types.hdmf_common.DYNAMICTABLEREGION(Name, Value) creates a DynamicTableRegion object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (int8) - No description + % + % - description (char) - Description of what this table region points to. + % + % - resolution (double) - NOTE: this is a special value for compatibility with the Units table and is only written to file when detected to be in that specific HDF5 Group. The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. + % + % - sampling_rate (single) - NOTE: this is a special value for compatibility with the Units table and is only written to file when detected to be in that specific HDF5 Group. 
Must be Hertz + % + % - table (Object reference to DynamicTable) - Reference to the DynamicTable object that this region applies to. + % + % Output Arguments: + % - dynamicTableRegion (types.hdmf_common.DynamicTableRegion) - A DynamicTableRegion object + obj = obj@types.hdmf_common.VectorData(varargin{:}); diff --git a/+types/+hdmf_common/ElementIdentifiers.m b/code/+types/+hdmf_common/ElementIdentifiers.m similarity index 56% rename from +types/+hdmf_common/ElementIdentifiers.m rename to code/+types/+hdmf_common/ElementIdentifiers.m index d9bd22b0..e819ad66 100644 --- a/+types/+hdmf_common/ElementIdentifiers.m +++ b/code/+types/+hdmf_common/ElementIdentifiers.m @@ -1,11 +1,26 @@ classdef ElementIdentifiers < types.hdmf_common.Data & types.untyped.DatasetClass -% ELEMENTIDENTIFIERS A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. +% ELEMENTIDENTIFIERS - A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable. +% +% Required Properties: +% data methods function obj = ElementIdentifiers(varargin) - % ELEMENTIDENTIFIERS Constructor for ElementIdentifiers + % ELEMENTIDENTIFIERS - Constructor for ElementIdentifiers + % + % Syntax: + % elementIdentifiers = types.hdmf_common.ELEMENTIDENTIFIERS() creates a ElementIdentifiers object with unset property values. + % + % elementIdentifiers = types.hdmf_common.ELEMENTIDENTIFIERS(Name, Value) creates a ElementIdentifiers object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (int8) - No description + % + % Output Arguments: + % - elementIdentifiers (types.hdmf_common.ElementIdentifiers) - A ElementIdentifiers object + obj = obj@types.hdmf_common.Data(varargin{:}); diff --git a/+types/+hdmf_common/SimpleMultiContainer.m b/code/+types/+hdmf_common/SimpleMultiContainer.m similarity index 69% rename from +types/+hdmf_common/SimpleMultiContainer.m rename to code/+types/+hdmf_common/SimpleMultiContainer.m index fbc4746b..49945024 100644 --- a/+types/+hdmf_common/SimpleMultiContainer.m +++ b/code/+types/+hdmf_common/SimpleMultiContainer.m @@ -1,5 +1,8 @@ classdef SimpleMultiContainer < types.hdmf_common.Container & types.untyped.GroupClass -% SIMPLEMULTICONTAINER A simple Container for holding onto multiple containers. +% SIMPLEMULTICONTAINER - A simple Container for holding onto multiple containers. +% +% Required Properties: +% None % OPTIONAL PROPERTIES @@ -10,7 +13,21 @@ methods function obj = SimpleMultiContainer(varargin) - % SIMPLEMULTICONTAINER Constructor for SimpleMultiContainer + % SIMPLEMULTICONTAINER - Constructor for SimpleMultiContainer + % + % Syntax: + % simpleMultiContainer = types.hdmf_common.SIMPLEMULTICONTAINER() creates a SimpleMultiContainer object with unset property values. + % + % simpleMultiContainer = types.hdmf_common.SIMPLEMULTICONTAINER(Name, Value) creates a SimpleMultiContainer object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - container (Container) - Container objects held within this SimpleMultiContainer. + % + % - data (Data) - Data objects held within this SimpleMultiContainer. 
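Returning to the DynamicTableRegion documented above: a hedged sketch of a region column that points at rows of another table (names are illustrative, `trials` is assumed to be an existing DynamicTable, and row indices are 0-based as the schema specifies):

    % Two entries referencing rows 0 and 2 of 'trials'.
    trialRegion = types.hdmf_common.DynamicTableRegion( ...
        'description', 'rows of the trials table referenced by each entry', ...
        'table', types.untyped.ObjectView(trials), ...
        'data', int8([0; 2]));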
+ % + % Output Arguments: + % - simpleMultiContainer (types.hdmf_common.SimpleMultiContainer) - A SimpleMultiContainer object + obj = obj@types.hdmf_common.Container(varargin{:}); [obj.container, ivarargin] = types.util.parseConstrained(obj,'container', 'types.hdmf_common.Container', varargin{:}); varargin(ivarargin) = []; diff --git a/+types/+hdmf_common/VectorData.m b/code/+types/+hdmf_common/VectorData.m similarity index 72% rename from +types/+hdmf_common/VectorData.m rename to code/+types/+hdmf_common/VectorData.m index cc269845..0e6f22d9 100644 --- a/+types/+hdmf_common/VectorData.m +++ b/code/+types/+hdmf_common/VectorData.m @@ -1,5 +1,8 @@ classdef VectorData < types.hdmf_common.Data & types.untyped.DatasetClass -% VECTORDATA An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. +% VECTORDATA - An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on. +% +% Required Properties: +% data % HIDDEN READONLY PROPERTIES @@ -18,7 +21,25 @@ methods function obj = VectorData(varargin) - % VECTORDATA Constructor for VectorData + % VECTORDATA - Constructor for VectorData + % + % Syntax: + % vectorData = types.hdmf_common.VECTORDATA() creates a VectorData object with unset property values. + % + % vectorData = types.hdmf_common.VECTORDATA(Name, Value) creates a VectorData object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (any) - No description + % + % - description (char) - Description of what these vectors represent. + % + % - resolution (double) - NOTE: this is a special value for compatibility with the Units table and is only written to file when detected to be in that specific HDF5 Group. The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. + % + % - sampling_rate (single) - NOTE: this is a special value for compatibility with the Units table and is only written to file when detected to be in that specific HDF5 Group. 
Must be Hertz + % + % Output Arguments: + % - vectorData (types.hdmf_common.VectorData) - A VectorData object + varargin = [{'unit' 'volts'} varargin]; obj = obj@types.hdmf_common.Data(varargin{:}); diff --git a/+types/+hdmf_common/VectorIndex.m b/code/+types/+hdmf_common/VectorIndex.m similarity index 53% rename from +types/+hdmf_common/VectorIndex.m rename to code/+types/+hdmf_common/VectorIndex.m index 4a40a936..a875dcb9 100644 --- a/+types/+hdmf_common/VectorIndex.m +++ b/code/+types/+hdmf_common/VectorIndex.m @@ -1,5 +1,8 @@ classdef VectorIndex < types.hdmf_common.VectorData & types.untyped.DatasetClass -% VECTORINDEX Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by "_index". +% VECTORINDEX - Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by "_index". +% +% Required Properties: +% data % OPTIONAL PROPERTIES @@ -9,7 +12,27 @@ methods function obj = VectorIndex(varargin) - % VECTORINDEX Constructor for VectorIndex + % VECTORINDEX - Constructor for VectorIndex + % + % Syntax: + % vectorIndex = types.hdmf_common.VECTORINDEX() creates a VectorIndex object with unset property values. + % + % vectorIndex = types.hdmf_common.VECTORINDEX(Name, Value) creates a VectorIndex object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (uint8) - No description + % + % - description (char) - Description of what these vectors represent. + % + % - resolution (double) - NOTE: this is a special value for compatibility with the Units table and is only written to file when detected to be in that specific HDF5 Group. The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples. + % + % - sampling_rate (single) - NOTE: this is a special value for compatibility with the Units table and is only written to file when detected to be in that specific HDF5 Group. Must be Hertz + % + % - target (Object reference to VectorData) - Reference to the target dataset that this index applies to. + % + % Output Arguments: + % - vectorIndex (types.hdmf_common.VectorIndex) - A VectorIndex object + obj = obj@types.hdmf_common.VectorData(varargin{:}); diff --git a/+types/+hdmf_experimental/EnumData.m b/code/+types/+hdmf_experimental/EnumData.m similarity index 67% rename from +types/+hdmf_experimental/EnumData.m rename to code/+types/+hdmf_experimental/EnumData.m index e22843e1..0c334e5f 100644 --- a/+types/+hdmf_experimental/EnumData.m +++ b/code/+types/+hdmf_experimental/EnumData.m @@ -1,5 +1,8 @@ classdef EnumData < types.hdmf_common.VectorData & types.untyped.DatasetClass -% ENUMDATA Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. 
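The VectorData and VectorIndex docstrings above define ragged access in 0-based terms: the k-th vector spans VectorData[VectorIndex[k-1]:VectorIndex[k]]. A small, hedged sketch of reading one ragged element back in MATLAB's 1-based indexing, reusing the illustrative spikeTimes/spikeTimesIndex variables from the earlier sketch:

    k = 2;                                         % which ragged row to read
    stops  = double(spikeTimesIndex.data(:));      % cumulative end positions per row
    starts = [0; stops(1:end-1)] + 1;              % corresponding 1-based start positions
    kthVector = spikeTimes.data(starts(k):stops(k));   % elements 3:5 for k == 2

For columns read back from file, the renamed helper +util/read_indexed_column.m listed later in this patch is intended for this kind of lookup.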
+% ENUMDATA - Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute. +% +% Required Properties: +% data % OPTIONAL PROPERTIES @@ -9,7 +12,23 @@ methods function obj = EnumData(varargin) - % ENUMDATA Constructor for EnumData + % ENUMDATA - Constructor for EnumData + % + % Syntax: + % enumData = types.hdmf_experimental.ENUMDATA() creates a EnumData object with unset property values. + % + % enumData = types.hdmf_experimental.ENUMDATA(Name, Value) creates a EnumData object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - data (uint8) - No description + % + % - description (char) - Description of what these vectors represent. + % + % - elements (Object reference to VectorData) - Reference to the VectorData object that contains the enumerable elements + % + % Output Arguments: + % - enumData (types.hdmf_experimental.EnumData) - A EnumData object + obj = obj@types.hdmf_common.VectorData(varargin{:}); diff --git a/+types/+hdmf_experimental/HERD.m b/code/+types/+hdmf_experimental/HERD.m similarity index 73% rename from +types/+hdmf_experimental/HERD.m rename to code/+types/+hdmf_experimental/HERD.m index 38fe06d1..efa48a4d 100644 --- a/+types/+hdmf_experimental/HERD.m +++ b/code/+types/+hdmf_experimental/HERD.m @@ -1,5 +1,8 @@ classdef HERD < types.hdmf_common.Container & types.untyped.GroupClass -% HERD HDMF External Resources Data Structure. A set of six tables for tracking external resource references in a file or across multiple files. +% HERD - HDMF External Resources Data Structure. A set of six tables for tracking external resource references in a file or across multiple files. +% +% Required Properties: +% entities, entity_keys, files, keys, object_keys, objects % REQUIRED PROPERTIES @@ -14,7 +17,29 @@ methods function obj = HERD(varargin) - % HERD Constructor for HERD + % HERD - Constructor for HERD + % + % Syntax: + % hERD = types.hdmf_experimental.HERD() creates a HERD object with unset property values. + % + % hERD = types.hdmf_experimental.HERD(Name, Value) creates a HERD object where one or more property values are specified using name-value pairs. + % + % Input Arguments (Name-Value Arguments): + % - entities (Data) - A table for mapping user terms (i.e., keys) to resource entities. + % + % - entity_keys (Data) - A table for identifying which keys use which entity. + % + % - files (Data) - A table for storing object ids of files used in external resources. + % + % - keys (Data) - A table for storing user terms that are used to refer to external resources. + % + % - object_keys (Data) - A table for identifying which objects use which keys. + % + % - objects (Data) - A table for identifying which objects in a file contain references to external resources. 
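For the EnumData type documented above, a hedged usage sketch (names are illustrative, not part of the patch): integer values index, 0-based, into a separate VectorData of labels referenced through the 'elements' attribute.

    labels = types.hdmf_common.VectorData( ...
        'description', 'category labels', ...
        'data', {'left', 'right'});
    choice = types.hdmf_experimental.EnumData( ...
        'description', 'per-trial choice as a 0-based index into labels', ...
        'elements', types.untyped.ObjectView(labels), ...
        'data', uint8([0, 1, 1, 0]));   % 0 -> 'left', 1 -> 'right'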
+ % + % Output Arguments: + % - hERD (types.hdmf_experimental.HERD) - A HERD object + obj = obj@types.hdmf_common.Container(varargin{:}); diff --git a/+types/+untyped/+datapipe/+dynamic/Filter.m b/code/+types/+untyped/+datapipe/+dynamic/Filter.m similarity index 100% rename from +types/+untyped/+datapipe/+dynamic/Filter.m rename to code/+types/+untyped/+datapipe/+dynamic/Filter.m diff --git a/+types/+untyped/+datapipe/+properties/Chunking.m b/code/+types/+untyped/+datapipe/+properties/Chunking.m similarity index 100% rename from +types/+untyped/+datapipe/+properties/Chunking.m rename to code/+types/+untyped/+datapipe/+properties/Chunking.m diff --git a/+types/+untyped/+datapipe/+properties/Compression.m b/code/+types/+untyped/+datapipe/+properties/Compression.m similarity index 100% rename from +types/+untyped/+datapipe/+properties/Compression.m rename to code/+types/+untyped/+datapipe/+properties/Compression.m diff --git a/+types/+untyped/+datapipe/+properties/DynamicFilter.m b/code/+types/+untyped/+datapipe/+properties/DynamicFilter.m similarity index 100% rename from +types/+untyped/+datapipe/+properties/DynamicFilter.m rename to code/+types/+untyped/+datapipe/+properties/DynamicFilter.m diff --git a/+types/+untyped/+datapipe/+properties/Shuffle.m b/code/+types/+untyped/+datapipe/+properties/Shuffle.m similarity index 100% rename from +types/+untyped/+datapipe/+properties/Shuffle.m rename to code/+types/+untyped/+datapipe/+properties/Shuffle.m diff --git a/+types/+untyped/+datapipe/BlueprintPipe.m b/code/+types/+untyped/+datapipe/BlueprintPipe.m similarity index 100% rename from +types/+untyped/+datapipe/BlueprintPipe.m rename to code/+types/+untyped/+datapipe/BlueprintPipe.m diff --git a/+types/+untyped/+datapipe/BoundPipe.m b/code/+types/+untyped/+datapipe/BoundPipe.m similarity index 100% rename from +types/+untyped/+datapipe/BoundPipe.m rename to code/+types/+untyped/+datapipe/BoundPipe.m diff --git a/+types/+untyped/+datapipe/Configuration.m b/code/+types/+untyped/+datapipe/Configuration.m similarity index 100% rename from +types/+untyped/+datapipe/Configuration.m rename to code/+types/+untyped/+datapipe/Configuration.m diff --git a/+types/+untyped/+datapipe/Pipe.m b/code/+types/+untyped/+datapipe/Pipe.m similarity index 100% rename from +types/+untyped/+datapipe/Pipe.m rename to code/+types/+untyped/+datapipe/Pipe.m diff --git a/+types/+untyped/+datapipe/Property.m b/code/+types/+untyped/+datapipe/Property.m similarity index 100% rename from +types/+untyped/+datapipe/Property.m rename to code/+types/+untyped/+datapipe/Property.m diff --git a/+types/+untyped/+datapipe/guessChunkSize.m b/code/+types/+untyped/+datapipe/guessChunkSize.m similarity index 100% rename from +types/+untyped/+datapipe/guessChunkSize.m rename to code/+types/+untyped/+datapipe/guessChunkSize.m diff --git a/+types/+untyped/@DataStub/DataStub.m b/code/+types/+untyped/@DataStub/DataStub.m similarity index 100% rename from +types/+untyped/@DataStub/DataStub.m rename to code/+types/+untyped/@DataStub/DataStub.m diff --git a/+types/+untyped/@DataStub/export.m b/code/+types/+untyped/@DataStub/export.m similarity index 100% rename from +types/+untyped/@DataStub/export.m rename to code/+types/+untyped/@DataStub/export.m diff --git a/+types/+untyped/@DataStub/load_mat_style.m b/code/+types/+untyped/@DataStub/load_mat_style.m similarity index 100% rename from +types/+untyped/@DataStub/load_mat_style.m rename to code/+types/+untyped/@DataStub/load_mat_style.m diff --git a/+types/+untyped/Anon.m 
b/code/+types/+untyped/Anon.m similarity index 100% rename from +types/+untyped/Anon.m rename to code/+types/+untyped/Anon.m diff --git a/+types/+untyped/DataPipe.m b/code/+types/+untyped/DataPipe.m similarity index 100% rename from +types/+untyped/DataPipe.m rename to code/+types/+untyped/DataPipe.m diff --git a/+types/+untyped/DatasetClass.m b/code/+types/+untyped/DatasetClass.m similarity index 100% rename from +types/+untyped/DatasetClass.m rename to code/+types/+untyped/DatasetClass.m diff --git a/+types/+untyped/ExternalLink.m b/code/+types/+untyped/ExternalLink.m similarity index 100% rename from +types/+untyped/ExternalLink.m rename to code/+types/+untyped/ExternalLink.m diff --git a/+types/+untyped/GroupClass.m b/code/+types/+untyped/GroupClass.m similarity index 100% rename from +types/+untyped/GroupClass.m rename to code/+types/+untyped/GroupClass.m diff --git a/+types/+untyped/MetaClass.m b/code/+types/+untyped/MetaClass.m similarity index 100% rename from +types/+untyped/MetaClass.m rename to code/+types/+untyped/MetaClass.m diff --git a/+types/+untyped/ObjectView.m b/code/+types/+untyped/ObjectView.m similarity index 100% rename from +types/+untyped/ObjectView.m rename to code/+types/+untyped/ObjectView.m diff --git a/+types/+untyped/RegionView.m b/code/+types/+untyped/RegionView.m similarity index 100% rename from +types/+untyped/RegionView.m rename to code/+types/+untyped/RegionView.m diff --git a/+types/+untyped/Set.m b/code/+types/+untyped/Set.m similarity index 100% rename from +types/+untyped/Set.m rename to code/+types/+untyped/Set.m diff --git a/+types/+untyped/SoftLink.m b/code/+types/+untyped/SoftLink.m similarity index 100% rename from +types/+untyped/SoftLink.m rename to code/+types/+untyped/SoftLink.m diff --git a/+types/+util/+dynamictable/addColumn.m b/code/+types/+util/+dynamictable/addColumn.m similarity index 100% rename from +types/+util/+dynamictable/addColumn.m rename to code/+types/+util/+dynamictable/addColumn.m diff --git a/+types/+util/+dynamictable/addRawData.m b/code/+types/+util/+dynamictable/addRawData.m similarity index 100% rename from +types/+util/+dynamictable/addRawData.m rename to code/+types/+util/+dynamictable/addRawData.m diff --git a/+types/+util/+dynamictable/addRow.m b/code/+types/+util/+dynamictable/addRow.m similarity index 100% rename from +types/+util/+dynamictable/addRow.m rename to code/+types/+util/+dynamictable/addRow.m diff --git a/+types/+util/+dynamictable/addTableColumn.m b/code/+types/+util/+dynamictable/addTableColumn.m similarity index 100% rename from +types/+util/+dynamictable/addTableColumn.m rename to code/+types/+util/+dynamictable/addTableColumn.m diff --git a/+types/+util/+dynamictable/addVarargColumn.m b/code/+types/+util/+dynamictable/addVarargColumn.m similarity index 100% rename from +types/+util/+dynamictable/addVarargColumn.m rename to code/+types/+util/+dynamictable/addVarargColumn.m diff --git a/+types/+util/+dynamictable/addVarargRow.m b/code/+types/+util/+dynamictable/addVarargRow.m similarity index 100% rename from +types/+util/+dynamictable/addVarargRow.m rename to code/+types/+util/+dynamictable/addVarargRow.m diff --git a/+types/+util/+dynamictable/addVecInd.m b/code/+types/+util/+dynamictable/addVecInd.m similarity index 100% rename from +types/+util/+dynamictable/addVecInd.m rename to code/+types/+util/+dynamictable/addVecInd.m diff --git a/+types/+util/+dynamictable/checkConfig.m b/code/+types/+util/+dynamictable/checkConfig.m similarity index 100% rename from 
+types/+util/+dynamictable/checkConfig.m rename to code/+types/+util/+dynamictable/checkConfig.m diff --git a/+types/+util/+dynamictable/clear.m b/code/+types/+util/+dynamictable/clear.m similarity index 100% rename from +types/+util/+dynamictable/clear.m rename to code/+types/+util/+dynamictable/clear.m diff --git a/+types/+util/+dynamictable/getIndex.m b/code/+types/+util/+dynamictable/getIndex.m similarity index 100% rename from +types/+util/+dynamictable/getIndex.m rename to code/+types/+util/+dynamictable/getIndex.m diff --git a/+types/+util/+dynamictable/getRow.m b/code/+types/+util/+dynamictable/getRow.m similarity index 100% rename from +types/+util/+dynamictable/getRow.m rename to code/+types/+util/+dynamictable/getRow.m diff --git a/+types/+util/+dynamictable/getTypeMap.m b/code/+types/+util/+dynamictable/getTypeMap.m similarity index 100% rename from +types/+util/+dynamictable/getTypeMap.m rename to code/+types/+util/+dynamictable/getTypeMap.m diff --git a/+types/+util/+dynamictable/nwbToTable.m b/code/+types/+util/+dynamictable/nwbToTable.m similarity index 100% rename from +types/+util/+dynamictable/nwbToTable.m rename to code/+types/+util/+dynamictable/nwbToTable.m diff --git a/+types/+util/checkConstraint.m b/code/+types/+util/checkConstraint.m similarity index 100% rename from +types/+util/checkConstraint.m rename to code/+types/+util/checkConstraint.m diff --git a/+types/+util/checkDims.m b/code/+types/+util/checkDims.m similarity index 100% rename from +types/+util/checkDims.m rename to code/+types/+util/checkDims.m diff --git a/+types/+util/checkDtype.m b/code/+types/+util/checkDtype.m similarity index 100% rename from +types/+util/checkDtype.m rename to code/+types/+util/checkDtype.m diff --git a/+types/+util/checkSet.m b/code/+types/+util/checkSet.m similarity index 100% rename from +types/+util/checkSet.m rename to code/+types/+util/checkSet.m diff --git a/+types/+util/checkUnset.m b/code/+types/+util/checkUnset.m similarity index 100% rename from +types/+util/checkUnset.m rename to code/+types/+util/checkUnset.m diff --git a/+types/+util/correctType.m b/code/+types/+util/correctType.m similarity index 100% rename from +types/+util/correctType.m rename to code/+types/+util/correctType.m diff --git a/+types/+util/parseAnon.m b/code/+types/+util/parseAnon.m similarity index 100% rename from +types/+util/parseAnon.m rename to code/+types/+util/parseAnon.m diff --git a/+types/+util/parseConstrained.m b/code/+types/+util/parseConstrained.m similarity index 100% rename from +types/+util/parseConstrained.m rename to code/+types/+util/parseConstrained.m diff --git a/+util/createElectrodeTable.m b/code/+util/createElectrodeTable.m similarity index 100% rename from +util/createElectrodeTable.m rename to code/+util/createElectrodeTable.m diff --git a/+util/create_indexed_column.m b/code/+util/create_indexed_column.m similarity index 100% rename from +util/create_indexed_column.m rename to code/+util/create_indexed_column.m diff --git a/+util/getSchemaVersion.m b/code/+util/getSchemaVersion.m similarity index 100% rename from +util/getSchemaVersion.m rename to code/+util/getSchemaVersion.m diff --git a/+util/getUnitWaveforms.m b/code/+util/getUnitWaveforms.m similarity index 100% rename from +util/getUnitWaveforms.m rename to code/+util/getUnitWaveforms.m diff --git a/+util/loadEventAlignedSpikeTimes.m b/code/+util/loadEventAlignedSpikeTimes.m similarity index 100% rename from +util/loadEventAlignedSpikeTimes.m rename to code/+util/loadEventAlignedSpikeTimes.m diff --git 
a/+util/loadEventAlignedTimeSeriesData.m b/code/+util/loadEventAlignedTimeSeriesData.m similarity index 100% rename from +util/loadEventAlignedTimeSeriesData.m rename to code/+util/loadEventAlignedTimeSeriesData.m diff --git a/+util/loadTimeSeriesData.m b/code/+util/loadTimeSeriesData.m similarity index 100% rename from +util/loadTimeSeriesData.m rename to code/+util/loadTimeSeriesData.m diff --git a/+util/loadTimeSeriesTimestamps.m b/code/+util/loadTimeSeriesTimestamps.m similarity index 100% rename from +util/loadTimeSeriesTimestamps.m rename to code/+util/loadTimeSeriesTimestamps.m diff --git a/+util/loadTrialAlignedSpikeTimes.m b/code/+util/loadTrialAlignedSpikeTimes.m similarity index 100% rename from +util/loadTrialAlignedSpikeTimes.m rename to code/+util/loadTrialAlignedSpikeTimes.m diff --git a/+util/loadTrialAlignedTimeSeriesData.m b/code/+util/loadTrialAlignedTimeSeriesData.m similarity index 100% rename from +util/loadTrialAlignedTimeSeriesData.m rename to code/+util/loadTrialAlignedTimeSeriesData.m diff --git a/+util/nwbTree.m b/code/+util/nwbTree.m similarity index 100% rename from +util/nwbTree.m rename to code/+util/nwbTree.m diff --git a/+util/read_indexed_column.m b/code/+util/read_indexed_column.m similarity index 100% rename from +util/read_indexed_column.m rename to code/+util/read_indexed_column.m diff --git a/+util/table2nwb.m b/code/+util/table2nwb.m similarity index 100% rename from +util/table2nwb.m rename to code/+util/table2nwb.m diff --git a/NwbFile.m b/code/NwbFile.m similarity index 89% rename from NwbFile.m rename to code/NwbFile.m index e7dcddbc..71bf9478 100644 --- a/NwbFile.m +++ b/code/NwbFile.m @@ -1,18 +1,23 @@ classdef NwbFile < types.core.NWBFile - % NWBFILE Root object representing data read from an NWB file. - % - % Requires that core and extension NWB types have been generated - % and reside in a 'types' package on the matlab path. - % - % Example. Construct an object from scratch for export: - % nwb = NwbFile; - % nwb.epochs = types.core.Epochs; - % nwbExport(nwb, 'epoch.nwb'); - % - % See also NWBREAD, GENERATECORE, GENERATEEXTENSION +% NWBFILE - Root object representing an NWB file. +% +% Requires that core and extension NWB types have been generated +% and reside in a ``+types`` namespace on the MATLAB search path. +% +% Usage: +% Example 1 - Construct a simple NwbFile object for export:: +% +% nwb = NwbFile; +% nwb.epochs = types.core.Epochs; +% nwbExport(nwb, 'epoch.nwb'); +% +% See also: +% nwbRead, generateCore, generateExtension methods function obj = NwbFile(propValues) + % NWBFILE - Create an NWB File object + arguments propValues.?types.core.NWBFile propValues.nwb_version @@ -26,7 +31,7 @@ end function export(obj, filename, mode) - % export - Export NWB file object + % EXPORT - Export NWB file object arguments obj (1,1) NwbFile @@ -98,10 +103,14 @@ function export(obj, filename, mode) end function objectMap = searchFor(obj, typename, varargin) - % Searches this NwbFile object for a given typename + % searchFor - Search for for a given typename within the NwbFile object + % % Including the full namespace is optional. - % WARNING: The returned paths are resolvable but do not necessarily - % indicate a real HDF5 path. Their only function is to be resolvable. + % + % .. warning:: + % The returned paths are resolvable but do not necessarily + % indicate a real HDF5 path. Their only function is to be resolvable. + objectMap = searchProperties(... containers.Map,... obj,... 
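A brief, hedged usage sketch for the searchFor method shown in this hunk (the file name and type name are illustrative; assumes an NWB file readable with nwbRead):

    nwb = nwbRead('example_file.nwb');
    % Short type names are accepted; including the full namespace is optional.
    matches = nwb.searchFor('ElectricalSeries');
    % Keys are resolvable paths, though not necessarily real HDF5 paths (see the warning above).
    foundPaths = keys(matches);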
diff --git a/external_packages/fastsearch/fastsearch.m b/code/external_packages/fastsearch/fastsearch.m similarity index 100% rename from external_packages/fastsearch/fastsearch.m rename to code/external_packages/fastsearch/fastsearch.m diff --git a/external_packages/fastsearch/license.txt b/code/external_packages/fastsearch/license.txt similarity index 100% rename from external_packages/fastsearch/license.txt rename to code/external_packages/fastsearch/license.txt diff --git a/code/generateCore.m b/code/generateCore.m new file mode 100644 index 00000000..27a3a39e --- /dev/null +++ b/code/generateCore.m @@ -0,0 +1,65 @@ +function generateCore(version, options) +% GENERATECORE - Generate Matlab classes from NWB core schema files +% +% Syntax: +% GENERATECORE() Generate classes (Matlab m-files) from the +% NWB core namespace file. By default, generates off of the most recent +% nwb-schema release. +% +% GENERATECORE(version) Generate classes for the +% core namespace of the listed version. +% +% GENERATECORE(__, Name, Value) Generate classes based on optional +% name-value pairs controlling the output . +% +% A cache of schema data is generated in the ``namespaces`` subdirectory in +% the matnwb root directory. This is for allowing cross-referencing +% classes between multiple namespaces. +% +% Output files are placed in a ``+types`` subdirectory in the +% matnwb root directory directory. +% +% Usage: +% Example 1 - Generate core schemas for the latest version of NWB:: +% +% generateCore(); +% +% Example 2 - Generate core schemas for an older version of NWB:: +% +% generateCore('2.2.3'); +% +% Example 3 - Generate and save classes in a custom location:: +% +% % Generates the core class files in the specified directory. +% generateCore('savedir', saveDirectory) +% +% See also: +% generateExtension + + arguments + version (1,1) string {matnwb.common.mustBeValidSchemaVersion} = "latest" + options.savedir (1,1) string = misc.getMatnwbDir() + end + + if version == "latest" + version = matnwb.common.findLatestSchemaVersion(); + end + + schemaPath = fullfile(misc.getMatnwbDir(), "nwb-schema", version); + corePath = fullfile(schemaPath, "core", "nwb.namespace.yaml"); + commonPath = fullfile(schemaPath, ... + "hdmf-common-schema", ... + "common", ... + "namespace.yaml"); + assert(isfile(corePath), ... + 'NWB:GenerateCore:MissingCoreSchema', ... + 'Cannot find suitable core namespace for schema version `%s`', ... + version); + + namespaceFiles = corePath; + if isfile(commonPath) + % Important: generate common before core if common is available + namespaceFiles = [commonPath, namespaceFiles]; + end + generateExtension(namespaceFiles{:}, 'savedir', options.savedir); +end diff --git a/generateExtension.m b/code/generateExtension.m similarity index 52% rename from generateExtension.m rename to code/generateExtension.m index f4184350..21f6f6fa 100644 --- a/generateExtension.m +++ b/code/generateExtension.m @@ -1,22 +1,37 @@ function generateExtension(namespaceFilePath, options) - % GENERATEEXTENSION Generate Matlab classes from NWB extension schema file - % GENERATEEXTENSION(extension_path...) Generate classes - % (Matlab m-files) from one or more NWB schema extension namespace - % files. A registry of already generated core types is used to resolve - % dependent types. - % - % A cache of schema data is generated in the 'namespaces' subdirectory in - % the current working directory. This is for allowing cross-referencing - % classes between multiple namespaces. 
- % - % Output files are generated placed in a '+types' subdirectory in the - % current working directory. - % - % Example: - % generateExtension('schema\myext\myextension.namespace.yaml', 'schema\myext2\myext2.namespace.yaml'); - % - % See also GENERATECORE - +% GENERATEEXTENSION - Generate Matlab classes from NWB extension schema file +% +% Syntax: +% GENERATEEXTENSION(extension_path...) Generate classes (Matlab m-files) +% from one or more NWB schema extension namespace files. A registry of +% already generated core types is used to resolve dependent types. +% +% A cache of schema data is generated in the ``namespaces`` subdirectory in +% the matnwb root directory. This is for allowing cross-referencing +% classes between multiple namespaces. +% +% Output files are placed in a ``+types`` subdirectory in the +% matnwb root directory directory. +% +% Input Arguments: +% - namespaceFilePath (string) - +% Filepath pointing to a schema extension namespace file. This is a +% repeating argument, so multiple filepaths can be provided +% +% - options (name-value pairs) - +% Optional name-value pairs. Available options: +% +% - savedir (string) - +% A folder to save generated classes for NWB/extension types. +% +% Usage: +% Example 1 - Generate classes for custom schema extensions:: +% +% generateExtension('schema\myext\myextension.namespace.yaml', 'schema\myext2\myext2.namespace.yaml'); +% +% See also: +% generateCore + arguments (Repeating) namespaceFilePath (1,1) string {mustBeYamlFile} end diff --git a/jar/schema.jar b/code/jar/schema.jar similarity index 100% rename from jar/schema.jar rename to code/jar/schema.jar diff --git a/nwb-schema/2.0.2/core/nwb.base.yaml b/code/nwb-schema/2.0.2/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.0.2/core/nwb.base.yaml rename to code/nwb-schema/2.0.2/core/nwb.base.yaml diff --git a/nwb-schema/2.0.2/core/nwb.behavior.yaml b/code/nwb-schema/2.0.2/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.0.2/core/nwb.behavior.yaml rename to code/nwb-schema/2.0.2/core/nwb.behavior.yaml diff --git a/nwb-schema/2.0.2/core/nwb.ecephys.yaml b/code/nwb-schema/2.0.2/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.0.2/core/nwb.ecephys.yaml rename to code/nwb-schema/2.0.2/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.0.2/core/nwb.epoch.yaml b/code/nwb-schema/2.0.2/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.0.2/core/nwb.epoch.yaml rename to code/nwb-schema/2.0.2/core/nwb.epoch.yaml diff --git a/nwb-schema/2.0.2/core/nwb.file.yaml b/code/nwb-schema/2.0.2/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.0.2/core/nwb.file.yaml rename to code/nwb-schema/2.0.2/core/nwb.file.yaml diff --git a/nwb-schema/2.0.2/core/nwb.icephys.yaml b/code/nwb-schema/2.0.2/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.0.2/core/nwb.icephys.yaml rename to code/nwb-schema/2.0.2/core/nwb.icephys.yaml diff --git a/nwb-schema/2.0.2/core/nwb.image.yaml b/code/nwb-schema/2.0.2/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.0.2/core/nwb.image.yaml rename to code/nwb-schema/2.0.2/core/nwb.image.yaml diff --git a/nwb-schema/2.0.2/core/nwb.misc.yaml b/code/nwb-schema/2.0.2/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.0.2/core/nwb.misc.yaml rename to code/nwb-schema/2.0.2/core/nwb.misc.yaml diff --git a/nwb-schema/2.0.2/core/nwb.namespace.yaml b/code/nwb-schema/2.0.2/core/nwb.namespace.yaml similarity index 100% rename from 
nwb-schema/2.0.2/core/nwb.namespace.yaml rename to code/nwb-schema/2.0.2/core/nwb.namespace.yaml diff --git a/nwb-schema/2.0.2/core/nwb.ogen.yaml b/code/nwb-schema/2.0.2/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.0.2/core/nwb.ogen.yaml rename to code/nwb-schema/2.0.2/core/nwb.ogen.yaml diff --git a/nwb-schema/2.0.2/core/nwb.ophys.yaml b/code/nwb-schema/2.0.2/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.0.2/core/nwb.ophys.yaml rename to code/nwb-schema/2.0.2/core/nwb.ophys.yaml diff --git a/nwb-schema/2.0.2/core/nwb.retinotopy.yaml b/code/nwb-schema/2.0.2/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.0.2/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.0.2/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.1.0/core/nwb.base.yaml b/code/nwb-schema/2.1.0/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.1.0/core/nwb.base.yaml rename to code/nwb-schema/2.1.0/core/nwb.base.yaml diff --git a/nwb-schema/2.1.0/core/nwb.behavior.yaml b/code/nwb-schema/2.1.0/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.1.0/core/nwb.behavior.yaml rename to code/nwb-schema/2.1.0/core/nwb.behavior.yaml diff --git a/nwb-schema/2.1.0/core/nwb.ecephys.yaml b/code/nwb-schema/2.1.0/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.1.0/core/nwb.ecephys.yaml rename to code/nwb-schema/2.1.0/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.1.0/core/nwb.epoch.yaml b/code/nwb-schema/2.1.0/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.1.0/core/nwb.epoch.yaml rename to code/nwb-schema/2.1.0/core/nwb.epoch.yaml diff --git a/nwb-schema/2.1.0/core/nwb.file.yaml b/code/nwb-schema/2.1.0/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.1.0/core/nwb.file.yaml rename to code/nwb-schema/2.1.0/core/nwb.file.yaml diff --git a/nwb-schema/2.1.0/core/nwb.icephys.yaml b/code/nwb-schema/2.1.0/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.1.0/core/nwb.icephys.yaml rename to code/nwb-schema/2.1.0/core/nwb.icephys.yaml diff --git a/nwb-schema/2.1.0/core/nwb.image.yaml b/code/nwb-schema/2.1.0/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.1.0/core/nwb.image.yaml rename to code/nwb-schema/2.1.0/core/nwb.image.yaml diff --git a/nwb-schema/2.1.0/core/nwb.misc.yaml b/code/nwb-schema/2.1.0/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.1.0/core/nwb.misc.yaml rename to code/nwb-schema/2.1.0/core/nwb.misc.yaml diff --git a/nwb-schema/2.1.0/core/nwb.namespace.yaml b/code/nwb-schema/2.1.0/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.1.0/core/nwb.namespace.yaml rename to code/nwb-schema/2.1.0/core/nwb.namespace.yaml diff --git a/nwb-schema/2.1.0/core/nwb.ogen.yaml b/code/nwb-schema/2.1.0/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.1.0/core/nwb.ogen.yaml rename to code/nwb-schema/2.1.0/core/nwb.ogen.yaml diff --git a/nwb-schema/2.1.0/core/nwb.ophys.yaml b/code/nwb-schema/2.1.0/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.1.0/core/nwb.ophys.yaml rename to code/nwb-schema/2.1.0/core/nwb.ophys.yaml diff --git a/nwb-schema/2.1.0/core/nwb.retinotopy.yaml b/code/nwb-schema/2.1.0/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.1.0/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.1.0/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.2.0/core/nwb.base.yaml b/code/nwb-schema/2.2.0/core/nwb.base.yaml similarity index 100% rename from 
nwb-schema/2.2.0/core/nwb.base.yaml rename to code/nwb-schema/2.2.0/core/nwb.base.yaml diff --git a/nwb-schema/2.2.0/core/nwb.behavior.yaml b/code/nwb-schema/2.2.0/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.2.0/core/nwb.behavior.yaml rename to code/nwb-schema/2.2.0/core/nwb.behavior.yaml diff --git a/nwb-schema/2.2.0/core/nwb.device.yaml b/code/nwb-schema/2.2.0/core/nwb.device.yaml similarity index 100% rename from nwb-schema/2.2.0/core/nwb.device.yaml rename to code/nwb-schema/2.2.0/core/nwb.device.yaml diff --git a/nwb-schema/2.2.0/core/nwb.ecephys.yaml b/code/nwb-schema/2.2.0/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.2.0/core/nwb.ecephys.yaml rename to code/nwb-schema/2.2.0/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.2.0/core/nwb.epoch.yaml b/code/nwb-schema/2.2.0/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.2.0/core/nwb.epoch.yaml rename to code/nwb-schema/2.2.0/core/nwb.epoch.yaml diff --git a/nwb-schema/2.2.0/core/nwb.file.yaml b/code/nwb-schema/2.2.0/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.2.0/core/nwb.file.yaml rename to code/nwb-schema/2.2.0/core/nwb.file.yaml diff --git a/nwb-schema/2.2.0/core/nwb.icephys.yaml b/code/nwb-schema/2.2.0/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.2.0/core/nwb.icephys.yaml rename to code/nwb-schema/2.2.0/core/nwb.icephys.yaml diff --git a/nwb-schema/2.2.0/core/nwb.image.yaml b/code/nwb-schema/2.2.0/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.2.0/core/nwb.image.yaml rename to code/nwb-schema/2.2.0/core/nwb.image.yaml diff --git a/nwb-schema/2.2.0/core/nwb.misc.yaml b/code/nwb-schema/2.2.0/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.2.0/core/nwb.misc.yaml rename to code/nwb-schema/2.2.0/core/nwb.misc.yaml diff --git a/nwb-schema/2.2.0/core/nwb.namespace.yaml b/code/nwb-schema/2.2.0/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.2.0/core/nwb.namespace.yaml rename to code/nwb-schema/2.2.0/core/nwb.namespace.yaml diff --git a/nwb-schema/2.2.0/core/nwb.ogen.yaml b/code/nwb-schema/2.2.0/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.2.0/core/nwb.ogen.yaml rename to code/nwb-schema/2.2.0/core/nwb.ogen.yaml diff --git a/nwb-schema/2.2.0/core/nwb.ophys.yaml b/code/nwb-schema/2.2.0/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.2.0/core/nwb.ophys.yaml rename to code/nwb-schema/2.2.0/core/nwb.ophys.yaml diff --git a/nwb-schema/2.2.0/core/nwb.retinotopy.yaml b/code/nwb-schema/2.2.0/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.2.0/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.2.0/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.2.0/hdmf-common-schema/common/namespace.yaml b/code/nwb-schema/2.2.0/hdmf-common-schema/common/namespace.yaml similarity index 100% rename from nwb-schema/2.2.0/hdmf-common-schema/common/namespace.yaml rename to code/nwb-schema/2.2.0/hdmf-common-schema/common/namespace.yaml diff --git a/nwb-schema/2.2.0/hdmf-common-schema/common/sparse.yaml b/code/nwb-schema/2.2.0/hdmf-common-schema/common/sparse.yaml similarity index 100% rename from nwb-schema/2.2.0/hdmf-common-schema/common/sparse.yaml rename to code/nwb-schema/2.2.0/hdmf-common-schema/common/sparse.yaml diff --git a/nwb-schema/2.2.0/hdmf-common-schema/common/table.yaml b/code/nwb-schema/2.2.0/hdmf-common-schema/common/table.yaml similarity index 100% rename from nwb-schema/2.2.0/hdmf-common-schema/common/table.yaml 
rename to code/nwb-schema/2.2.0/hdmf-common-schema/common/table.yaml diff --git a/nwb-schema/2.2.1/core/nwb.base.yaml b/code/nwb-schema/2.2.1/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.base.yaml rename to code/nwb-schema/2.2.1/core/nwb.base.yaml diff --git a/nwb-schema/2.2.1/core/nwb.behavior.yaml b/code/nwb-schema/2.2.1/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.behavior.yaml rename to code/nwb-schema/2.2.1/core/nwb.behavior.yaml diff --git a/nwb-schema/2.2.1/core/nwb.device.yaml b/code/nwb-schema/2.2.1/core/nwb.device.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.device.yaml rename to code/nwb-schema/2.2.1/core/nwb.device.yaml diff --git a/nwb-schema/2.2.1/core/nwb.ecephys.yaml b/code/nwb-schema/2.2.1/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.ecephys.yaml rename to code/nwb-schema/2.2.1/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.2.1/core/nwb.epoch.yaml b/code/nwb-schema/2.2.1/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.epoch.yaml rename to code/nwb-schema/2.2.1/core/nwb.epoch.yaml diff --git a/nwb-schema/2.2.1/core/nwb.file.yaml b/code/nwb-schema/2.2.1/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.file.yaml rename to code/nwb-schema/2.2.1/core/nwb.file.yaml diff --git a/nwb-schema/2.2.1/core/nwb.icephys.yaml b/code/nwb-schema/2.2.1/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.icephys.yaml rename to code/nwb-schema/2.2.1/core/nwb.icephys.yaml diff --git a/nwb-schema/2.2.1/core/nwb.image.yaml b/code/nwb-schema/2.2.1/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.image.yaml rename to code/nwb-schema/2.2.1/core/nwb.image.yaml diff --git a/nwb-schema/2.2.1/core/nwb.misc.yaml b/code/nwb-schema/2.2.1/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.misc.yaml rename to code/nwb-schema/2.2.1/core/nwb.misc.yaml diff --git a/nwb-schema/2.2.1/core/nwb.namespace.yaml b/code/nwb-schema/2.2.1/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.namespace.yaml rename to code/nwb-schema/2.2.1/core/nwb.namespace.yaml diff --git a/nwb-schema/2.2.1/core/nwb.ogen.yaml b/code/nwb-schema/2.2.1/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.ogen.yaml rename to code/nwb-schema/2.2.1/core/nwb.ogen.yaml diff --git a/nwb-schema/2.2.1/core/nwb.ophys.yaml b/code/nwb-schema/2.2.1/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.ophys.yaml rename to code/nwb-schema/2.2.1/core/nwb.ophys.yaml diff --git a/nwb-schema/2.2.1/core/nwb.retinotopy.yaml b/code/nwb-schema/2.2.1/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.2.1/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.2.1/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.2.1/hdmf-common-schema/common/namespace.yaml b/code/nwb-schema/2.2.1/hdmf-common-schema/common/namespace.yaml similarity index 100% rename from nwb-schema/2.2.1/hdmf-common-schema/common/namespace.yaml rename to code/nwb-schema/2.2.1/hdmf-common-schema/common/namespace.yaml diff --git a/nwb-schema/2.2.1/hdmf-common-schema/common/sparse.yaml b/code/nwb-schema/2.2.1/hdmf-common-schema/common/sparse.yaml similarity index 100% rename from nwb-schema/2.2.1/hdmf-common-schema/common/sparse.yaml rename to code/nwb-schema/2.2.1/hdmf-common-schema/common/sparse.yaml diff --git 
a/nwb-schema/2.2.1/hdmf-common-schema/common/table.yaml b/code/nwb-schema/2.2.1/hdmf-common-schema/common/table.yaml similarity index 100% rename from nwb-schema/2.2.1/hdmf-common-schema/common/table.yaml rename to code/nwb-schema/2.2.1/hdmf-common-schema/common/table.yaml diff --git a/nwb-schema/2.2.2/core/nwb.base.yaml b/code/nwb-schema/2.2.2/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.base.yaml rename to code/nwb-schema/2.2.2/core/nwb.base.yaml diff --git a/nwb-schema/2.2.2/core/nwb.behavior.yaml b/code/nwb-schema/2.2.2/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.behavior.yaml rename to code/nwb-schema/2.2.2/core/nwb.behavior.yaml diff --git a/nwb-schema/2.2.2/core/nwb.device.yaml b/code/nwb-schema/2.2.2/core/nwb.device.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.device.yaml rename to code/nwb-schema/2.2.2/core/nwb.device.yaml diff --git a/nwb-schema/2.2.2/core/nwb.ecephys.yaml b/code/nwb-schema/2.2.2/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.ecephys.yaml rename to code/nwb-schema/2.2.2/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.2.2/core/nwb.epoch.yaml b/code/nwb-schema/2.2.2/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.epoch.yaml rename to code/nwb-schema/2.2.2/core/nwb.epoch.yaml diff --git a/nwb-schema/2.2.2/core/nwb.file.yaml b/code/nwb-schema/2.2.2/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.file.yaml rename to code/nwb-schema/2.2.2/core/nwb.file.yaml diff --git a/nwb-schema/2.2.2/core/nwb.icephys.yaml b/code/nwb-schema/2.2.2/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.icephys.yaml rename to code/nwb-schema/2.2.2/core/nwb.icephys.yaml diff --git a/nwb-schema/2.2.2/core/nwb.image.yaml b/code/nwb-schema/2.2.2/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.image.yaml rename to code/nwb-schema/2.2.2/core/nwb.image.yaml diff --git a/nwb-schema/2.2.2/core/nwb.misc.yaml b/code/nwb-schema/2.2.2/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.misc.yaml rename to code/nwb-schema/2.2.2/core/nwb.misc.yaml diff --git a/nwb-schema/2.2.2/core/nwb.namespace.yaml b/code/nwb-schema/2.2.2/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.namespace.yaml rename to code/nwb-schema/2.2.2/core/nwb.namespace.yaml diff --git a/nwb-schema/2.2.2/core/nwb.ogen.yaml b/code/nwb-schema/2.2.2/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.ogen.yaml rename to code/nwb-schema/2.2.2/core/nwb.ogen.yaml diff --git a/nwb-schema/2.2.2/core/nwb.ophys.yaml b/code/nwb-schema/2.2.2/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.ophys.yaml rename to code/nwb-schema/2.2.2/core/nwb.ophys.yaml diff --git a/nwb-schema/2.2.2/core/nwb.retinotopy.yaml b/code/nwb-schema/2.2.2/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.2.2/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.2.2/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.2.2/hdmf-common-schema/common/namespace.yaml b/code/nwb-schema/2.2.2/hdmf-common-schema/common/namespace.yaml similarity index 100% rename from nwb-schema/2.2.2/hdmf-common-schema/common/namespace.yaml rename to code/nwb-schema/2.2.2/hdmf-common-schema/common/namespace.yaml diff --git a/nwb-schema/2.2.2/hdmf-common-schema/common/sparse.yaml 
b/code/nwb-schema/2.2.2/hdmf-common-schema/common/sparse.yaml similarity index 100% rename from nwb-schema/2.2.2/hdmf-common-schema/common/sparse.yaml rename to code/nwb-schema/2.2.2/hdmf-common-schema/common/sparse.yaml diff --git a/nwb-schema/2.2.2/hdmf-common-schema/common/table.yaml b/code/nwb-schema/2.2.2/hdmf-common-schema/common/table.yaml similarity index 100% rename from nwb-schema/2.2.2/hdmf-common-schema/common/table.yaml rename to code/nwb-schema/2.2.2/hdmf-common-schema/common/table.yaml diff --git a/nwb-schema/2.2.3/core/nwb.base.yaml b/code/nwb-schema/2.2.3/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.base.yaml rename to code/nwb-schema/2.2.3/core/nwb.base.yaml diff --git a/nwb-schema/2.2.3/core/nwb.behavior.yaml b/code/nwb-schema/2.2.3/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.behavior.yaml rename to code/nwb-schema/2.2.3/core/nwb.behavior.yaml diff --git a/nwb-schema/2.2.3/core/nwb.device.yaml b/code/nwb-schema/2.2.3/core/nwb.device.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.device.yaml rename to code/nwb-schema/2.2.3/core/nwb.device.yaml diff --git a/nwb-schema/2.2.3/core/nwb.ecephys.yaml b/code/nwb-schema/2.2.3/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.ecephys.yaml rename to code/nwb-schema/2.2.3/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.2.3/core/nwb.epoch.yaml b/code/nwb-schema/2.2.3/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.epoch.yaml rename to code/nwb-schema/2.2.3/core/nwb.epoch.yaml diff --git a/nwb-schema/2.2.3/core/nwb.file.yaml b/code/nwb-schema/2.2.3/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.file.yaml rename to code/nwb-schema/2.2.3/core/nwb.file.yaml diff --git a/nwb-schema/2.2.3/core/nwb.icephys.yaml b/code/nwb-schema/2.2.3/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.icephys.yaml rename to code/nwb-schema/2.2.3/core/nwb.icephys.yaml diff --git a/nwb-schema/2.2.3/core/nwb.image.yaml b/code/nwb-schema/2.2.3/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.image.yaml rename to code/nwb-schema/2.2.3/core/nwb.image.yaml diff --git a/nwb-schema/2.2.3/core/nwb.misc.yaml b/code/nwb-schema/2.2.3/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.misc.yaml rename to code/nwb-schema/2.2.3/core/nwb.misc.yaml diff --git a/nwb-schema/2.2.3/core/nwb.namespace.yaml b/code/nwb-schema/2.2.3/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.namespace.yaml rename to code/nwb-schema/2.2.3/core/nwb.namespace.yaml diff --git a/nwb-schema/2.2.3/core/nwb.ogen.yaml b/code/nwb-schema/2.2.3/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.ogen.yaml rename to code/nwb-schema/2.2.3/core/nwb.ogen.yaml diff --git a/nwb-schema/2.2.3/core/nwb.ophys.yaml b/code/nwb-schema/2.2.3/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.ophys.yaml rename to code/nwb-schema/2.2.3/core/nwb.ophys.yaml diff --git a/nwb-schema/2.2.3/core/nwb.retinotopy.yaml b/code/nwb-schema/2.2.3/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.2.3/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.2.3/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.2.3/hdmf-common-schema/common/namespace.yaml b/code/nwb-schema/2.2.3/hdmf-common-schema/common/namespace.yaml similarity index 100% rename from 
nwb-schema/2.2.3/hdmf-common-schema/common/namespace.yaml rename to code/nwb-schema/2.2.3/hdmf-common-schema/common/namespace.yaml diff --git a/nwb-schema/2.2.3/hdmf-common-schema/common/sparse.yaml b/code/nwb-schema/2.2.3/hdmf-common-schema/common/sparse.yaml similarity index 100% rename from nwb-schema/2.2.3/hdmf-common-schema/common/sparse.yaml rename to code/nwb-schema/2.2.3/hdmf-common-schema/common/sparse.yaml diff --git a/nwb-schema/2.2.3/hdmf-common-schema/common/table.yaml b/code/nwb-schema/2.2.3/hdmf-common-schema/common/table.yaml similarity index 100% rename from nwb-schema/2.2.3/hdmf-common-schema/common/table.yaml rename to code/nwb-schema/2.2.3/hdmf-common-schema/common/table.yaml diff --git a/nwb-schema/2.2.4/core/nwb.base.yaml b/code/nwb-schema/2.2.4/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.base.yaml rename to code/nwb-schema/2.2.4/core/nwb.base.yaml diff --git a/nwb-schema/2.2.4/core/nwb.behavior.yaml b/code/nwb-schema/2.2.4/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.behavior.yaml rename to code/nwb-schema/2.2.4/core/nwb.behavior.yaml diff --git a/nwb-schema/2.2.4/core/nwb.device.yaml b/code/nwb-schema/2.2.4/core/nwb.device.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.device.yaml rename to code/nwb-schema/2.2.4/core/nwb.device.yaml diff --git a/nwb-schema/2.2.4/core/nwb.ecephys.yaml b/code/nwb-schema/2.2.4/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.ecephys.yaml rename to code/nwb-schema/2.2.4/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.2.4/core/nwb.epoch.yaml b/code/nwb-schema/2.2.4/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.epoch.yaml rename to code/nwb-schema/2.2.4/core/nwb.epoch.yaml diff --git a/nwb-schema/2.2.4/core/nwb.file.yaml b/code/nwb-schema/2.2.4/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.file.yaml rename to code/nwb-schema/2.2.4/core/nwb.file.yaml diff --git a/nwb-schema/2.2.4/core/nwb.icephys.yaml b/code/nwb-schema/2.2.4/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.icephys.yaml rename to code/nwb-schema/2.2.4/core/nwb.icephys.yaml diff --git a/nwb-schema/2.2.4/core/nwb.image.yaml b/code/nwb-schema/2.2.4/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.image.yaml rename to code/nwb-schema/2.2.4/core/nwb.image.yaml diff --git a/nwb-schema/2.2.4/core/nwb.misc.yaml b/code/nwb-schema/2.2.4/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.misc.yaml rename to code/nwb-schema/2.2.4/core/nwb.misc.yaml diff --git a/nwb-schema/2.2.4/core/nwb.namespace.yaml b/code/nwb-schema/2.2.4/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.namespace.yaml rename to code/nwb-schema/2.2.4/core/nwb.namespace.yaml diff --git a/nwb-schema/2.2.4/core/nwb.ogen.yaml b/code/nwb-schema/2.2.4/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.ogen.yaml rename to code/nwb-schema/2.2.4/core/nwb.ogen.yaml diff --git a/nwb-schema/2.2.4/core/nwb.ophys.yaml b/code/nwb-schema/2.2.4/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.ophys.yaml rename to code/nwb-schema/2.2.4/core/nwb.ophys.yaml diff --git a/nwb-schema/2.2.4/core/nwb.retinotopy.yaml b/code/nwb-schema/2.2.4/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.2.4/core/nwb.retinotopy.yaml rename to 
code/nwb-schema/2.2.4/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.2.4/hdmf-common-schema/common/namespace.yaml b/code/nwb-schema/2.2.4/hdmf-common-schema/common/namespace.yaml similarity index 100% rename from nwb-schema/2.2.4/hdmf-common-schema/common/namespace.yaml rename to code/nwb-schema/2.2.4/hdmf-common-schema/common/namespace.yaml diff --git a/nwb-schema/2.2.4/hdmf-common-schema/common/sparse.yaml b/code/nwb-schema/2.2.4/hdmf-common-schema/common/sparse.yaml similarity index 100% rename from nwb-schema/2.2.4/hdmf-common-schema/common/sparse.yaml rename to code/nwb-schema/2.2.4/hdmf-common-schema/common/sparse.yaml diff --git a/nwb-schema/2.2.4/hdmf-common-schema/common/table.yaml b/code/nwb-schema/2.2.4/hdmf-common-schema/common/table.yaml similarity index 100% rename from nwb-schema/2.2.4/hdmf-common-schema/common/table.yaml rename to code/nwb-schema/2.2.4/hdmf-common-schema/common/table.yaml diff --git a/nwb-schema/2.2.5/core/nwb.base.yaml b/code/nwb-schema/2.2.5/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.base.yaml rename to code/nwb-schema/2.2.5/core/nwb.base.yaml diff --git a/nwb-schema/2.2.5/core/nwb.behavior.yaml b/code/nwb-schema/2.2.5/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.behavior.yaml rename to code/nwb-schema/2.2.5/core/nwb.behavior.yaml diff --git a/nwb-schema/2.2.5/core/nwb.device.yaml b/code/nwb-schema/2.2.5/core/nwb.device.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.device.yaml rename to code/nwb-schema/2.2.5/core/nwb.device.yaml diff --git a/nwb-schema/2.2.5/core/nwb.ecephys.yaml b/code/nwb-schema/2.2.5/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.ecephys.yaml rename to code/nwb-schema/2.2.5/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.2.5/core/nwb.epoch.yaml b/code/nwb-schema/2.2.5/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.epoch.yaml rename to code/nwb-schema/2.2.5/core/nwb.epoch.yaml diff --git a/nwb-schema/2.2.5/core/nwb.file.yaml b/code/nwb-schema/2.2.5/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.file.yaml rename to code/nwb-schema/2.2.5/core/nwb.file.yaml diff --git a/nwb-schema/2.2.5/core/nwb.icephys.yaml b/code/nwb-schema/2.2.5/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.icephys.yaml rename to code/nwb-schema/2.2.5/core/nwb.icephys.yaml diff --git a/nwb-schema/2.2.5/core/nwb.image.yaml b/code/nwb-schema/2.2.5/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.image.yaml rename to code/nwb-schema/2.2.5/core/nwb.image.yaml diff --git a/nwb-schema/2.2.5/core/nwb.misc.yaml b/code/nwb-schema/2.2.5/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.misc.yaml rename to code/nwb-schema/2.2.5/core/nwb.misc.yaml diff --git a/nwb-schema/2.2.5/core/nwb.namespace.yaml b/code/nwb-schema/2.2.5/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.namespace.yaml rename to code/nwb-schema/2.2.5/core/nwb.namespace.yaml diff --git a/nwb-schema/2.2.5/core/nwb.ogen.yaml b/code/nwb-schema/2.2.5/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.ogen.yaml rename to code/nwb-schema/2.2.5/core/nwb.ogen.yaml diff --git a/nwb-schema/2.2.5/core/nwb.ophys.yaml b/code/nwb-schema/2.2.5/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.ophys.yaml rename to 
code/nwb-schema/2.2.5/core/nwb.ophys.yaml diff --git a/nwb-schema/2.2.5/core/nwb.retinotopy.yaml b/code/nwb-schema/2.2.5/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.2.5/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.2.5/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.2.5/hdmf-common-schema/common/namespace.yaml b/code/nwb-schema/2.2.5/hdmf-common-schema/common/namespace.yaml similarity index 100% rename from nwb-schema/2.2.5/hdmf-common-schema/common/namespace.yaml rename to code/nwb-schema/2.2.5/hdmf-common-schema/common/namespace.yaml diff --git a/nwb-schema/2.2.5/hdmf-common-schema/common/sparse.yaml b/code/nwb-schema/2.2.5/hdmf-common-schema/common/sparse.yaml similarity index 100% rename from nwb-schema/2.2.5/hdmf-common-schema/common/sparse.yaml rename to code/nwb-schema/2.2.5/hdmf-common-schema/common/sparse.yaml diff --git a/nwb-schema/2.2.5/hdmf-common-schema/common/table.yaml b/code/nwb-schema/2.2.5/hdmf-common-schema/common/table.yaml similarity index 100% rename from nwb-schema/2.2.5/hdmf-common-schema/common/table.yaml rename to code/nwb-schema/2.2.5/hdmf-common-schema/common/table.yaml diff --git a/nwb-schema/2.3.0/core/nwb.base.yaml b/code/nwb-schema/2.3.0/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.base.yaml rename to code/nwb-schema/2.3.0/core/nwb.base.yaml diff --git a/nwb-schema/2.3.0/core/nwb.behavior.yaml b/code/nwb-schema/2.3.0/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.behavior.yaml rename to code/nwb-schema/2.3.0/core/nwb.behavior.yaml diff --git a/nwb-schema/2.3.0/core/nwb.device.yaml b/code/nwb-schema/2.3.0/core/nwb.device.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.device.yaml rename to code/nwb-schema/2.3.0/core/nwb.device.yaml diff --git a/nwb-schema/2.3.0/core/nwb.ecephys.yaml b/code/nwb-schema/2.3.0/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.ecephys.yaml rename to code/nwb-schema/2.3.0/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.3.0/core/nwb.epoch.yaml b/code/nwb-schema/2.3.0/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.epoch.yaml rename to code/nwb-schema/2.3.0/core/nwb.epoch.yaml diff --git a/nwb-schema/2.3.0/core/nwb.file.yaml b/code/nwb-schema/2.3.0/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.file.yaml rename to code/nwb-schema/2.3.0/core/nwb.file.yaml diff --git a/nwb-schema/2.3.0/core/nwb.icephys.yaml b/code/nwb-schema/2.3.0/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.icephys.yaml rename to code/nwb-schema/2.3.0/core/nwb.icephys.yaml diff --git a/nwb-schema/2.3.0/core/nwb.image.yaml b/code/nwb-schema/2.3.0/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.image.yaml rename to code/nwb-schema/2.3.0/core/nwb.image.yaml diff --git a/nwb-schema/2.3.0/core/nwb.misc.yaml b/code/nwb-schema/2.3.0/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.misc.yaml rename to code/nwb-schema/2.3.0/core/nwb.misc.yaml diff --git a/nwb-schema/2.3.0/core/nwb.namespace.yaml b/code/nwb-schema/2.3.0/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.namespace.yaml rename to code/nwb-schema/2.3.0/core/nwb.namespace.yaml diff --git a/nwb-schema/2.3.0/core/nwb.ogen.yaml b/code/nwb-schema/2.3.0/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.ogen.yaml rename to 
code/nwb-schema/2.3.0/core/nwb.ogen.yaml diff --git a/nwb-schema/2.3.0/core/nwb.ophys.yaml b/code/nwb-schema/2.3.0/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.ophys.yaml rename to code/nwb-schema/2.3.0/core/nwb.ophys.yaml diff --git a/nwb-schema/2.3.0/core/nwb.retinotopy.yaml b/code/nwb-schema/2.3.0/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.3.0/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.3.0/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.3.0/hdmf-common-schema/common/base.yaml b/code/nwb-schema/2.3.0/hdmf-common-schema/common/base.yaml similarity index 100% rename from nwb-schema/2.3.0/hdmf-common-schema/common/base.yaml rename to code/nwb-schema/2.3.0/hdmf-common-schema/common/base.yaml diff --git a/nwb-schema/2.3.0/hdmf-common-schema/common/experimental.yaml b/code/nwb-schema/2.3.0/hdmf-common-schema/common/experimental.yaml similarity index 100% rename from nwb-schema/2.3.0/hdmf-common-schema/common/experimental.yaml rename to code/nwb-schema/2.3.0/hdmf-common-schema/common/experimental.yaml diff --git a/nwb-schema/2.3.0/hdmf-common-schema/common/namespace.yaml b/code/nwb-schema/2.3.0/hdmf-common-schema/common/namespace.yaml similarity index 100% rename from nwb-schema/2.3.0/hdmf-common-schema/common/namespace.yaml rename to code/nwb-schema/2.3.0/hdmf-common-schema/common/namespace.yaml diff --git a/nwb-schema/2.3.0/hdmf-common-schema/common/resources.yaml b/code/nwb-schema/2.3.0/hdmf-common-schema/common/resources.yaml similarity index 100% rename from nwb-schema/2.3.0/hdmf-common-schema/common/resources.yaml rename to code/nwb-schema/2.3.0/hdmf-common-schema/common/resources.yaml diff --git a/nwb-schema/2.3.0/hdmf-common-schema/common/sparse.yaml b/code/nwb-schema/2.3.0/hdmf-common-schema/common/sparse.yaml similarity index 100% rename from nwb-schema/2.3.0/hdmf-common-schema/common/sparse.yaml rename to code/nwb-schema/2.3.0/hdmf-common-schema/common/sparse.yaml diff --git a/nwb-schema/2.3.0/hdmf-common-schema/common/table.yaml b/code/nwb-schema/2.3.0/hdmf-common-schema/common/table.yaml similarity index 100% rename from nwb-schema/2.3.0/hdmf-common-schema/common/table.yaml rename to code/nwb-schema/2.3.0/hdmf-common-schema/common/table.yaml diff --git a/nwb-schema/2.4.0/core/nwb.base.yaml b/code/nwb-schema/2.4.0/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.base.yaml rename to code/nwb-schema/2.4.0/core/nwb.base.yaml diff --git a/nwb-schema/2.4.0/core/nwb.behavior.yaml b/code/nwb-schema/2.4.0/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.behavior.yaml rename to code/nwb-schema/2.4.0/core/nwb.behavior.yaml diff --git a/nwb-schema/2.4.0/core/nwb.device.yaml b/code/nwb-schema/2.4.0/core/nwb.device.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.device.yaml rename to code/nwb-schema/2.4.0/core/nwb.device.yaml diff --git a/nwb-schema/2.4.0/core/nwb.ecephys.yaml b/code/nwb-schema/2.4.0/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.ecephys.yaml rename to code/nwb-schema/2.4.0/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.4.0/core/nwb.epoch.yaml b/code/nwb-schema/2.4.0/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.epoch.yaml rename to code/nwb-schema/2.4.0/core/nwb.epoch.yaml diff --git a/nwb-schema/2.4.0/core/nwb.file.yaml b/code/nwb-schema/2.4.0/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.file.yaml rename to 
code/nwb-schema/2.4.0/core/nwb.file.yaml diff --git a/nwb-schema/2.4.0/core/nwb.icephys.yaml b/code/nwb-schema/2.4.0/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.icephys.yaml rename to code/nwb-schema/2.4.0/core/nwb.icephys.yaml diff --git a/nwb-schema/2.4.0/core/nwb.image.yaml b/code/nwb-schema/2.4.0/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.image.yaml rename to code/nwb-schema/2.4.0/core/nwb.image.yaml diff --git a/nwb-schema/2.4.0/core/nwb.misc.yaml b/code/nwb-schema/2.4.0/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.misc.yaml rename to code/nwb-schema/2.4.0/core/nwb.misc.yaml diff --git a/nwb-schema/2.4.0/core/nwb.namespace.yaml b/code/nwb-schema/2.4.0/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.namespace.yaml rename to code/nwb-schema/2.4.0/core/nwb.namespace.yaml diff --git a/nwb-schema/2.4.0/core/nwb.ogen.yaml b/code/nwb-schema/2.4.0/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.ogen.yaml rename to code/nwb-schema/2.4.0/core/nwb.ogen.yaml diff --git a/nwb-schema/2.4.0/core/nwb.ophys.yaml b/code/nwb-schema/2.4.0/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.ophys.yaml rename to code/nwb-schema/2.4.0/core/nwb.ophys.yaml diff --git a/nwb-schema/2.4.0/core/nwb.retinotopy.yaml b/code/nwb-schema/2.4.0/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.4.0/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.4.0/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.4.0/hdmf-common-schema/common/base.yaml b/code/nwb-schema/2.4.0/hdmf-common-schema/common/base.yaml similarity index 100% rename from nwb-schema/2.4.0/hdmf-common-schema/common/base.yaml rename to code/nwb-schema/2.4.0/hdmf-common-schema/common/base.yaml diff --git a/nwb-schema/2.4.0/hdmf-common-schema/common/experimental.yaml b/code/nwb-schema/2.4.0/hdmf-common-schema/common/experimental.yaml similarity index 100% rename from nwb-schema/2.4.0/hdmf-common-schema/common/experimental.yaml rename to code/nwb-schema/2.4.0/hdmf-common-schema/common/experimental.yaml diff --git a/nwb-schema/2.4.0/hdmf-common-schema/common/namespace.yaml b/code/nwb-schema/2.4.0/hdmf-common-schema/common/namespace.yaml similarity index 100% rename from nwb-schema/2.4.0/hdmf-common-schema/common/namespace.yaml rename to code/nwb-schema/2.4.0/hdmf-common-schema/common/namespace.yaml diff --git a/nwb-schema/2.4.0/hdmf-common-schema/common/resources.yaml b/code/nwb-schema/2.4.0/hdmf-common-schema/common/resources.yaml similarity index 100% rename from nwb-schema/2.4.0/hdmf-common-schema/common/resources.yaml rename to code/nwb-schema/2.4.0/hdmf-common-schema/common/resources.yaml diff --git a/nwb-schema/2.4.0/hdmf-common-schema/common/sparse.yaml b/code/nwb-schema/2.4.0/hdmf-common-schema/common/sparse.yaml similarity index 100% rename from nwb-schema/2.4.0/hdmf-common-schema/common/sparse.yaml rename to code/nwb-schema/2.4.0/hdmf-common-schema/common/sparse.yaml diff --git a/nwb-schema/2.4.0/hdmf-common-schema/common/table.yaml b/code/nwb-schema/2.4.0/hdmf-common-schema/common/table.yaml similarity index 100% rename from nwb-schema/2.4.0/hdmf-common-schema/common/table.yaml rename to code/nwb-schema/2.4.0/hdmf-common-schema/common/table.yaml diff --git a/nwb-schema/2.5.0/core/nwb.base.yaml b/code/nwb-schema/2.5.0/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.base.yaml rename to 
code/nwb-schema/2.5.0/core/nwb.base.yaml diff --git a/nwb-schema/2.5.0/core/nwb.behavior.yaml b/code/nwb-schema/2.5.0/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.behavior.yaml rename to code/nwb-schema/2.5.0/core/nwb.behavior.yaml diff --git a/nwb-schema/2.5.0/core/nwb.device.yaml b/code/nwb-schema/2.5.0/core/nwb.device.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.device.yaml rename to code/nwb-schema/2.5.0/core/nwb.device.yaml diff --git a/nwb-schema/2.5.0/core/nwb.ecephys.yaml b/code/nwb-schema/2.5.0/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.ecephys.yaml rename to code/nwb-schema/2.5.0/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.5.0/core/nwb.epoch.yaml b/code/nwb-schema/2.5.0/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.epoch.yaml rename to code/nwb-schema/2.5.0/core/nwb.epoch.yaml diff --git a/nwb-schema/2.5.0/core/nwb.file.yaml b/code/nwb-schema/2.5.0/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.file.yaml rename to code/nwb-schema/2.5.0/core/nwb.file.yaml diff --git a/nwb-schema/2.5.0/core/nwb.icephys.yaml b/code/nwb-schema/2.5.0/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.icephys.yaml rename to code/nwb-schema/2.5.0/core/nwb.icephys.yaml diff --git a/nwb-schema/2.5.0/core/nwb.image.yaml b/code/nwb-schema/2.5.0/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.image.yaml rename to code/nwb-schema/2.5.0/core/nwb.image.yaml diff --git a/nwb-schema/2.5.0/core/nwb.misc.yaml b/code/nwb-schema/2.5.0/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.misc.yaml rename to code/nwb-schema/2.5.0/core/nwb.misc.yaml diff --git a/nwb-schema/2.5.0/core/nwb.namespace.yaml b/code/nwb-schema/2.5.0/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.namespace.yaml rename to code/nwb-schema/2.5.0/core/nwb.namespace.yaml diff --git a/nwb-schema/2.5.0/core/nwb.ogen.yaml b/code/nwb-schema/2.5.0/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.ogen.yaml rename to code/nwb-schema/2.5.0/core/nwb.ogen.yaml diff --git a/nwb-schema/2.5.0/core/nwb.ophys.yaml b/code/nwb-schema/2.5.0/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.ophys.yaml rename to code/nwb-schema/2.5.0/core/nwb.ophys.yaml diff --git a/nwb-schema/2.5.0/core/nwb.retinotopy.yaml b/code/nwb-schema/2.5.0/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.5.0/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.5.0/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.5.0/hdmf-common-schema/common/base.yaml b/code/nwb-schema/2.5.0/hdmf-common-schema/common/base.yaml similarity index 100% rename from nwb-schema/2.5.0/hdmf-common-schema/common/base.yaml rename to code/nwb-schema/2.5.0/hdmf-common-schema/common/base.yaml diff --git a/nwb-schema/2.5.0/hdmf-common-schema/common/experimental.yaml b/code/nwb-schema/2.5.0/hdmf-common-schema/common/experimental.yaml similarity index 100% rename from nwb-schema/2.5.0/hdmf-common-schema/common/experimental.yaml rename to code/nwb-schema/2.5.0/hdmf-common-schema/common/experimental.yaml diff --git a/nwb-schema/2.5.0/hdmf-common-schema/common/namespace.yaml b/code/nwb-schema/2.5.0/hdmf-common-schema/common/namespace.yaml similarity index 100% rename from nwb-schema/2.5.0/hdmf-common-schema/common/namespace.yaml rename to 
code/nwb-schema/2.5.0/hdmf-common-schema/common/namespace.yaml diff --git a/nwb-schema/2.5.0/hdmf-common-schema/common/resources.yaml b/code/nwb-schema/2.5.0/hdmf-common-schema/common/resources.yaml similarity index 100% rename from nwb-schema/2.5.0/hdmf-common-schema/common/resources.yaml rename to code/nwb-schema/2.5.0/hdmf-common-schema/common/resources.yaml diff --git a/nwb-schema/2.5.0/hdmf-common-schema/common/sparse.yaml b/code/nwb-schema/2.5.0/hdmf-common-schema/common/sparse.yaml similarity index 100% rename from nwb-schema/2.5.0/hdmf-common-schema/common/sparse.yaml rename to code/nwb-schema/2.5.0/hdmf-common-schema/common/sparse.yaml diff --git a/nwb-schema/2.5.0/hdmf-common-schema/common/table.yaml b/code/nwb-schema/2.5.0/hdmf-common-schema/common/table.yaml similarity index 100% rename from nwb-schema/2.5.0/hdmf-common-schema/common/table.yaml rename to code/nwb-schema/2.5.0/hdmf-common-schema/common/table.yaml diff --git a/nwb-schema/2.6.0/core/nwb.base.yaml b/code/nwb-schema/2.6.0/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.base.yaml rename to code/nwb-schema/2.6.0/core/nwb.base.yaml diff --git a/nwb-schema/2.6.0/core/nwb.behavior.yaml b/code/nwb-schema/2.6.0/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.behavior.yaml rename to code/nwb-schema/2.6.0/core/nwb.behavior.yaml diff --git a/nwb-schema/2.6.0/core/nwb.device.yaml b/code/nwb-schema/2.6.0/core/nwb.device.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.device.yaml rename to code/nwb-schema/2.6.0/core/nwb.device.yaml diff --git a/nwb-schema/2.6.0/core/nwb.ecephys.yaml b/code/nwb-schema/2.6.0/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.ecephys.yaml rename to code/nwb-schema/2.6.0/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.6.0/core/nwb.epoch.yaml b/code/nwb-schema/2.6.0/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.epoch.yaml rename to code/nwb-schema/2.6.0/core/nwb.epoch.yaml diff --git a/nwb-schema/2.6.0/core/nwb.file.yaml b/code/nwb-schema/2.6.0/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.file.yaml rename to code/nwb-schema/2.6.0/core/nwb.file.yaml diff --git a/nwb-schema/2.6.0/core/nwb.icephys.yaml b/code/nwb-schema/2.6.0/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.icephys.yaml rename to code/nwb-schema/2.6.0/core/nwb.icephys.yaml diff --git a/nwb-schema/2.6.0/core/nwb.image.yaml b/code/nwb-schema/2.6.0/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.image.yaml rename to code/nwb-schema/2.6.0/core/nwb.image.yaml diff --git a/nwb-schema/2.6.0/core/nwb.misc.yaml b/code/nwb-schema/2.6.0/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.misc.yaml rename to code/nwb-schema/2.6.0/core/nwb.misc.yaml diff --git a/nwb-schema/2.6.0/core/nwb.namespace.yaml b/code/nwb-schema/2.6.0/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.namespace.yaml rename to code/nwb-schema/2.6.0/core/nwb.namespace.yaml diff --git a/nwb-schema/2.6.0/core/nwb.ogen.yaml b/code/nwb-schema/2.6.0/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.ogen.yaml rename to code/nwb-schema/2.6.0/core/nwb.ogen.yaml diff --git a/nwb-schema/2.6.0/core/nwb.ophys.yaml b/code/nwb-schema/2.6.0/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.ophys.yaml rename to 
code/nwb-schema/2.6.0/core/nwb.ophys.yaml diff --git a/nwb-schema/2.6.0/core/nwb.retinotopy.yaml b/code/nwb-schema/2.6.0/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.6.0/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.6.0/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.6.0/hdmf-common-schema/common/base.yaml b/code/nwb-schema/2.6.0/hdmf-common-schema/common/base.yaml similarity index 100% rename from nwb-schema/2.6.0/hdmf-common-schema/common/base.yaml rename to code/nwb-schema/2.6.0/hdmf-common-schema/common/base.yaml diff --git a/nwb-schema/2.6.0/hdmf-common-schema/common/experimental.yaml b/code/nwb-schema/2.6.0/hdmf-common-schema/common/experimental.yaml similarity index 100% rename from nwb-schema/2.6.0/hdmf-common-schema/common/experimental.yaml rename to code/nwb-schema/2.6.0/hdmf-common-schema/common/experimental.yaml diff --git a/nwb-schema/2.6.0/hdmf-common-schema/common/namespace.yaml b/code/nwb-schema/2.6.0/hdmf-common-schema/common/namespace.yaml similarity index 100% rename from nwb-schema/2.6.0/hdmf-common-schema/common/namespace.yaml rename to code/nwb-schema/2.6.0/hdmf-common-schema/common/namespace.yaml diff --git a/nwb-schema/2.6.0/hdmf-common-schema/common/resources.yaml b/code/nwb-schema/2.6.0/hdmf-common-schema/common/resources.yaml similarity index 100% rename from nwb-schema/2.6.0/hdmf-common-schema/common/resources.yaml rename to code/nwb-schema/2.6.0/hdmf-common-schema/common/resources.yaml diff --git a/nwb-schema/2.6.0/hdmf-common-schema/common/sparse.yaml b/code/nwb-schema/2.6.0/hdmf-common-schema/common/sparse.yaml similarity index 100% rename from nwb-schema/2.6.0/hdmf-common-schema/common/sparse.yaml rename to code/nwb-schema/2.6.0/hdmf-common-schema/common/sparse.yaml diff --git a/nwb-schema/2.6.0/hdmf-common-schema/common/table.yaml b/code/nwb-schema/2.6.0/hdmf-common-schema/common/table.yaml similarity index 100% rename from nwb-schema/2.6.0/hdmf-common-schema/common/table.yaml rename to code/nwb-schema/2.6.0/hdmf-common-schema/common/table.yaml diff --git a/nwb-schema/2.7.0/core/nwb.base.yaml b/code/nwb-schema/2.7.0/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.base.yaml rename to code/nwb-schema/2.7.0/core/nwb.base.yaml diff --git a/nwb-schema/2.7.0/core/nwb.behavior.yaml b/code/nwb-schema/2.7.0/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.behavior.yaml rename to code/nwb-schema/2.7.0/core/nwb.behavior.yaml diff --git a/nwb-schema/2.7.0/core/nwb.device.yaml b/code/nwb-schema/2.7.0/core/nwb.device.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.device.yaml rename to code/nwb-schema/2.7.0/core/nwb.device.yaml diff --git a/nwb-schema/2.7.0/core/nwb.ecephys.yaml b/code/nwb-schema/2.7.0/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.ecephys.yaml rename to code/nwb-schema/2.7.0/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.7.0/core/nwb.epoch.yaml b/code/nwb-schema/2.7.0/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.epoch.yaml rename to code/nwb-schema/2.7.0/core/nwb.epoch.yaml diff --git a/nwb-schema/2.7.0/core/nwb.file.yaml b/code/nwb-schema/2.7.0/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.file.yaml rename to code/nwb-schema/2.7.0/core/nwb.file.yaml diff --git a/nwb-schema/2.7.0/core/nwb.icephys.yaml b/code/nwb-schema/2.7.0/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.icephys.yaml rename to 
code/nwb-schema/2.7.0/core/nwb.icephys.yaml diff --git a/nwb-schema/2.7.0/core/nwb.image.yaml b/code/nwb-schema/2.7.0/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.image.yaml rename to code/nwb-schema/2.7.0/core/nwb.image.yaml diff --git a/nwb-schema/2.7.0/core/nwb.misc.yaml b/code/nwb-schema/2.7.0/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.misc.yaml rename to code/nwb-schema/2.7.0/core/nwb.misc.yaml diff --git a/nwb-schema/2.7.0/core/nwb.namespace.yaml b/code/nwb-schema/2.7.0/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.namespace.yaml rename to code/nwb-schema/2.7.0/core/nwb.namespace.yaml diff --git a/nwb-schema/2.7.0/core/nwb.ogen.yaml b/code/nwb-schema/2.7.0/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.ogen.yaml rename to code/nwb-schema/2.7.0/core/nwb.ogen.yaml diff --git a/nwb-schema/2.7.0/core/nwb.ophys.yaml b/code/nwb-schema/2.7.0/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.ophys.yaml rename to code/nwb-schema/2.7.0/core/nwb.ophys.yaml diff --git a/nwb-schema/2.7.0/core/nwb.retinotopy.yaml b/code/nwb-schema/2.7.0/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.7.0/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.7.0/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.7.0/hdmf-common-schema/common/base.yaml b/code/nwb-schema/2.7.0/hdmf-common-schema/common/base.yaml similarity index 100% rename from nwb-schema/2.7.0/hdmf-common-schema/common/base.yaml rename to code/nwb-schema/2.7.0/hdmf-common-schema/common/base.yaml diff --git a/nwb-schema/2.7.0/hdmf-common-schema/common/experimental.yaml b/code/nwb-schema/2.7.0/hdmf-common-schema/common/experimental.yaml similarity index 100% rename from nwb-schema/2.7.0/hdmf-common-schema/common/experimental.yaml rename to code/nwb-schema/2.7.0/hdmf-common-schema/common/experimental.yaml diff --git a/nwb-schema/2.7.0/hdmf-common-schema/common/namespace.yaml b/code/nwb-schema/2.7.0/hdmf-common-schema/common/namespace.yaml similarity index 100% rename from nwb-schema/2.7.0/hdmf-common-schema/common/namespace.yaml rename to code/nwb-schema/2.7.0/hdmf-common-schema/common/namespace.yaml diff --git a/nwb-schema/2.7.0/hdmf-common-schema/common/resources.yaml b/code/nwb-schema/2.7.0/hdmf-common-schema/common/resources.yaml similarity index 100% rename from nwb-schema/2.7.0/hdmf-common-schema/common/resources.yaml rename to code/nwb-schema/2.7.0/hdmf-common-schema/common/resources.yaml diff --git a/nwb-schema/2.7.0/hdmf-common-schema/common/sparse.yaml b/code/nwb-schema/2.7.0/hdmf-common-schema/common/sparse.yaml similarity index 100% rename from nwb-schema/2.7.0/hdmf-common-schema/common/sparse.yaml rename to code/nwb-schema/2.7.0/hdmf-common-schema/common/sparse.yaml diff --git a/nwb-schema/2.7.0/hdmf-common-schema/common/table.yaml b/code/nwb-schema/2.7.0/hdmf-common-schema/common/table.yaml similarity index 100% rename from nwb-schema/2.7.0/hdmf-common-schema/common/table.yaml rename to code/nwb-schema/2.7.0/hdmf-common-schema/common/table.yaml diff --git a/nwb-schema/2.8.0/core/nwb.base.yaml b/code/nwb-schema/2.8.0/core/nwb.base.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.base.yaml rename to code/nwb-schema/2.8.0/core/nwb.base.yaml diff --git a/nwb-schema/2.8.0/core/nwb.behavior.yaml b/code/nwb-schema/2.8.0/core/nwb.behavior.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.behavior.yaml rename to 
code/nwb-schema/2.8.0/core/nwb.behavior.yaml diff --git a/nwb-schema/2.8.0/core/nwb.device.yaml b/code/nwb-schema/2.8.0/core/nwb.device.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.device.yaml rename to code/nwb-schema/2.8.0/core/nwb.device.yaml diff --git a/nwb-schema/2.8.0/core/nwb.ecephys.yaml b/code/nwb-schema/2.8.0/core/nwb.ecephys.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.ecephys.yaml rename to code/nwb-schema/2.8.0/core/nwb.ecephys.yaml diff --git a/nwb-schema/2.8.0/core/nwb.epoch.yaml b/code/nwb-schema/2.8.0/core/nwb.epoch.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.epoch.yaml rename to code/nwb-schema/2.8.0/core/nwb.epoch.yaml diff --git a/nwb-schema/2.8.0/core/nwb.file.yaml b/code/nwb-schema/2.8.0/core/nwb.file.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.file.yaml rename to code/nwb-schema/2.8.0/core/nwb.file.yaml diff --git a/nwb-schema/2.8.0/core/nwb.icephys.yaml b/code/nwb-schema/2.8.0/core/nwb.icephys.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.icephys.yaml rename to code/nwb-schema/2.8.0/core/nwb.icephys.yaml diff --git a/nwb-schema/2.8.0/core/nwb.image.yaml b/code/nwb-schema/2.8.0/core/nwb.image.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.image.yaml rename to code/nwb-schema/2.8.0/core/nwb.image.yaml diff --git a/nwb-schema/2.8.0/core/nwb.misc.yaml b/code/nwb-schema/2.8.0/core/nwb.misc.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.misc.yaml rename to code/nwb-schema/2.8.0/core/nwb.misc.yaml diff --git a/nwb-schema/2.8.0/core/nwb.namespace.yaml b/code/nwb-schema/2.8.0/core/nwb.namespace.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.namespace.yaml rename to code/nwb-schema/2.8.0/core/nwb.namespace.yaml diff --git a/nwb-schema/2.8.0/core/nwb.ogen.yaml b/code/nwb-schema/2.8.0/core/nwb.ogen.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.ogen.yaml rename to code/nwb-schema/2.8.0/core/nwb.ogen.yaml diff --git a/nwb-schema/2.8.0/core/nwb.ophys.yaml b/code/nwb-schema/2.8.0/core/nwb.ophys.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.ophys.yaml rename to code/nwb-schema/2.8.0/core/nwb.ophys.yaml diff --git a/nwb-schema/2.8.0/core/nwb.retinotopy.yaml b/code/nwb-schema/2.8.0/core/nwb.retinotopy.yaml similarity index 100% rename from nwb-schema/2.8.0/core/nwb.retinotopy.yaml rename to code/nwb-schema/2.8.0/core/nwb.retinotopy.yaml diff --git a/nwb-schema/2.8.0/hdmf-common-schema/common/base.yaml b/code/nwb-schema/2.8.0/hdmf-common-schema/common/base.yaml similarity index 100% rename from nwb-schema/2.8.0/hdmf-common-schema/common/base.yaml rename to code/nwb-schema/2.8.0/hdmf-common-schema/common/base.yaml diff --git a/nwb-schema/2.8.0/hdmf-common-schema/common/experimental.yaml b/code/nwb-schema/2.8.0/hdmf-common-schema/common/experimental.yaml similarity index 100% rename from nwb-schema/2.8.0/hdmf-common-schema/common/experimental.yaml rename to code/nwb-schema/2.8.0/hdmf-common-schema/common/experimental.yaml diff --git a/nwb-schema/2.8.0/hdmf-common-schema/common/namespace.yaml b/code/nwb-schema/2.8.0/hdmf-common-schema/common/namespace.yaml similarity index 100% rename from nwb-schema/2.8.0/hdmf-common-schema/common/namespace.yaml rename to code/nwb-schema/2.8.0/hdmf-common-schema/common/namespace.yaml diff --git a/nwb-schema/2.8.0/hdmf-common-schema/common/resources.yaml b/code/nwb-schema/2.8.0/hdmf-common-schema/common/resources.yaml similarity index 100% rename from 
nwb-schema/2.8.0/hdmf-common-schema/common/resources.yaml rename to code/nwb-schema/2.8.0/hdmf-common-schema/common/resources.yaml diff --git a/nwb-schema/2.8.0/hdmf-common-schema/common/sparse.yaml b/code/nwb-schema/2.8.0/hdmf-common-schema/common/sparse.yaml similarity index 100% rename from nwb-schema/2.8.0/hdmf-common-schema/common/sparse.yaml rename to code/nwb-schema/2.8.0/hdmf-common-schema/common/sparse.yaml diff --git a/nwb-schema/2.8.0/hdmf-common-schema/common/table.yaml b/code/nwb-schema/2.8.0/hdmf-common-schema/common/table.yaml similarity index 100% rename from nwb-schema/2.8.0/hdmf-common-schema/common/table.yaml rename to code/nwb-schema/2.8.0/hdmf-common-schema/common/table.yaml diff --git a/nwbClearGenerated.m b/code/nwbClearGenerated.m similarity index 58% rename from nwbClearGenerated.m rename to code/nwbClearGenerated.m index 8e0c6cfd..54e929be 100644 --- a/nwbClearGenerated.m +++ b/code/nwbClearGenerated.m @@ -1,9 +1,35 @@ function clearedNamespaceNames = nwbClearGenerated(targetFolder, options) - %% NWBCLEARGENERATED clears generated class files. +% NWBCLEARGENERATED - Clear generated class files. +% +% Syntax: +% NWBCLEARGENERATED() Clear generated class files from the ``+types`` +% folder in the matnwb root directory. +% +% Input Arguments: +% - targetFolder (string) - +% Path name for folder containing generated classes in a ``+types`` +% namespace folder. Default value is the matnwb root directory +% +% - options (name-value pairs) - +% Optional name-value pairs. Available options: +% +% - ClearCache (logical) - +% Whether to clear the cached schema data in the ``namespaces`` folder. +% Default is ``false`` +% +% Usage: +% Example 1 - Clear all generated classes in the matnwb root directory:: +% +% nwbClearGenerated(); +% +% See also: +% generateCore, generateExtension + arguments targetFolder (1,1) string {mustBeFolder} = misc.getMatnwbDir() options.ClearCache (1,1) logical = false end + typesPath = fullfile(targetFolder, '+types'); listing = dir(typesPath); moduleNames = setdiff({listing.name}, {'+untyped', '+util', '.', '..'}); @@ -26,4 +52,4 @@ clearedNamespaceNames = strrep(clearedNamespaceNames, '+', ''); clearedNamespaceNames = string(clearedNamespaceNames); end -end \ No newline at end of file +end diff --git a/code/nwbExport.m b/code/nwbExport.m new file mode 100644 index 00000000..a108e0ae --- /dev/null +++ b/code/nwbExport.m @@ -0,0 +1,54 @@ +function nwbExport(nwbFileObjects, filePaths, mode) +%NWBEXPORT - Writes an NWB file. +% +% Syntax: +% NWBEXPORT(nwb, filename) Writes the nwb object to a file at filename. +% +% Input Arguments: +% - nwb (NwbFile) - Nwb file object +% - filename (string) - Filepath pointing to an NWB file. 
+% +% Usage: +% Example 1 - Export an NWB file:: +% +% % Create an NWB object with some properties: +% nwb = NwbFile; +% nwb.session_start_time = datetime('now'); +% nwb.identifier = 'EMPTY'; +% nwb.session_description = 'empty test file'; +% +% % Write the nwb object to a file: +% nwbExport(nwb, 'empty.nwb'); +% +% Example 2 - Export an NWB file using an older schema version:: +% +% % Generate classes for an older version of NWB schemas: +% generateCore('2.5.0') +% +% % Create an NWB object with some properties: +% nwb = NwbFile; +% nwb.session_start_time = datetime('now'); +% nwb.identifier = 'EMPTY'; +% nwb.session_description = 'empty test file'; +% +% % Write the nwb object to a file: +% nwbExport(nwb, 'empty.nwb'); +% +% See also: +% generateCore, generateExtension, NwbFile, nwbRead + + arguments + nwbFileObjects (1,:) NwbFile {mustBeNonempty} + filePaths (1,:) string {mustBeNonzeroLengthText} + mode (1,1) string {mustBeMember(mode, ["edit", "overwrite"])} = "edit" + end + + assert(length(nwbFileObjects) == length(filePaths), ... + 'NWB:Export:FilepathLengthMismatch', ... + 'Lists of NWB objects to export and list of file paths must be the same length.') + + for iFiles = 1:length(nwbFileObjects) + filePath = char(filePaths(iFiles)); + nwbFileObjects(iFiles).export(filePath, mode); + end +end diff --git a/nwbRead.m b/code/nwbRead.m similarity index 69% rename from nwbRead.m rename to code/nwbRead.m index 70ec5c6a..16752f34 100644 --- a/nwbRead.m +++ b/code/nwbRead.m @@ -1,21 +1,53 @@ function nwb = nwbRead(filename, flags, options) - %NWBREAD Reads an NWB file. - % nwb = NWBREAD(filename) Reads the nwb file at filename and returns an - % NWBFile object representing its contents. - % nwb = nwbRead(filename, 'ignorecache') Reads the nwb file without generating classes - % off of the cached schema if one exists. - % - % nwb = NWBREAD(filename, options) - % - % Requires that core and extension NWB types have been generated - % and reside in a 'types' package on the matlab path. - % - % Example: - % nwb = nwbRead('data.nwb'); - % nwb = nwbRead('data.nwb', 'ignorecache'); - % nwb = nwbRead('data.nwb', 'savedir', '.'); - % - % See also GENERATECORE, GENERATEEXTENSION, NWBFILE, NWBEXPORT +% NWBREAD - Read an NWB file. +% +% Syntax: +% nwb = NWBREAD(filename) Reads the nwb file at filename and returns an +% NWBFile object representing its contents. +% +% nwb = NWBREAD(filename, flags) Reads the nwb file using optional +% flags controlling the mode for how to read the file. See input +% arguments for a list of available flags. +% +% nwb = NWBREAD(filename, Name, Value) Reads the nwb file using optional +% name-value pairs controlling options for how to read the file. +% +% Input Arguments: +% - filename (string) - +% Filepath pointing to an NWB file. +% +% - flags (string) - +% Flag for setting the mode for the NWBREAD operation. Available options are: +% 'ignorecache'. If the 'ignorecache' flag is used, classes for NWB data +% types are not re-generated based on the embedded schemas in the file. +% +% - options (name-value pairs) - +% Optional name-value pairs. Available options: +% +% - savedir (string) - +% A folder to save generated classes for NWB types. 
+% +% Output Arguments: +% - nwb (NwbFile) - Nwb file object +% +% Usage: +% Example 1 - Read an NWB file:: +% +% nwb = nwbRead('data.nwb'); +% +% Example 2 - Read an NWB file without re-generating classes for NWB types:: +% +% nwb = nwbRead('data.nwb', 'ignorecache'); +% +% Note: This is a good option to use if you are reading several files +% which are created of the same version of the NWB schemas. +% +% Example 3 - Read an NWB file and generate classes for NWB types in the current working directory:: +% +% nwb = nwbRead('data.nwb', 'savedir', '.'); +% +% See also: +% generateCore, generateExtension, NwbFile, nwbExport arguments filename (1,1) string {matnwb.common.mustBeNwbFile} diff --git a/resources/functionSignatures.json b/code/resources/functionSignatures.json similarity index 100% rename from resources/functionSignatures.json rename to code/resources/functionSignatures.json diff --git a/tutorials/.gitattributes b/code/tutorials/.gitattributes similarity index 100% rename from tutorials/.gitattributes rename to code/tutorials/.gitattributes diff --git a/tutorials/basicUsage.mlx b/code/tutorials/basicUsage.mlx similarity index 100% rename from tutorials/basicUsage.mlx rename to code/tutorials/basicUsage.mlx diff --git a/tutorials/behavior.mlx b/code/tutorials/behavior.mlx similarity index 100% rename from tutorials/behavior.mlx rename to code/tutorials/behavior.mlx diff --git a/tutorials/convertTrials.m b/code/tutorials/convertTrials.m similarity index 100% rename from tutorials/convertTrials.m rename to code/tutorials/convertTrials.m diff --git a/tutorials/dataPipe.m b/code/tutorials/dataPipe.m similarity index 100% rename from tutorials/dataPipe.m rename to code/tutorials/dataPipe.m diff --git a/tutorials/dimensionMapNoDataPipes.mlx b/code/tutorials/dimensionMapNoDataPipes.mlx similarity index 100% rename from tutorials/dimensionMapNoDataPipes.mlx rename to code/tutorials/dimensionMapNoDataPipes.mlx diff --git a/tutorials/dimensionMapWithDataPipes.mlx b/code/tutorials/dimensionMapWithDataPipes.mlx similarity index 100% rename from tutorials/dimensionMapWithDataPipes.mlx rename to code/tutorials/dimensionMapWithDataPipes.mlx diff --git a/tutorials/dynamic_tables.mlx b/code/tutorials/dynamic_tables.mlx similarity index 100% rename from tutorials/dynamic_tables.mlx rename to code/tutorials/dynamic_tables.mlx diff --git a/tutorials/dynamically_loaded_filters.mlx b/code/tutorials/dynamically_loaded_filters.mlx similarity index 100% rename from tutorials/dynamically_loaded_filters.mlx rename to code/tutorials/dynamically_loaded_filters.mlx diff --git a/tutorials/ecephys.mlx b/code/tutorials/ecephys.mlx similarity index 93% rename from tutorials/ecephys.mlx rename to code/tutorials/ecephys.mlx index f5b4a2ef..b5453c04 100644 Binary files a/tutorials/ecephys.mlx and b/code/tutorials/ecephys.mlx differ diff --git a/tutorials/formatStruct.m b/code/tutorials/formatStruct.m similarity index 100% rename from tutorials/formatStruct.m rename to code/tutorials/formatStruct.m diff --git a/tutorials/icephys.mlx b/code/tutorials/icephys.mlx similarity index 98% rename from tutorials/icephys.mlx rename to code/tutorials/icephys.mlx index bf98cfe4..5b59645b 100644 Binary files a/tutorials/icephys.mlx and b/code/tutorials/icephys.mlx differ diff --git a/tutorials/images.mlx b/code/tutorials/images.mlx similarity index 100% rename from tutorials/images.mlx rename to code/tutorials/images.mlx diff --git a/tutorials/intro.mlx b/code/tutorials/intro.mlx similarity index 95% rename from tutorials/intro.mlx 
rename to code/tutorials/intro.mlx index 43b78de6..75b3a836 100644 Binary files a/tutorials/intro.mlx and b/code/tutorials/intro.mlx differ diff --git a/tutorials/ogen.mlx b/code/tutorials/ogen.mlx similarity index 100% rename from tutorials/ogen.mlx rename to code/tutorials/ogen.mlx diff --git a/tutorials/ophys.mlx b/code/tutorials/ophys.mlx similarity index 95% rename from tutorials/ophys.mlx rename to code/tutorials/ophys.mlx index 93a10050..123b22f7 100644 Binary files a/tutorials/ophys.mlx and b/code/tutorials/ophys.mlx differ diff --git a/tutorials/private/README.md b/code/tutorials/private/README.md similarity index 100% rename from tutorials/private/README.md rename to code/tutorials/private/README.md diff --git a/tutorials/private/mcode/basicUsage.m b/code/tutorials/private/mcode/basicUsage.m similarity index 100% rename from tutorials/private/mcode/basicUsage.m rename to code/tutorials/private/mcode/basicUsage.m diff --git a/tutorials/private/mcode/behavior.m b/code/tutorials/private/mcode/behavior.m similarity index 100% rename from tutorials/private/mcode/behavior.m rename to code/tutorials/private/mcode/behavior.m diff --git a/tutorials/private/mcode/dimensionMapNoDataPipes.m b/code/tutorials/private/mcode/dimensionMapNoDataPipes.m similarity index 100% rename from tutorials/private/mcode/dimensionMapNoDataPipes.m rename to code/tutorials/private/mcode/dimensionMapNoDataPipes.m diff --git a/tutorials/private/mcode/dimensionMapWithDataPipes.m b/code/tutorials/private/mcode/dimensionMapWithDataPipes.m similarity index 100% rename from tutorials/private/mcode/dimensionMapWithDataPipes.m rename to code/tutorials/private/mcode/dimensionMapWithDataPipes.m diff --git a/tutorials/private/mcode/dynamic_tables.m b/code/tutorials/private/mcode/dynamic_tables.m similarity index 100% rename from tutorials/private/mcode/dynamic_tables.m rename to code/tutorials/private/mcode/dynamic_tables.m diff --git a/tutorials/private/mcode/dynamically_loaded_filters.m b/code/tutorials/private/mcode/dynamically_loaded_filters.m similarity index 100% rename from tutorials/private/mcode/dynamically_loaded_filters.m rename to code/tutorials/private/mcode/dynamically_loaded_filters.m diff --git a/tutorials/private/mcode/ecephys.m b/code/tutorials/private/mcode/ecephys.m similarity index 98% rename from tutorials/private/mcode/ecephys.m rename to code/tutorials/private/mcode/ecephys.m index b07927f2..2002966b 100644 --- a/tutorials/private/mcode/ecephys.m +++ b/code/tutorials/private/mcode/ecephys.m @@ -436,9 +436,9 @@ %% % *Check out other tutorials that teach advanced NWB topics:* %% -% * % * -% * \ No newline at end of file diff --git a/tutorials/private/mcode/icephys.m b/code/tutorials/private/mcode/icephys.m similarity index 99% rename from tutorials/private/mcode/icephys.m rename to code/tutorials/private/mcode/icephys.m index 8608f77e..9583e428 100644 --- a/tutorials/private/mcode/icephys.m +++ b/code/tutorials/private/mcode/icephys.m @@ -29,7 +29,7 @@ ); % Device metadata -% Device metadata is represented by objects. 
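Taken together, the relocated and updated functions above support a simple round trip. A minimal sketch follows; the file name and metadata values are illustrative, and the 'overwrite' mode, 'ignorecache' flag, and 'ClearCache' option follow the argument blocks and examples shown in the diffs above:

    % Build a small NWB file object (values are placeholders):
    nwb = NwbFile;
    nwb.session_start_time = datetime('now');
    nwb.identifier = 'DEMO-001';
    nwb.session_description = 'demo session';

    % Write it; mode defaults to "edit", "overwrite" is the alternative:
    nwbExport(nwb, 'demo.nwb', 'overwrite');

    % Read it back without regenerating classes from the embedded schemas:
    nwbIn = nwbRead('demo.nwb', 'ignorecache');

    % Remove generated classes and cached schema data, e.g. before switching schema versions:
    nwbClearGenerated(misc.getMatnwbDir(), 'ClearCache', true);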
diff --git a/tutorials/private/mcode/images.m b/code/tutorials/private/mcode/images.m similarity index 100% rename from tutorials/private/mcode/images.m rename to code/tutorials/private/mcode/images.m diff --git a/tutorials/private/mcode/intro.m b/code/tutorials/private/mcode/intro.m similarity index 96% rename from tutorials/private/mcode/intro.m rename to code/tutorials/private/mcode/intro.m index 417c24f8..97455d79 100644 --- a/tutorials/private/mcode/intro.m +++ b/code/tutorials/private/mcode/intro.m @@ -84,10 +84,10 @@ 'data_unit', 'm', ... 'timestamps', linspace(0, 1, 10)); %% -% The class serves as the foundation for all other time series types % in the NWB format. Several specialized subclasses extend the functionality of -% , each tailored to handle specific kinds of data. In the next % section, we’ll explore one of these specialized types. For a full overview, % please check out the -% * -% * +% * <./ecephys.mlx Extracellular electrophysiology> +% * <./icephys.mlx Intracellular electrophysiology> +% * <./ophys.mlx Optical physiology> %% % See the to learn what data types are available. diff --git a/tutorials/private/mcode/ogen.m b/code/tutorials/private/mcode/ogen.m similarity index 100% rename from tutorials/private/mcode/ogen.m rename to code/tutorials/private/mcode/ogen.m diff --git a/tutorials/private/mcode/ophys.m b/code/tutorials/private/mcode/ophys.m similarity index 98% rename from tutorials/private/mcode/ophys.m rename to code/tutorials/private/mcode/ophys.m index 434227be..035a68e6 100644 --- a/tutorials/private/mcode/ophys.m +++ b/code/tutorials/private/mcode/ophys.m @@ -385,10 +385,10 @@ %% % *Check out other tutorials that teach advanced NWB topics:* %% -% * % * -% * %% \ No newline at end of file diff --git a/tutorials/private/mcode/read_demo.m b/code/tutorials/private/mcode/read_demo.m similarity index 100% rename from tutorials/private/mcode/read_demo.m rename to code/tutorials/private/mcode/read_demo.m diff --git a/tutorials/private/mcode/remote_read.m b/code/tutorials/private/mcode/remote_read.m similarity index 100% rename from tutorials/private/mcode/remote_read.m rename to code/tutorials/private/mcode/remote_read.m diff --git a/tutorials/private/mcode/scratch.m b/code/tutorials/private/mcode/scratch.m similarity index 100% rename from tutorials/private/mcode/scratch.m rename to code/tutorials/private/mcode/scratch.m diff --git a/tutorials/read_demo.mlx b/code/tutorials/read_demo.mlx similarity index 100% rename from tutorials/read_demo.mlx rename to code/tutorials/read_demo.mlx diff --git a/tutorials/remote_read.mlx b/code/tutorials/remote_read.mlx similarity index 100% rename from tutorials/remote_read.mlx rename to code/tutorials/remote_read.mlx diff --git a/tutorials/scratch.mlx b/code/tutorials/scratch.mlx similarity index 100% rename from tutorials/scratch.mlx rename to code/tutorials/scratch.mlx diff --git a/doc/+types/+core/AbstractFeatureSeries.html b/doc/+types/+core/AbstractFeatureSeries.html deleted file mode 100644 index 5e91da1c..00000000 --- a/doc/+types/+core/AbstractFeatureSeries.html +++ /dev/null @@ -1,399 +0,0 @@ - - - - - - types.core.AbstractFeatureSeries - MATLAB File Help - - - - - - - - - - -
types.core.AbstractFeatureSeries - MATLAB File Help
types.core.AbstractFeatureSeries
  AbstractFeatureSeries Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical.
Class Details
  Superclasses:       types.core.TimeSeries, types.untyped.GroupClass
  Sealed:             false
  Construct on load:  false

Constructor Summary
  AbstractFeatureSeries: Constructor for AbstractFeatureSeries

Property Summary
  comments: (char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.
  control: (uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
  control_description: (char) Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
  data: REQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
  data_continuity: (char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.
  data_conversion: (single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. (See the worked example after this table.)
  data_offset: (single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.
  data_resolution: (single) Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.
  data_unit: (char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.
  description: (char) Description of the time series.
  feature_units: (char) Units of each feature.
  features: REQUIRED (char) Description of the features represented in TimeSeries::data.
  starting_time: (double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
  starting_time_rate: (single) Sampling rate, in Hz.
  starting_time_unit: (char) Unit of measurement for time, which is fixed to 'seconds'.
  timestamps: (double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
  timestamps_interval: (int32) Value is '1'
  timestamps_unit: (char) Unit of measurement for timestamps, which is fixed to 'seconds'.
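As a worked example of the data_conversion and data_offset arithmetic described in the table above (the gain and voltage range come from the data_conversion entry; variable names and sample values are illustrative):

    % Signed 16-bit acquisition values covering a -2.5 V to 2.5 V range at 8000x gain:
    conversion = 2.5 / 32768 / 8000;                  % = 9.5367e-09 volts per raw count
    offset     = 0;                                   % no re-centering needed for a signed type
    raw        = int16([-32768 0 32767]);             % example raw samples
    volts      = double(raw) * conversion + offset;   % data expressed in the specified 'unit'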
Method Summary
  addlistener: Add listener for event.
  delete: Delete a handle object.
  eq: == (EQ) Test handle equality.
  export
  findobj: Find objects matching specified conditions.
  findprop: Find property of MATLAB handle object.
  ge: >= (GE) Greater than or equal relation for handles.
  gt: > (GT) Greater than relation for handles.
  isvalid (Sealed): Test handle validity.
  le: <= (LE) Less than or equal relation for handles.
  listener: Add listener for event without binding the listener to the source object.
  loadAll
  lt: < (LT) Less than relation for handles.
  ne: ~= (NE) Not equal relation for handles.
  notify: Notify listeners of event.
  validate_comments, validate_control, validate_control_description, validate_data, validate_data_continuity, validate_data_conversion, validate_data_offset, validate_data_resolution, validate_data_unit, validate_description, validate_feature_units, validate_features, validate_starting_time, validate_starting_time_rate, validate_timestamps

Event Summary
  (none)
- - \ No newline at end of file diff --git a/doc/+types/+core/AnnotationSeries.html b/doc/+types/+core/AnnotationSeries.html deleted file mode 100644 index a9984b87..00000000 --- a/doc/+types/+core/AnnotationSeries.html +++ /dev/null @@ -1,375 +0,0 @@ - - - - - - types.core.AnnotationSeries - MATLAB File Help - - - - - - - - - - -
types.core.AnnotationSeries - MATLAB File Help
types.core.AnnotationSeries
  AnnotationSeries Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way.
Class Details
  Superclasses:       types.core.TimeSeries, types.untyped.GroupClass
  Sealed:             false
  Construct on load:  false

Constructor Summary
  AnnotationSeries: Constructor for AnnotationSeries

Property Summary
  comments: (char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.
  control: (uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
  control_description: (char) Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
  data: REQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
  data_continuity: (char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.
  data_conversion: (single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.
  data_offset: (single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.
  data_resolution: (single) Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.
  data_unit: (char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.
  description: (char) Description of the time series.
  starting_time: (double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
  starting_time_rate: (single) Sampling rate, in Hz.
  starting_time_unit: (char) Unit of measurement for time, which is fixed to 'seconds'.
  timestamps: (double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
  timestamps_interval: (int32) Value is '1'
  timestamps_unit: (char) Unit of measurement for timestamps, which is fixed to 'seconds'.
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_comments 
-   - - validate_control 
-   - - validate_control_description 
-   - - validate_data 
-   - - validate_data_continuity 
-   - - validate_data_conversion 
-   - - validate_data_offset 
-   - - validate_data_resolution 
-   - - validate_data_unit 
-   - - validate_description 
-   - - validate_starting_time 
-   - - validate_starting_time_rate 
-   - - validate_timestamps 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/BehavioralEpochs.html b/doc/+types/+core/BehavioralEpochs.html deleted file mode 100644 index d301a7a4..00000000 --- a/doc/+types/+core/BehavioralEpochs.html +++ /dev/null @@ -1,192 +0,0 @@ - - - - - - types.core.BehavioralEpochs - MATLAB File Help - - - - - - - - - - -
types.core.BehavioralEpochs - MATLAB File Help
-
types.core.BehavioralEpochs
-
BehavioralEpochs TimeSeries for storing behavioral epochs. The objective of this and the other two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries) is to provide generic hooks for software tools/scripts. This allows a tool/script to take the output of one specific interface (e.g., UnitTimes) and plot that data relative to another data modality (e.g., behavioral events) without having to define all possible modalities in advance. Declaring one of these interfaces means that one or more TimeSeries of the specified type is published. These TimeSeries should reside in a group having the same name as the interface. For example, if a BehavioralTimeSeries interface is declared, the module will have one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular events. BehavioralTimeSeries is for continuous data.
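As a rough matnwb-style sketch (module name, object names, and times are illustrative assumptions), behavioral epochs are typically published as an IntervalSeries inside a BehavioralEpochs interface within a 'behavior' ProcessingModule:

% +1 marks the start of an epoch, -1 marks its end.
running = types.core.IntervalSeries( ...
    'data', [1, -1, 1, -1], ...
    'timestamps', [10.0, 20.0, 35.0, 40.0]);
epochs = types.core.BehavioralEpochs();
epochs.intervalseries.set('running', running);
behavior = types.core.ProcessingModule('description', 'behavioral data');
behavior.nwbdatainterface.set('BehavioralEpochs', epochs);
nwb.processing.set('behavior', behavior);  % assumes an existing NwbFile object `nwb`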
Class Details
Superclasses: types.core.NWBDataInterface, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
BehavioralEpochs: Constructor for BehavioralEpochs

Property Summary
intervalseries: (IntervalSeries) IntervalSeries object containing start and stop times of epochs.
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_intervalseries 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/BehavioralEvents.html b/doc/+types/+core/BehavioralEvents.html deleted file mode 100644 index b4168341..00000000 --- a/doc/+types/+core/BehavioralEvents.html +++ /dev/null @@ -1,192 +0,0 @@ - - - - - - types.core.BehavioralEvents - MATLAB File Help - - - - - - - - - - -
types.core.BehavioralEvents - MATLAB File Help
-
types.core.BehavioralEvents
-
  BehavioralEvents TimeSeries for storing behavioral events. See description of BehavioralEpochs for more details.
Class Details
Superclasses: types.core.NWBDataInterface, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
BehavioralEvents: Constructor for BehavioralEvents

Property Summary
timeseries: (TimeSeries) TimeSeries object containing behavioral events.
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_timeseries 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/BehavioralTimeSeries.html b/doc/+types/+core/BehavioralTimeSeries.html deleted file mode 100644 index c8e775a8..00000000 --- a/doc/+types/+core/BehavioralTimeSeries.html +++ /dev/null @@ -1,192 +0,0 @@ - - - - - - types.core.BehavioralTimeSeries - MATLAB File Help - - - - - - - - - - -
types.core.BehavioralTimeSeries - MATLAB File Help
-
types.core.BehavioralTimeSeries
-
BehavioralTimeSeries TimeSeries for storing behavioral time series data. See description of BehavioralEpochs for more details.
Class Details
Superclasses: types.core.NWBDataInterface, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
BehavioralTimeSeries: Constructor for BehavioralTimeSeries

Property Summary
timeseries: (TimeSeries) TimeSeries object containing continuous behavioral data.
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_timeseries 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/ClusterWaveforms.html b/doc/+types/+core/ClusterWaveforms.html deleted file mode 100644 index d84c2607..00000000 --- a/doc/+types/+core/ClusterWaveforms.html +++ /dev/null @@ -1,231 +0,0 @@ - - - - - - types.core.ClusterWaveforms - MATLAB File Help - - - - - - - - - - -
types.core.ClusterWaveforms - MATLAB File Help
-
types.core.ClusterWaveforms
-
  ClusterWaveforms DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.
Class Details
Superclasses: types.core.NWBDataInterface, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
ClusterWaveforms: Constructor for ClusterWaveforms

Property Summary
clustering_interface: Clustering
waveform_filtering: REQUIRED (char) Filtering applied to data before generating mean/sd
waveform_mean: REQUIRED (single) The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e., cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero-filled)
waveform_sd: REQUIRED (single) Stdev of waveforms for each cluster, using the same indices as in mean
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_clustering_interface 
-   - - validate_waveform_filtering 
-   - - validate_waveform_mean 
-   - - validate_waveform_sd 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/Clustering.html b/doc/+types/+core/Clustering.html deleted file mode 100644 index 7723a88e..00000000 --- a/doc/+types/+core/Clustering.html +++ /dev/null @@ -1,230 +0,0 @@ - - - - - - types.core.Clustering - MATLAB File Help - - - - - - - - - - -
types.core.Clustering - MATLAB File Help
-
types.core.Clustering
-
  Clustering DEPRECATED Clustered spike data, whether from automatic clustering tools (e.g., klustakwik) or as a result of manual sorting.
Class Details
Superclasses: types.core.NWBDataInterface, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
Clustering: Constructor for Clustering

Property Summary
description: REQUIRED (char) Description of clusters or clustering (e.g. cluster 0 is noise, clusters curated using Klusters, etc.)
num: REQUIRED (int32) Cluster number of each event
peak_over_rms: REQUIRED (single) Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric).
times: REQUIRED (double) Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module.
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_description 
-   - - validate_num 
-   - - validate_peak_over_rms 
-   - - validate_times 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/CompassDirection.html b/doc/+types/+core/CompassDirection.html deleted file mode 100644 index 3f933306..00000000 --- a/doc/+types/+core/CompassDirection.html +++ /dev/null @@ -1,192 +0,0 @@ - - - - - - types.core.CompassDirection - MATLAB File Help - - - - - - - - - - -
types.core.CompassDirection - MATLAB File Help
-
types.core.CompassDirection
-
  CompassDirection With a CompassDirection interface, a module publishes a SpatialSeries object representing a floating point value for theta. The SpatialSeries::reference_frame field should indicate what direction corresponds to 0 and which is the direction of rotation (this should be clockwise). The si_unit for the SpatialSeries should be radians or degrees.
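A hedged matnwb-style sketch of publishing head direction through this interface; the reference frame text, sampling rate, and object names are illustrative assumptions:

% theta is assumed to be a 1-D array of angles in radians.
theta_series = types.core.SpatialSeries( ...
    'data', theta, ...
    'reference_frame', 'zero is magnetic north; rotation is clockwise', ...
    'data_unit', 'radians', ...
    'starting_time', 0.0, ...
    'starting_time_rate', 50.0);
compass = types.core.CompassDirection();
compass.spatialseries.set('HeadDirection', theta_series);
behavior.nwbdatainterface.set('CompassDirection', compass);  % `behavior` is an existing ProcessingModule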
Class Details
Superclasses: types.core.NWBDataInterface, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
CompassDirection: Constructor for CompassDirection

Property Summary
spatialseries: (SpatialSeries) SpatialSeries object containing direction of gaze travel.
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_spatialseries 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/CorrectedImageStack.html b/doc/+types/+core/CorrectedImageStack.html deleted file mode 100644 index d66698f0..00000000 --- a/doc/+types/+core/CorrectedImageStack.html +++ /dev/null @@ -1,218 +0,0 @@ - - - - - - types.core.CorrectedImageStack - MATLAB File Help - - - - - - - - - - -
types.core.CorrectedImageStack - MATLAB File Help
-
types.core.CorrectedImageStack
-
CorrectedImageStack Results from motion correction of an image stack.
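A rough matnwb-style sketch, assuming the raw imaging data already exists at /acquisition/TwoPhotonSeries; variable names, rates, and the path are illustrative, and a CorrectedImageStack normally sits inside a MotionCorrection interface in an 'ophys' processing module:

corrected = types.core.ImageSeries( ...
    'data', corrected_stack, ...              % motion-corrected frame stack
    'data_unit', 'n/a', ...
    'starting_time', 0.0, 'starting_time_rate', 30.0);
xy_shift = types.core.TimeSeries( ...
    'data', frame_shifts, ...                 % per-frame x,y displacements
    'data_unit', 'pixels', ...
    'starting_time', 0.0, 'starting_time_rate', 30.0);
stack = types.core.CorrectedImageStack( ...
    'corrected', corrected, ...
    'original', types.untyped.SoftLink('/acquisition/TwoPhotonSeries'), ...
    'xy_translation', xy_shift);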
Class Details
Superclasses: types.core.NWBDataInterface, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
CorrectedImageStack: Constructor for CorrectedImageStack

Property Summary
corrected: REQUIRED (ImageSeries) Image stack with frames shifted to the common coordinates.
original: ImageSeries
xy_translation: REQUIRED (TimeSeries) Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image.
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_corrected 
-   - - validate_original 
-   - - validate_xy_translation 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/CurrentClampSeries.html b/doc/+types/+core/CurrentClampSeries.html deleted file mode 100644 index b836e728..00000000 --- a/doc/+types/+core/CurrentClampSeries.html +++ /dev/null @@ -1,459 +0,0 @@ - - - - - - types.core.CurrentClampSeries - MATLAB File Help - - - - - - - - - - -
types.core.CurrentClampSeries - MATLAB File Help
-
types.core.CurrentClampSeries
-
  CurrentClampSeries Voltage data from an intracellular current-clamp recording. A corresponding CurrentClampStimulusSeries (stored separately as a stimulus) is used to store the current injected.
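A minimal matnwb-style sketch for one recorded sweep; the electrode path, gain, sampling rate, and names are illustrative assumptions, and the IntracellularElectrode is assumed to have been added to the file already:

ccs = types.core.CurrentClampSeries( ...
    'data', voltage, ...                      % acquired membrane potential trace
    'electrode', types.untyped.SoftLink('/general/intracellular_ephys/elec0'), ...
    'stimulus_description', 'square current step', ...
    'gain', 1.0, ...
    'sweep_number', uint32(1), ...
    'starting_time', 0.0, ...
    'starting_time_rate', 20000.0);
nwb.acquisition.set('ccs_sweep_01', ccs);     % assumes an existing NwbFile object `nwb`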
Class Details
Superclasses: types.core.PatchClampSeries, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
CurrentClampSeries: Constructor for CurrentClampSeries

Property Summary
bias_current(single) Bias current, in amps. 
bridge_balance(single) Bridge balance, in ohms. 
capacitance_compensation(single) Capacitance compensation, in farads. 
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value - of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.  -
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
electrodeIntracellularElectrode 
gain(single) Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). 
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
stimulus_description(char) Protocol/stimulus name for this patch-clamp dataset. 
sweep_number(uint32) Sweep number, allows to group different PatchClampSeries together. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_bias_current 
-   - - validate_bridge_balance 
-   - - validate_capacitance_compensation 
-   - - validate_comments 
-   - - validate_control 
-   - - validate_control_description 
-   - - validate_data 
-   - - validate_data_continuity 
-   - - validate_data_conversion 
-   - - validate_data_offset 
-   - - validate_data_resolution 
-   - - validate_data_unit 
-   - - validate_description 
-   - - validate_electrode 
-   - - validate_gain 
-   - - validate_starting_time 
-   - - validate_starting_time_rate 
-   - - validate_stimulus_description 
-   - - validate_sweep_number 
-   - - validate_timestamps 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/CurrentClampStimulusSeries.html b/doc/+types/+core/CurrentClampStimulusSeries.html deleted file mode 100644 index 8dd09f5b..00000000 --- a/doc/+types/+core/CurrentClampStimulusSeries.html +++ /dev/null @@ -1,423 +0,0 @@ - - - - - - types.core.CurrentClampStimulusSeries - MATLAB File Help - - - - - - - - - - -
types.core.CurrentClampStimulusSeries - MATLAB File Help
-
types.core.CurrentClampStimulusSeries
-
  CurrentClampStimulusSeries Stimulus current applied during current clamp recording.
Class Details
Superclasses: types.core.PatchClampSeries, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
CurrentClampStimulusSeries: Constructor for CurrentClampStimulusSeries

Property Summary
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value - of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.  -
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
electrodeIntracellularElectrode 
gain(single) Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). 
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
stimulus_description(char) Protocol/stimulus name for this patch-clamp dataset. 
sweep_number(uint32) Sweep number, allows to group different PatchClampSeries together. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_comments 
-   - - validate_control 
-   - - validate_control_description 
-   - - validate_data 
-   - - validate_data_continuity 
-   - - validate_data_conversion 
-   - - validate_data_offset 
-   - - validate_data_resolution 
-   - - validate_data_unit 
-   - - validate_description 
-   - - validate_electrode 
-   - - validate_gain 
-   - - validate_starting_time 
-   - - validate_starting_time_rate 
-   - - validate_stimulus_description 
-   - - validate_sweep_number 
-   - - validate_timestamps 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/DecompositionSeries.html b/doc/+types/+core/DecompositionSeries.html deleted file mode 100644 index f7bcc6d9..00000000 --- a/doc/+types/+core/DecompositionSeries.html +++ /dev/null @@ -1,425 +0,0 @@ - - - - - - types.core.DecompositionSeries - MATLAB File Help - - - - - - - - - - -
types.core.DecompositionSeries - MATLAB File Help
-
types.core.DecompositionSeries
-
  DecompositionSeries Spectral analysis of a time series, e.g. of an LFP or a speech signal.
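A hedged matnwb-style sketch, assuming the source ElectricalSeries already exists at /acquisition/ElectricalSeries; the band names, rates, and bands-table layout are illustrative:

bands = types.hdmf_common.DynamicTable( ...
    'description', 'frequency bands used for the decomposition', ...
    'colnames', {'band_name'}, ...
    'band_name', types.hdmf_common.VectorData( ...
        'description', 'name of the band', 'data', {'theta'; 'gamma'}), ...
    'id', types.hdmf_common.ElementIdentifiers('data', int64([0; 1])));
decomp = types.core.DecompositionSeries( ...
    'data', band_power, ...                   % e.g. time x channel x band array
    'metric', 'power', ...
    'bands', bands, ...
    'source_timeseries', types.untyped.SoftLink('/acquisition/ElectricalSeries'), ...
    'starting_time', 0.0, 'starting_time_rate', 200.0);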
Class Details
Superclasses: types.core.TimeSeries, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
DecompositionSeries: Constructor for DecompositionSeries

Property Summary
bandsREQUIRED (DynamicTable) Table for describing the bands that this series was generated from. There should be one row in this - table for each band.  -
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value - of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.  -
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
metricREQUIRED (char) The metric used, e.g. phase, amplitude, power. 
source_channels(DynamicTableRegion) DynamicTableRegion pointer to the channels that this decomposition series was generated from. 
source_timeseriesTimeSeries 
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_bands 
-   - - validate_comments 
-   - - validate_control 
-   - - validate_control_description 
-   - - validate_data 
-   - - validate_data_continuity 
-   - - validate_data_conversion 
-   - - validate_data_offset 
-   - - validate_data_resolution 
-   - - validate_data_unit 
-   - - validate_description 
-   - - validate_metric 
-   - - validate_source_channels 
-   - - validate_source_timeseries 
-   - - validate_starting_time 
-   - - validate_starting_time_rate 
-   - - validate_timestamps 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/Device.html b/doc/+types/+core/Device.html deleted file mode 100644 index a9d1b38d..00000000 --- a/doc/+types/+core/Device.html +++ /dev/null @@ -1,204 +0,0 @@ - - - - - - types.core.Device - MATLAB File Help - - - - - - - - - - -
types.core.Device - MATLAB File Help
-
types.core.Device
-
  Device Metadata about a data acquisition device, e.g., recording system, electrode, microscope.
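A minimal matnwb sketch; the device name and description are illustrative:

device = types.core.Device( ...
    'description', '64-channel silicon probe', ...
    'manufacturer', 'Acme Probes');
nwb.general_devices.set('probe0', device);    % assumes an existing NwbFile object `nwb`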
Class Details
Superclasses: types.core.NWBContainer, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
Device: Constructor for Device

Property Summary
description: (char) Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text.
manufacturer: (char) The name of the manufacturer of the device.
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_description 
-   - - validate_manufacturer 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/DfOverF.html b/doc/+types/+core/DfOverF.html deleted file mode 100644 index 405d1119..00000000 --- a/doc/+types/+core/DfOverF.html +++ /dev/null @@ -1,192 +0,0 @@ - - - - - - types.core.DfOverF - MATLAB File Help - - - - - - - - - - -
types.core.DfOverF - MATLAB File Help
-
types.core.DfOverF
-
  DfOverF dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes).
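A rough matnwb-style sketch, assuming a PlaneSegmentation table already exists at the path shown; variable names, the ROI count, and the frame rate are illustrative:

roi_region = types.hdmf_common.DynamicTableRegion( ...
    'table', types.untyped.ObjectView('/processing/ophys/ImageSegmentation/PlaneSegmentation'), ...
    'description', 'all ROIs', ...
    'data', (0:nRois-1)');
dff_series = types.core.RoiResponseSeries( ...
    'data', dff, ...                          % dF/F values, one trace per ROI
    'data_unit', 'n/a', ...
    'rois', roi_region, ...
    'starting_time', 0.0, 'starting_time_rate', 30.0);
dfoverf = types.core.DfOverF();
dfoverf.roiresponseseries.set('RoiResponseSeries', dff_series);
ophys.nwbdatainterface.set('DfOverF', dfoverf);  % `ophys` is an existing ProcessingModule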
Class Details
Superclasses: types.core.NWBDataInterface, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
DfOverF: Constructor for DfOverF

Property Summary
roiresponseseries: REQUIRED (RoiResponseSeries) RoiResponseSeries object(s) containing dF/F for a ROI.
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_roiresponseseries 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/ElectricalSeries.html b/doc/+types/+core/ElectricalSeries.html deleted file mode 100644 index 4202d853..00000000 --- a/doc/+types/+core/ElectricalSeries.html +++ /dev/null @@ -1,427 +0,0 @@ - - - - - - types.core.ElectricalSeries - MATLAB File Help - - - - - - - - - - -
types.core.ElectricalSeries - MATLAB File Help
-
types.core.ElectricalSeries
-
  ElectricalSeries A time series of acquired voltage data from extracellular recordings. The data field is an int or float array storing data in volts. The first dimension should always represent time. The second dimension, if present, should represent channels.
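A minimal matnwb-style sketch, assuming the electrodes table has already been built under /general/extracellular_ephys/electrodes; channel count, rate, and names are illustrative:

electrode_region = types.hdmf_common.DynamicTableRegion( ...
    'table', types.untyped.ObjectView('/general/extracellular_ephys/electrodes'), ...
    'description', 'all electrodes', ...
    'data', (0:nChannels-1)');
raw = types.core.ElectricalSeries( ...
    'data', data, ...                         % voltage samples; check matnwb's dimension-ordering conventions
    'electrodes', electrode_region, ...
    'starting_time', 0.0, ...
    'starting_time_rate', 30000.0);
nwb.acquisition.set('ElectricalSeries', raw); % assumes an existing NwbFile object `nwb`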
Class Details
Superclasses: types.core.TimeSeries, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
ElectricalSeries: Constructor for ElectricalSeries

Property Summary
channel_conversion(single) Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis - (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data - values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and - per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data - acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for - all channels.  -
channel_conversion_axis(int32) The zero-indexed axis of the 'data' dataset that the channel-specific conversion factor corresponds to. This value - is fixed to 1.  -
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value - of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.  -
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
electrodesREQUIRED (DynamicTableRegion) DynamicTableRegion pointer to the electrodes that this time series was generated from. 
filtering(char) Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered - data (also known as AP Band), then this value could be "High-pass 4-pole Bessel filter at 500 Hz". If this ElectricalSeries - represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be "Low-pass filter at 300 - Hz". If a non-standard filter type is used, provide as much detail about the filter properties as possible.  -
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_channel_conversion 
-   - - validate_comments 
-   - - validate_control 
-   - - validate_control_description 
-   - - validate_data 
-   - - validate_data_continuity 
-   - - validate_data_conversion 
-   - - validate_data_offset 
-   - - validate_data_resolution 
-   - - validate_data_unit 
-   - - validate_description 
-   - - validate_electrodes 
-   - - validate_filtering 
-   - - validate_starting_time 
-   - - validate_starting_time_rate 
-   - - validate_timestamps 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/ElectrodeGroup.html b/doc/+types/+core/ElectrodeGroup.html deleted file mode 100644 index 2a0e6320..00000000 --- a/doc/+types/+core/ElectrodeGroup.html +++ /dev/null @@ -1,230 +0,0 @@ - - - - - - types.core.ElectrodeGroup - MATLAB File Help - - - - - - - - - - -
types.core.ElectrodeGroup - MATLAB File Help
-
types.core.ElectrodeGroup
-
  ElectrodeGroup A physical grouping of electrodes, e.g. a shank of an array.
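A minimal matnwb sketch, assuming a Device named 'probe0' was already added under /general/devices; the group name and location are illustrative:

shank = types.core.ElectrodeGroup( ...
    'description', 'shank 0 of the 64-channel probe', ...
    'location', 'CA1', ...
    'device', types.untyped.SoftLink('/general/devices/probe0'));
nwb.general_extracellular_ephys.set('shank0', shank);  % assumes an existing NwbFile object `nwb`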
Class Details
Superclasses: types.core.NWBContainer, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
ElectrodeGroup: Constructor for ElectrodeGroup

Property Summary
description: (char) Description of this electrode group.
device: Device
location: (char) Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible.
position: (Table with columns: (x = single, y = single, z = single)) stereotaxic or common framework coordinates
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_description 
-   - - validate_device 
-   - - validate_location 
-   - - validate_position 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/EventDetection.html b/doc/+types/+core/EventDetection.html deleted file mode 100644 index 87587d1b..00000000 --- a/doc/+types/+core/EventDetection.html +++ /dev/null @@ -1,237 +0,0 @@ - - - - - - types.core.EventDetection - MATLAB File Help - - - - - - - - - - -
types.core.EventDetection - MATLAB File Help
-
types.core.EventDetection
-
  EventDetection Detected spike events from voltage trace(s).
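A minimal matnwb-style sketch, assuming the raw trace already exists at /acquisition/ElectricalSeries; the threshold text, indices, and times are illustrative:

detected = types.core.EventDetection( ...
    'detection_method', 'voltage threshold at -40 mV', ...
    'source_electricalseries', types.untyped.SoftLink('/acquisition/ElectricalSeries'), ...
    'source_idx', int32([990; 2480; 4095]), ...   % zero-based indices into the source data
    'times', [0.033; 0.0827; 0.1365]);
ecephys.nwbdatainterface.set('EventDetection', detected);  % `ecephys` is an existing ProcessingModule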
Class Details
Superclasses: types.core.NWBDataInterface, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
EventDetection: Constructor for EventDetection

Property Summary
detection_method: REQUIRED (char) Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values.
source_electricalseries: ElectricalSeries
source_idx: REQUIRED (int32) Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. 'description' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data.
times: REQUIRED (double) Timestamps of events, in seconds.
times_unit: (char) Unit of measurement for event times, which is fixed to 'seconds'.
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_detection_method 
-   - - validate_source_electricalseries 
-   - - validate_source_idx 
-   - - validate_times 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+core/EventWaveform.html b/doc/+types/+core/EventWaveform.html deleted file mode 100644 index a77edc19..00000000 --- a/doc/+types/+core/EventWaveform.html +++ /dev/null @@ -1,192 +0,0 @@ - - - - - - types.core.EventWaveform - MATLAB File Help - - - - - - - - - - -
types.core.EventWaveform - MATLAB File Help
-
types.core.EventWaveform
-
  EventWaveform Represents either the waveforms of detected events, as extracted from a raw data trace in /acquisition, or the event waveforms that were stored during experiment acquisition.
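A hedged matnwb-style sketch of storing spike snippets; the electrode selection, snippet array layout, and names are illustrative assumptions:

electrode_region = types.hdmf_common.DynamicTableRegion( ...
    'table', types.untyped.ObjectView('/general/extracellular_ephys/electrodes'), ...
    'description', 'electrodes the snippets were cut from', ...
    'data', (0:3)');
snippets = types.core.SpikeEventSeries( ...
    'data', waveform_snippets, ...            % per-event waveform snippets; check dimension conventions
    'electrodes', electrode_region, ...
    'timestamps', spike_times);
waveforms = types.core.EventWaveform();
waveforms.spikeeventseries.set('SpikeEventSeries', snippets);
ecephys.nwbdatainterface.set('EventWaveform', waveforms);  % `ecephys` is an existing ProcessingModule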
Class Details
Superclasses: types.core.NWBDataInterface, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
EventWaveform: Constructor for EventWaveform

Property Summary
spikeeventseries: (SpikeEventSeries) SpikeEventSeries object(s) containing detected spike event waveforms.
- -
Method Summary
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_spikeeventseries 
- -
Event Summary -
-
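A brief usage sketch, assuming an existing types.core.SpikeEventSeries object (here called spike_events) and MatNWB's convention of routing named arguments into the contained object set; the name 'SpikeEventSeries' is an arbitrary placeholder:

    % Hedged sketch: wrap an existing SpikeEventSeries in an EventWaveform container.
    event_waveform = types.core.EventWaveform('SpikeEventSeries', spike_events);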
\ No newline at end of file
diff --git a/doc/+types/+core/ExperimentalConditionsTable.html b/doc/+types/+core/ExperimentalConditionsTable.html
deleted file mode 100644
index 962826a6..00000000
--- a/doc/+types/+core/ExperimentalConditionsTable.html
+++ /dev/null
@@ -1,292 +0,0 @@
types.core.ExperimentalConditionsTable - MATLAB File Help

types.core.ExperimentalConditionsTable

  ExperimentalConditionsTable A table for grouping different intracellular recording repetitions together that belong to the same experimental condition.

Class Details
  Superclasses        types.hdmf_common.DynamicTable, types.untyped.GroupClass
  Sealed              false
  Construct on load   false

Constructor Summary
  ExperimentalConditionsTable   Constructor for ExperimentalConditionsTable

Property Summary
  colnames            (char) The names of the columns in this table. This should be used to specify an order to the columns.
  description         (char) Description of what is in this dynamic table.
  id                  REQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table.
  repetitions         REQUIRED (DynamicTableRegion) A reference to one or more rows in the RepetitionsTable table.
  repetitions_index   REQUIRED (VectorIndex) Index dataset for the repetitions column.
  vectordata          (VectorData) Vector columns, including index columns, of this dynamic table.

Method Summary
  addColumn
  addRow
  addlistener   Add listener for event.
  clear
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  getRow
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  toTable
  validate_colnames
  validate_description
  validate_id
  validate_repetitions
  validate_repetitions_index
  validate_vectordata

Event Summary
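A very rough construction sketch, assuming the name-value constructor; only the description and column order are set here, and rows pointing into the repetitions table would be appended afterwards (for example with addRow), which is not shown:

    % Hedged sketch: an initially empty experimental-conditions grouping table.
    conditions_table = types.core.ExperimentalConditionsTable( ...
        'description', 'Groups repetitions belonging to the same experimental condition.', ...
        'colnames', {'repetitions'});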
\ No newline at end of file
diff --git a/doc/+types/+core/EyeTracking.html b/doc/+types/+core/EyeTracking.html
deleted file mode 100644
index 719a9178..00000000
--- a/doc/+types/+core/EyeTracking.html
+++ /dev/null
@@ -1,192 +0,0 @@
types.core.EyeTracking - MATLAB File Help

types.core.EyeTracking

  EyeTracking Eye-tracking data, representing direction of gaze.

Class Details
  Superclasses        types.core.NWBDataInterface, types.untyped.GroupClass
  Sealed              false
  Construct on load   false

Constructor Summary
  EyeTracking   Constructor for EyeTracking

Property Summary
  spatialseries   (SpatialSeries) SpatialSeries object containing data measuring direction of gaze.

Method Summary
  addlistener   Add listener for event.
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  validate_spatialseries

Event Summary
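A brief usage sketch under the same assumptions as the container classes above; gaze_series stands for an existing types.core.SpatialSeries object:

    % Hedged sketch: store a gaze-direction SpatialSeries under an EyeTracking container.
    eye_tracking = types.core.EyeTracking('SpatialSeries', gaze_series);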
\ No newline at end of file
diff --git a/doc/+types/+core/FeatureExtraction.html b/doc/+types/+core/FeatureExtraction.html
deleted file mode 100644
index 21ec5a93..00000000
--- a/doc/+types/+core/FeatureExtraction.html
+++ /dev/null
@@ -1,228 +0,0 @@
types.core.FeatureExtraction - MATLAB File Help

types.core.FeatureExtraction

  FeatureExtraction Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source.

Class Details
  Superclasses        types.core.NWBDataInterface, types.untyped.GroupClass
  Sealed              false
  Construct on load   false

Constructor Summary
  FeatureExtraction   Constructor for FeatureExtraction

Property Summary
  description   REQUIRED (char) Description of features (eg, 'PC1') for each of the extracted features.
  electrodes    REQUIRED (DynamicTableRegion) DynamicTableRegion pointer to the electrodes that this time series was generated from.
  features      REQUIRED (single) Multi-dimensional array of features extracted from each event.
  times         REQUIRED (double) Times of events that features correspond to (can be a link).

Method Summary
  addlistener   Add listener for event.
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  validate_description
  validate_electrodes
  validate_features
  validate_times

Event Summary
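A minimal sketch, assuming the types.hdmf_common.DynamicTableRegion and types.untyped.ObjectView utilities; the electrodes-table path, electrode indices, and feature values are hypothetical placeholders:

    % Hedged sketch: three PCA features for 100 detected events on 4 electrodes.
    electrode_region = types.hdmf_common.DynamicTableRegion( ...
        'table', types.untyped.ObjectView('/general/extracellular_ephys/electrodes'), ...
        'description', 'electrodes used for feature extraction', ...
        'data', int64([0; 1; 2; 3]));
    feature_extraction = types.core.FeatureExtraction( ...
        'description', {'PC1', 'PC2', 'PC3'}, ...
        'electrodes', electrode_region, ...
        'features', single(rand(100, 4, 3)), ...   % placeholder feature values
        'times', sort(10 * rand(100, 1)));         % placeholder event times, in seconds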
\ No newline at end of file
diff --git a/doc/+types/+core/FilteredEphys.html b/doc/+types/+core/FilteredEphys.html
deleted file mode 100644
index 68c33da5..00000000
--- a/doc/+types/+core/FilteredEphys.html
+++ /dev/null
@@ -1,192 +0,0 @@
types.core.FilteredEphys - MATLAB File Help

types.core.FilteredEphys

  FilteredEphys Electrophysiology data from one or more channels that has been subjected to filtering. Examples of filtered data include Theta and Gamma (LFP has its own interface). FilteredEphys modules publish an ElectricalSeries for each filtered channel or set of channels. The name of each ElectricalSeries is arbitrary but should be informative. The source of the filtered data, whether this is from analysis of another time series or as acquired by hardware, should be noted in each's TimeSeries::description field. There is no assumed 1::1 correspondence between filtered ephys signals and electrodes, as a single signal can apply to many nearby electrodes, and one electrode may have different filtered (e.g., theta and/or gamma) signals represented. Filter properties should be noted in the ElectricalSeries 'filtering' attribute.

Class Details
  Superclasses        types.core.NWBDataInterface, types.untyped.GroupClass
  Sealed              false
  Construct on load   false

Constructor Summary
  FilteredEphys   Constructor for FilteredEphys

Property Summary
  electricalseries   REQUIRED (ElectricalSeries) ElectricalSeries object(s) containing filtered electrophysiology data.

Method Summary
  addlistener   Add listener for event.
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  validate_electricalseries

Event Summary
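A brief usage sketch; theta_series stands for an existing band-pass filtered types.core.ElectricalSeries, and the contained-series name 'theta' is an arbitrary but informative placeholder:

    % Hedged sketch: publish one filtered channel set under a FilteredEphys container.
    filtered_ephys = types.core.FilteredEphys('theta', theta_series);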
\ No newline at end of file
diff --git a/doc/+types/+core/Fluorescence.html b/doc/+types/+core/Fluorescence.html
deleted file mode 100644
index f5cf3dc7..00000000
--- a/doc/+types/+core/Fluorescence.html
+++ /dev/null
@@ -1,192 +0,0 @@
types.core.Fluorescence - MATLAB File Help

types.core.Fluorescence

  Fluorescence Fluorescence information about a region of interest (ROI). Storage hierarchy of fluorescence should be the same as for segmentation (ie, same names for ROIs and for image planes).

Class Details
  Superclasses        types.core.NWBDataInterface, types.untyped.GroupClass
  Sealed              false
  Construct on load   false

Constructor Summary
  Fluorescence   Constructor for Fluorescence

Property Summary
  roiresponseseries   REQUIRED (RoiResponseSeries) RoiResponseSeries object(s) containing fluorescence data for a ROI.

Method Summary
  addlistener   Add listener for event.
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  validate_roiresponseseries

Event Summary
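A brief usage sketch; roi_response_series stands for an existing types.core.RoiResponseSeries object, and the contained-series name is a placeholder:

    % Hedged sketch: store ROI fluorescence traces under a Fluorescence container.
    fluorescence = types.core.Fluorescence('RoiResponseSeries', roi_response_series);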
\ No newline at end of file
diff --git a/doc/+types/+core/GrayscaleImage.html b/doc/+types/+core/GrayscaleImage.html
deleted file mode 100644
index ae3ed87e..00000000
--- a/doc/+types/+core/GrayscaleImage.html
+++ /dev/null
@@ -1,216 +0,0 @@
types.core.GrayscaleImage - MATLAB File Help

types.core.GrayscaleImage

  GrayscaleImage A grayscale image.

Class Details
  Superclasses        types.core.Image, types.untyped.DatasetClass
  Sealed              false
  Construct on load   false

Constructor Summary
  GrayscaleImage   Constructor for GrayscaleImage

Property Summary
  data          REQUIRED any
  description   (char) Description of the image.
  resolution    (single) Pixel resolution of the image, in pixels per centimeter.

Method Summary
  addlistener   Add listener for event.
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  validate_data
  validate_description
  validate_resolution

Event Summary
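A minimal construction sketch, assuming the name-value constructor; the pixel data and resolution below are random placeholders:

    % Hedged sketch: a single 128 x 128 grayscale frame stored as a dataset.
    gray_image = types.core.GrayscaleImage( ...
        'data', randi(255, 128, 128, 'uint8'), ...   % placeholder pixel values
        'description', 'placeholder grayscale image', ...
        'resolution', 70.0);                          % pixels per centimeter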
\ No newline at end of file
diff --git a/doc/+types/+core/IZeroClampSeries.html b/doc/+types/+core/IZeroClampSeries.html
deleted file mode 100644
index f5eb34f4..00000000
--- a/doc/+types/+core/IZeroClampSeries.html
+++ /dev/null
@@ -1,459 +0,0 @@
types.core.IZeroClampSeries - MATLAB File Help

types.core.IZeroClampSeries

  IZeroClampSeries Voltage data from an intracellular recording when all current and amplifier settings are off (i.e., CurrentClampSeries fields will be zero). There is no CurrentClampStimulusSeries associated with an IZero series because the amplifier is disconnected and no stimulus can reach the cell.

Class Details
  Superclasses        types.core.CurrentClampSeries, types.untyped.GroupClass
  Sealed              false
  Construct on load   false

Constructor Summary
  IZeroClampSeries   Constructor for IZeroClampSeries

Property Summary
  bias_current               (single) Bias current, in amps.
  bridge_balance             (single) Bridge balance, in ohms.
  capacitance_compensation   (single) Capacitance compensation, in farads.
  comments                   (char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.
  control                    (uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
  control_description        (char) Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
  data                       REQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
  data_continuity            (char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.
  data_conversion            (single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.
  data_offset                (single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.
  data_resolution            (single) Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.
  data_unit                  (char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.
  description                (char) Description of the time series.
  electrode                  IntracellularElectrode
  gain                       (single) Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).
  starting_time              (double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
  starting_time_rate         (single) Sampling rate, in Hz.
  starting_time_unit         (char) Unit of measurement for time, which is fixed to 'seconds'.
  stimulus_description       (char) Protocol/stimulus name for this patch-clamp dataset.
  sweep_number               (uint32) Sweep number; allows grouping of different PatchClampSeries together.
  timestamps                 (double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
  timestamps_interval        (int32) Value is '1'
  timestamps_unit            (char) Unit of measurement for timestamps, which is fixed to 'seconds'.

Method Summary
  addlistener   Add listener for event.
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  validate_bias_current
  validate_bridge_balance
  validate_capacitance_compensation
  validate_comments
  validate_control
  validate_control_description
  validate_data
  validate_data_continuity
  validate_data_conversion
  validate_data_offset
  validate_data_resolution
  validate_data_unit
  validate_description
  validate_electrode
  validate_gain
  validate_starting_time
  validate_starting_time_rate
  validate_stimulus_description
  validate_sweep_number
  validate_timestamps

Event Summary
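A minimal construction sketch, assuming the name-value constructor and the types.untyped.SoftLink utility for the electrode link; the electrode path, gain, sampling rate, and voltage values are hypothetical placeholders:

    % Hedged sketch: a 1-second IZero sweep recorded at 20 kHz with amplifier settings off.
    izero_sweep = types.core.IZeroClampSeries( ...
        'data', 0.001 * randn(20000, 1), ...   % placeholder membrane potential trace
        'data_unit', 'volts', ...
        'electrode', types.untyped.SoftLink('/general/intracellular_ephys/electrode_0'), ...
        'gain', 0.02, ...
        'bias_current', 0.0, ...               % all amplifier settings are off
        'bridge_balance', 0.0, ...
        'capacitance_compensation', 0.0, ...
        'stimulus_description', 'N/A', ...
        'starting_time', 0.0, ...
        'starting_time_rate', 20000.0);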
\ No newline at end of file
diff --git a/doc/+types/+core/Image.html b/doc/+types/+core/Image.html
deleted file mode 100644
index b4969735..00000000
--- a/doc/+types/+core/Image.html
+++ /dev/null
@@ -1,216 +0,0 @@
types.core.Image - MATLAB File Help

types.core.Image

  Image An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).

Class Details
  Superclasses        types.core.NWBData, types.untyped.DatasetClass
  Sealed              false
  Construct on load   false

Constructor Summary
  Image   Constructor for Image

Property Summary
  data          REQUIRED any
  description   (char) Description of the image.
  resolution    (single) Pixel resolution of the image, in pixels per centimeter.

Method Summary
  addlistener   Add listener for event.
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  validate_data
  validate_description
  validate_resolution

Event Summary
\ No newline at end of file
diff --git a/doc/+types/+core/ImageMaskSeries.html b/doc/+types/+core/ImageMaskSeries.html
deleted file mode 100644
index a02d0abc..00000000
--- a/doc/+types/+core/ImageMaskSeries.html
+++ /dev/null
@@ -1,460 +0,0 @@
types.core.ImageMaskSeries - MATLAB File Help

types.core.ImageMaskSeries

  ImageMaskSeries An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed.

Class Details
  Superclasses        types.core.ImageSeries, types.untyped.GroupClass
  Sealed              false
  Construct on load   false

Constructor Summary
  ImageMaskSeries   Constructor for ImageMaskSeries

Property Summary
  comments                       (char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.
  control                        (uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
  control_description            (char) Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
  data                           REQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
  data_continuity                (char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.
  data_conversion                (single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.
  data_offset                    (single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.
  data_resolution                (single) Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.
  data_unit                      (char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.
  description                    (char) Description of the time series.
  device                         Device
  dimension                      (int32) Number of pixels on x, y, (and z) axes.
  external_file                  (char) Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.
  external_file_starting_frame   (int32) Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].
  format                         (char) Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.
  masked_imageseries             ImageSeries
  starting_time                  (double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
  starting_time_rate             (single) Sampling rate, in Hz.
  starting_time_unit             (char) Unit of measurement for time, which is fixed to 'seconds'.
  timestamps                     (double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
  timestamps_interval            (int32) Value is '1'
  timestamps_unit                (char) Unit of measurement for timestamps, which is fixed to 'seconds'.

Method Summary
  addlistener   Add listener for event.
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  validate_comments
  validate_control
  validate_control_description
  validate_data
  validate_data_continuity
  validate_data_conversion
  validate_data_offset
  validate_data_resolution
  validate_data_unit
  validate_description
  validate_device
  validate_dimension
  validate_external_file
  validate_external_file_starting_frame
  validate_format
  validate_masked_imageseries
  validate_starting_time
  validate_starting_time_rate
  validate_timestamps

Event Summary
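A minimal construction sketch, assuming the name-value constructor and types.untyped.SoftLink for the masked_imageseries link; the mask dimensions, link path, and timestamps are hypothetical placeholders:

    % Hedged sketch: three RGBA mask frames applied to a presented ImageSeries.
    mask_series = types.core.ImageMaskSeries( ...
        'data', single(rand(64, 64, 4, 3)), ...   % placeholder mask values
        'data_unit', 'n.a.', ...
        'masked_imageseries', types.untyped.SoftLink('/stimulus/presentation/visual_stimulus'), ...
        'timestamps', [0.0; 2.5; 5.0]);           % start time of each mask, in seconds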
\ No newline at end of file
diff --git a/doc/+types/+core/ImageReferences.html b/doc/+types/+core/ImageReferences.html
deleted file mode 100644
index 846ee11d..00000000
--- a/doc/+types/+core/ImageReferences.html
+++ /dev/null
@@ -1,192 +0,0 @@
types.core.ImageReferences - MATLAB File Help

types.core.ImageReferences

  ImageReferences Ordered dataset of references to Image objects.

Class Details
  Superclasses        types.core.NWBData, types.untyped.DatasetClass
  Sealed              false
  Construct on load   false

Constructor Summary
  ImageReferences   Constructor for ImageReferences

Property Summary
  data   REQUIRED any

Method Summary
  addlistener     Add listener for event.
  delete          Delete a handle object.
  eq              == (EQ) Test handle equality.
  export
  findobj         Find objects matching specified conditions.
  findprop        Find property of MATLAB handle object.
  ge              >= (GE) Greater than or equal relation for handles.
  gt              > (GT) Greater than relation for handles.
  isvalid         (Sealed) Test handle validity.
  le              <= (LE) Less than or equal relation for handles.
  listener        Add listener for event without binding the listener to the source object.
  loadAll
  lt              < (LT) Less than relation for handles.
  ne              ~= (NE) Not equal relation for handles.
  notify          Notify listeners of event.
  validate_data   Reference to type `Image`

Event Summary
\ No newline at end of file
diff --git a/doc/+types/+core/ImageSegmentation.html b/doc/+types/+core/ImageSegmentation.html
deleted file mode 100644
index 99f40f29..00000000
--- a/doc/+types/+core/ImageSegmentation.html
+++ /dev/null
@@ -1,192 +0,0 @@
types.core.ImageSegmentation - MATLAB File Help

types.core.ImageSegmentation

  ImageSegmentation Stores pixels in an image that represent different regions of interest (ROIs) or masks. All segmentation for a given imaging plane is stored together, with storage for multiple imaging planes (masks) supported. Each ROI is stored in its own subgroup, with the ROI group containing both a 2D mask and a list of pixels that make up this mask. Segments can also be used for masking neuropil. If segmentation is allowed to change with time, a new imaging plane (or module) is required and ROI names should remain consistent between them.

Class Details
  Superclasses        types.core.NWBDataInterface, types.untyped.GroupClass
  Sealed              false
  Construct on load   false

Constructor Summary
  ImageSegmentation   Constructor for ImageSegmentation

Property Summary
  planesegmentation   REQUIRED (PlaneSegmentation) Results from image segmentation of a specific imaging plane.

Method Summary
  addlistener   Add listener for event.
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  validate_planesegmentation

Event Summary
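A brief usage sketch; plane_segmentation stands for an existing types.core.PlaneSegmentation object, and the contained-group name is a placeholder:

    % Hedged sketch: store segmentation results for one imaging plane.
    img_segmentation = types.core.ImageSegmentation('PlaneSegmentation', plane_segmentation);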
\ No newline at end of file
diff --git a/doc/+types/+core/ImageSeries.html b/doc/+types/+core/ImageSeries.html
deleted file mode 100644
index 68f05853..00000000
--- a/doc/+types/+core/ImageSeries.html
+++ /dev/null
@@ -1,448 +0,0 @@
types.core.ImageSeries - MATLAB File Help

types.core.ImageSeries

  ImageSeries General image data that is common between acquisition and stimulus time series. Sometimes the image data is stored in the file in a raw format while other times it will be stored as a series of external image files in the host file system. The data field will either be binary data, if the data is stored in the NWB file, or empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z].

Class Details
  Superclasses        types.core.TimeSeries, types.untyped.GroupClass
  Sealed              false
  Construct on load   false

Constructor Summary
  ImageSeries   Constructor for ImageSeries

Property Summary
  comments                       (char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.
  control                        (uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
  control_description            (char) Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
  data                           REQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
  data_continuity                (char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.
  data_conversion                (single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.
  data_offset                    (single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.
  data_resolution                (single) Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.
  data_unit                      (char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.
  description                    (char) Description of the time series.
  device                         Device
  dimension                      (int32) Number of pixels on x, y, (and z) axes.
  external_file                  (char) Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.
  external_file_starting_frame   (int32) Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0].
  format                         (char) Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.
  starting_time                  (double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
  starting_time_rate             (single) Sampling rate, in Hz.
  starting_time_unit             (char) Unit of measurement for time, which is fixed to 'seconds'.
  timestamps                     (double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
  timestamps_interval            (int32) Value is '1'
  timestamps_unit                (char) Unit of measurement for timestamps, which is fixed to 'seconds'.

Method Summary
  addlistener   Add listener for event.
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  validate_comments
  validate_control
  validate_control_description
  validate_data
  validate_data_continuity
  validate_data_conversion
  validate_data_offset
  validate_data_resolution
  validate_data_unit
  validate_description
  validate_device
  validate_dimension
  validate_external_file
  validate_external_file_starting_frame
  validate_format
  validate_starting_time
  validate_starting_time_rate
  validate_timestamps

Event Summary
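A minimal construction sketch, assuming the name-value constructor; the file names, frame counts, and frame rate below are hypothetical placeholders, and pixel data is left out because the frames live in the external files:

    % Hedged sketch: an ImageSeries that points at two external movie files
    % instead of storing pixel data inside the NWB file.
    behavior_video = types.core.ImageSeries( ...
        'description', 'Behavior video acquired by an external camera.', ...
        'format', 'external', ...
        'external_file', {'video_part1.avi'; 'video_part2.avi'}, ...
        'external_file_starting_frame', int32([0; 1200]), ...   % first frame index of each file
        'data_unit', 'n.a.', ...
        'starting_time', 0.0, ...
        'starting_time_rate', 30.0);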
\ No newline at end of file
diff --git a/doc/+types/+core/Images.html b/doc/+types/+core/Images.html
deleted file mode 100644
index 1666f6dc..00000000
--- a/doc/+types/+core/Images.html
+++ /dev/null
@@ -1,218 +0,0 @@
types.core.Images - MATLAB File Help

types.core.Images

  Images A collection of images with an optional way to specify the order of the images using the "order_of_images" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.

Class Details
  Superclasses        types.core.NWBDataInterface, types.untyped.GroupClass
  Sealed              false
  Construct on load   false

Constructor Summary
  Images   Constructor for Images

Property Summary
  description       (char) Description of this collection of images.
  image             REQUIRED (Image) Images stored in this collection.
  order_of_images   (ImageReferences) Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.

Method Summary
  addlistener   Add listener for event.
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  validate_description
  validate_image
  validate_order_of_images

Event Summary
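A rough construction sketch; the image names and pixel data are placeholders, the named-argument routing of Image objects into the contained set is assumed, and types.untyped.ObjectView is assumed here to accept in-memory objects for the order_of_images references:

    % Hedged sketch: a two-image collection with an explicit presentation order.
    img_a = types.core.GrayscaleImage('data', randi(255, 64, 64, 'uint8'), 'description', 'frame A');
    img_b = types.core.GrayscaleImage('data', randi(255, 64, 64, 'uint8'), 'description', 'frame B');
    image_collection = types.core.Images( ...
        'description', 'Placeholder collection of presented images.', ...
        'frame_A', img_a, ...
        'frame_B', img_b, ...
        'order_of_images', types.core.ImageReferences( ...
            'data', [types.untyped.ObjectView(img_a), types.untyped.ObjectView(img_b)]));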
\ No newline at end of file
diff --git a/doc/+types/+core/ImagingPlane.html b/doc/+types/+core/ImagingPlane.html
deleted file mode 100644
index 7451b3db..00000000
--- a/doc/+types/+core/ImagingPlane.html
+++ /dev/null
@@ -1,385 +0,0 @@
types.core.ImagingPlane - MATLAB File Help

types.core.ImagingPlane

  ImagingPlane An imaging plane and its metadata.

Class Details
  Superclasses        types.core.NWBContainer, types.untyped.GroupClass
  Sealed              false
  Construct on load   false

Constructor Summary
  ImagingPlane   Constructor for ImagingPlane

Property Summary
  description           (char) Description of the imaging plane.
  device                Device
  excitation_lambda     REQUIRED (single) Excitation wavelength, in nm.
  grid_spacing          (single) Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.
  grid_spacing_unit     (char) Measurement units for grid_spacing. The default value is 'meters'.
  imaging_rate          (single) Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.
  indicator             REQUIRED (char) Calcium indicator.
  location              REQUIRED (char) Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.
  manifold              (single) DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.
  manifold_conversion   (single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.
  manifold_unit         (char) Base unit of measurement for working with the data. The default value is 'meters'.
  opticalchannel        REQUIRED (OpticalChannel) An optical channel used to record from an imaging plane.
  origin_coords         (single) Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).
  origin_coords_unit    (char) Measurement units for origin_coords. The default value is 'meters'.
  reference_frame       (char) Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = "Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral)."

Method Summary
  addlistener   Add listener for event.
  delete        Delete a handle object.
  eq            == (EQ) Test handle equality.
  export
  findobj       Find objects matching specified conditions.
  findprop      Find property of MATLAB handle object.
  ge            >= (GE) Greater than or equal relation for handles.
  gt            > (GT) Greater than relation for handles.
  isvalid       (Sealed) Test handle validity.
  le            <= (LE) Less than or equal relation for handles.
  listener      Add listener for event without binding the listener to the source object.
  loadAll
  lt            < (LT) Less than relation for handles.
  ne            ~= (NE) Not equal relation for handles.
  notify        Notify listeners of event.
  validate_description
  validate_device
  validate_excitation_lambda
  validate_grid_spacing
  validate_grid_spacing_unit
  validate_imaging_rate
  validate_indicator
  validate_location
  validate_manifold
  validate_manifold_conversion
  validate_manifold_unit
  validate_opticalchannel
  validate_origin_coords
  validate_origin_coords_unit
  validate_reference_frame

Event Summary
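A minimal construction sketch, assuming the name-value constructor, types.untyped.SoftLink for the device link, and that the 'OpticalChannel' argument names the contained optical channel; the device path, wavelengths, and anatomical details are hypothetical placeholders:

    % Hedged sketch: a single two-photon imaging plane with one optical channel.
    optical_channel = types.core.OpticalChannel( ...
        'description', 'green channel', ...
        'emission_lambda', 510.0);                   % nm
    imaging_plane = types.core.ImagingPlane( ...
        'OpticalChannel', optical_channel, ...
        'description', 'hypothetical imaging plane in visual cortex', ...
        'device', types.untyped.SoftLink('/general/devices/two_photon_microscope'), ...
        'excitation_lambda', 920.0, ...              % nm
        'imaging_rate', 30.0, ...                    % Hz
        'indicator', 'GCaMP6f', ...
        'location', 'primary visual cortex', ...
        'grid_spacing', [1.0e-6, 1.0e-6], ...
        'grid_spacing_unit', 'meters');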
\ No newline at end of file
diff --git a/doc/+types/+core/ImagingRetinotopy.html b/doc/+types/+core/ImagingRetinotopy.html
deleted file mode 100644
index 81f2357b..00000000
--- a/doc/+types/+core/ImagingRetinotopy.html
+++ /dev/null
@@ -1,560 +0,0 @@
types.core.ImagingRetinotopy - MATLAB File Help
-
types.core.ImagingRetinotopy
-
  ImagingRetinotopy Intrinsic signal optical imaging or widefield imaging for measuring retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) of responses to specific stimuli and a combined polarity map from which to identify visual areas. This group does not store the raw responses imaged during retinotopic mapping or the stimuli presented, but rather the resulting phase and power maps after applying a Fourier transform on the averaged responses. Note: for data consistency, all images and arrays are stored in the format [row][column] and [row, col], which equates to [y][x]. Field of view and dimension arrays may appear backward (i.e., y before x).
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.NWBDataInterface, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
ImagingRetinotopyConstructor for ImagingRetinotopy 
- -
Property Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
axis_1_phase_mapREQUIRED (single) Phase response to stimulus on the first measured axis. 
axis_1_phase_map_dimension(int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. 
axis_1_phase_map_field_of_view(single) Size of viewing area, in meters. 
axis_1_phase_map_unit(char) Unit that axis data is stored in (e.g., degrees). 
axis_1_power_map(single) Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum - relative power.  -
axis_1_power_map_dimension(int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. 
axis_1_power_map_field_of_view(single) Size of viewing area, in meters. 
axis_1_power_map_unit(char) Unit that axis data is stored in (e.g., degrees). 
axis_2_phase_mapREQUIRED (single) Phase response to stimulus on the second measured axis. 
axis_2_phase_map_dimension(int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. 
axis_2_phase_map_field_of_view(single) Size of viewing area, in meters. 
axis_2_phase_map_unit(char) Unit that axis data is stored in (e.g., degrees). 
axis_2_power_map(single) Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum - relative power.  -
axis_2_power_map_dimension(int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. 
axis_2_power_map_field_of_view(single) Size of viewing area, in meters. 
axis_2_power_map_unit(char) Unit that axis data is stored in (e.g., degrees). 
axis_descriptionsREQUIRED (char) Two-element array describing the contents of the two response axis fields. Description should be something - like ['altitude', 'azimuth'] or '['radius', 'theta'].  -
focal_depth_image(uint16) Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: - [rows][columns].  -
focal_depth_image_bits_per_pixel(int32) Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value. 
focal_depth_image_dimension(int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. 
focal_depth_image_field_of_view(single) Size of viewing area, in meters. 
focal_depth_image_focal_depth(single) Focal depth offset, in meters. 
focal_depth_image_format(char) Format of image. Right now only 'raw' is supported. 
sign_map(single) Sine of the angle between the direction of the gradient in axis_1 and axis_2. 
sign_map_dimension(int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. 
sign_map_field_of_view(single) Size of viewing area, in meters. 
vasculature_imageREQUIRED (uint16) Gray-scale anatomical image of cortical surface. Array structure: [rows][columns] 
vasculature_image_bits_per_pixel(int32) Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value 
vasculature_image_dimension(int32) Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width. 
vasculature_image_field_of_view(single) Size of viewing area, in meters. 
vasculature_image_format(char) Format of image. Right now only 'raw' is supported. 
Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: export, loadAll.
Validators: validate_axis_1_phase_map, validate_axis_1_phase_map_dimension, validate_axis_1_phase_map_field_of_view, validate_axis_1_phase_map_unit, validate_axis_1_power_map, validate_axis_1_power_map_dimension, validate_axis_1_power_map_field_of_view, validate_axis_1_power_map_unit, validate_axis_2_phase_map, validate_axis_2_phase_map_dimension, validate_axis_2_phase_map_field_of_view, validate_axis_2_phase_map_unit, validate_axis_2_power_map, validate_axis_2_power_map_dimension, validate_axis_2_power_map_field_of_view, validate_axis_2_power_map_unit, validate_axis_descriptions, validate_focal_depth_image, validate_focal_depth_image_bits_per_pixel, validate_focal_depth_image_dimension, validate_focal_depth_image_field_of_view, validate_focal_depth_image_focal_depth, validate_focal_depth_image_format, validate_sign_map, validate_sign_map_dimension, validate_sign_map_field_of_view, validate_vasculature_image, validate_vasculature_image_bits_per_pixel, validate_vasculature_image_dimension, validate_vasculature_image_field_of_view, validate_vasculature_image_format.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/IndexSeries.html b/doc/+types/+core/IndexSeries.html
deleted file mode 100644
index 4b986248..00000000
--- a/doc/+types/+core/IndexSeries.html
+++ /dev/null
@@ -1,399 +0,0 @@
types.core.IndexSeries - MATLAB File Help

types.core.IndexSeries
  IndexSeries Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed.
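A hedged sketch of the pattern described above: the data values index frames of a previously created image stack, and timestamps say when each frame was shown. The variable names, index values, and the SoftLink usage are illustrative assumptions:

frameOrder = types.core.IndexSeries( ...
    'data', uint32([0 2 1 2 0]), ...                  % indices into the referenced image stack
    'timestamps', [0.0 0.5 1.0 1.5 2.0], ...          % seconds at which each indexed frame was displayed
    'indexed_timeseries', types.untyped.SoftLink(imageSeries));   % imageSeries: an existing types.core.ImageSeries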
Class Details
Superclasses: types.core.TimeSeries, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
IndexSeries - Constructor for IndexSeries

Property Summary
comments - (char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.
control - (uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
control_description - (char) Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
data - REQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
data_continuity - (char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.
data_conversion - (single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.
data_offset - (single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.
data_resolution - (single) Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.
data_unit - (char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.
description - (char) Description of the time series.
indexed_images - (Images)
indexed_timeseries - (ImageSeries)
starting_time - (double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
starting_time_rate - (single) Sampling rate, in Hz.
starting_time_unit - (char) Unit of measurement for time, which is fixed to 'seconds'.
timestamps - (double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
timestamps_interval - (int32) Value is '1'.
timestamps_unit - (char) Unit of measurement for timestamps, which is fixed to 'seconds'.
Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: export, loadAll.
Validators: validate_comments, validate_control, validate_control_description, validate_data, validate_data_continuity, validate_data_conversion, validate_data_offset, validate_data_resolution, validate_data_unit, validate_description, validate_indexed_images, validate_indexed_timeseries, validate_starting_time, validate_starting_time_rate, validate_timestamps.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/IntervalSeries.html b/doc/+types/+core/IntervalSeries.html
deleted file mode 100644
index c63179da..00000000
--- a/doc/+types/+core/IntervalSeries.html
+++ /dev/null
@@ -1,375 +0,0 @@
types.core.IntervalSeries - MATLAB File Help

types.core.IntervalSeries
  IntervalSeries Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way.
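For example (a hedged sketch; the values are hypothetical), two intervals spanning 2.0-3.5 s and 6.0-7.2 s would be encoded as paired start (+1) and stop (-1) events:

intervals = types.core.IntervalSeries( ...
    'data', int8([1 -1 1 -1]), ...            % >0 marks an interval start, <0 marks an interval end
    'timestamps', [2.0 3.5 6.0 7.2]);         % seconds: boundaries of the two intervals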
Class Details
Superclasses: types.core.TimeSeries, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
IntervalSeries - Constructor for IntervalSeries

Property Summary
comments - (char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.
control - (uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.
control_description - (char) Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.
data - REQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.
data_continuity - (char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.
data_conversion - (single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.
data_offset - (single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.
data_resolution - (single) Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.
data_unit - (char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.
description - (char) Description of the time series.
starting_time - (double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.
starting_time_rate - (single) Sampling rate, in Hz.
starting_time_unit - (char) Unit of measurement for time, which is fixed to 'seconds'.
timestamps - (double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.
timestamps_interval - (int32) Value is '1'.
timestamps_unit - (char) Unit of measurement for timestamps, which is fixed to 'seconds'.
Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: export, loadAll.
Validators: validate_comments, validate_control, validate_control_description, validate_data, validate_data_continuity, validate_data_conversion, validate_data_offset, validate_data_resolution, validate_data_unit, validate_description, validate_starting_time, validate_starting_time_rate, validate_timestamps.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/IntracellularElectrode.html b/doc/+types/+core/IntracellularElectrode.html
deleted file mode 100644
index 79d21fdb..00000000
--- a/doc/+types/+core/IntracellularElectrode.html
+++ /dev/null
@@ -1,290 +0,0 @@
types.core.IntracellularElectrode - MATLAB File Help

types.core.IntracellularElectrode
  IntracellularElectrode An intracellular electrode and its metadata.
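A minimal sketch of constructing one (the description, location, and device variable are hypothetical; the SoftLink is the usual MatNWB way of linking to a Device):

electrode = types.core.IntracellularElectrode( ...
    'description', 'whole-cell patch electrode', ...
    'location', 'layer 2/3, primary visual cortex', ...
    'device', types.untyped.SoftLink(device));    % device: an existing types.core.Device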
Class Details
Superclasses: types.core.NWBContainer, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
IntracellularElectrode - Constructor for IntracellularElectrode

Property Summary
cell_id - (char) Unique ID of the cell.
description - REQUIRED (char) Description of electrode (e.g., whole-cell, sharp, etc.).
device - (Device)
filtering - (char) Electrode specific filtering.
initial_access_resistance - (char) Initial access resistance.
location - (char) Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.
resistance - (char) Electrode resistance, in ohms.
seal - (char) Information about seal used for recording.
slice - (char) Information about slice used for recording.
Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: export, loadAll.
Validators: validate_cell_id, validate_description, validate_device, validate_filtering, validate_initial_access_resistance, validate_location, validate_resistance, validate_seal, validate_slice.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/IntracellularElectrodesTable.html b/doc/+types/+core/IntracellularElectrodesTable.html
deleted file mode 100644
index 886d77a5..00000000
--- a/doc/+types/+core/IntracellularElectrodesTable.html
+++ /dev/null
@@ -1,280 +0,0 @@
types.core.IntracellularElectrodesTable - MATLAB File Help

types.core.IntracellularElectrodesTable
  IntracellularElectrodesTable Table for storing intracellular electrode related metadata.
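A hedged single-row sketch, assuming the generic MatNWB DynamicTable building blocks (ElementIdentifiers, VectorData, ObjectView); the names and values are illustrative only:

electrodesTable = types.core.IntracellularElectrodesTable( ...
    'description', 'Table for storing intracellular electrode related metadata.', ...
    'colnames', {'electrode'}, ...
    'id', types.hdmf_common.ElementIdentifiers('data', int64(0)), ...
    'electrode', types.hdmf_common.VectorData( ...
        'description', 'Column for storing the reference to the intracellular electrode.', ...
        'data', types.untyped.ObjectView(electrode)));   % electrode: an existing types.core.IntracellularElectrode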
Class Details
Superclasses: types.hdmf_common.DynamicTable, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
IntracellularElectrodesTable - Constructor for IntracellularElectrodesTable

Property Summary
colnames - (char) The names of the columns in this table. This should be used to specify an order to the columns.
description - (char) Description of what is in this dynamic table.
electrode - REQUIRED (VectorData) Column for storing the reference to the intracellular electrode.
id - REQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table.
vectordata - (VectorData) Vector columns, including index columns, of this dynamic table.
Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: addColumn, addRow, clear, export, getRow, loadAll, toTable.
Validators: validate_colnames, validate_description, validate_electrode, validate_id, validate_vectordata.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/IntracellularRecordingsTable.html b/doc/+types/+core/IntracellularRecordingsTable.html
deleted file mode 100644
index c8ea3022..00000000
--- a/doc/+types/+core/IntracellularRecordingsTable.html
+++ /dev/null
@@ -1,335 +0,0 @@
types.core.IntracellularRecordingsTable - MATLAB File Help

types.core.IntracellularRecordingsTable
  IntracellularRecordingsTable A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. In this case, both the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used.
Class Details
Superclasses: types.hdmf_common.AlignedDynamicTable, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
IntracellularRecordingsTable - Constructor for IntracellularRecordingsTable

Property Summary
categories - (char) The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in the parent group. This attribute should be used to specify an order of categories and the category names must match the names of the corresponding DynamicTable in the group.
colnames - (char) The names of the columns in this table. This should be used to specify an order to the columns.
description - (char) Description of what is in this dynamic table.
dynamictable - (DynamicTable) A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable parent container. The name of the category is given by the name of the DynamicTable and its description by the description attribute of the DynamicTable.
electrodes - REQUIRED (IntracellularElectrodesTable) Table for storing intracellular electrode related metadata.
id - REQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table.
responses - REQUIRED (IntracellularResponsesTable) Table for storing intracellular response related metadata.
stimuli - REQUIRED (IntracellularStimuliTable) Table for storing intracellular stimulus related metadata.
vectordata - (VectorData) Vector columns, including index columns, of this dynamic table.
Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: addColumn, addRow, clear, export, getRow, loadAll, toTable.
Validators: validate_categories, validate_colnames, validate_description, validate_dynamictable, validate_electrodes, validate_id, validate_responses, validate_stimuli, validate_vectordata.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/IntracellularResponsesTable.html b/doc/+types/+core/IntracellularResponsesTable.html
deleted file mode 100644
index 21cca4d6..00000000
--- a/doc/+types/+core/IntracellularResponsesTable.html
+++ /dev/null
@@ -1,280 +0,0 @@
types.core.IntracellularResponsesTable - MATLAB File Help

types.core.IntracellularResponsesTable
  IntracellularResponsesTable Table for storing intracellular response related metadata.
Class Details
Superclasses: types.hdmf_common.DynamicTable, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
IntracellularResponsesTable - Constructor for IntracellularResponsesTable

Property Summary
colnames - (char) The names of the columns in this table. This should be used to specify an order to the columns.
description - (char) Description of what is in this dynamic table.
id - REQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table.
response - REQUIRED (TimeSeriesReferenceVectorData) Column storing the reference to the recorded response for the recording (rows).
vectordata - (VectorData) Vector columns, including index columns, of this dynamic table.
Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: addColumn, addRow, clear, export, getRow, loadAll, toTable.
Validators: validate_colnames, validate_description, validate_id, validate_response, validate_vectordata.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/IntracellularStimuliTable.html b/doc/+types/+core/IntracellularStimuliTable.html
deleted file mode 100644
index a20c0fd4..00000000
--- a/doc/+types/+core/IntracellularStimuliTable.html
+++ /dev/null
@@ -1,280 +0,0 @@
types.core.IntracellularStimuliTable - MATLAB File Help

types.core.IntracellularStimuliTable
  IntracellularStimuliTable Table for storing intracellular stimulus related metadata.
Class Details
Superclasses: types.hdmf_common.DynamicTable, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
IntracellularStimuliTable - Constructor for IntracellularStimuliTable

Property Summary
colnames - (char) The names of the columns in this table. This should be used to specify an order to the columns.
description - (char) Description of what is in this dynamic table.
id - REQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table.
stimulus - REQUIRED (TimeSeriesReferenceVectorData) Column storing the reference to the recorded stimulus for the recording (rows).
vectordata - (VectorData) Vector columns, including index columns, of this dynamic table.
Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: addColumn, addRow, clear, export, getRow, loadAll, toTable.
Validators: validate_colnames, validate_description, validate_id, validate_stimulus, validate_vectordata.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/LFP.html b/doc/+types/+core/LFP.html
deleted file mode 100644
index ab88421e..00000000
--- a/doc/+types/+core/LFP.html
+++ /dev/null
@@ -1,192 +0,0 @@
types.core.LFP - MATLAB File Help

types.core.LFP
  LFP LFP data from one or more channels. The electrode map in each published ElectricalSeries will identify which channels are providing LFP data. Filter properties should be noted in the ElectricalSeries 'filtering' attribute.
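A hedged sketch of wrapping an existing ElectricalSeries in an LFP container and placing it in a processing module; the variable names are hypothetical, and the name-object pairing follows MatNWB's usual pattern for anonymous constrained groups:

lfp = types.core.LFP('ElectricalSeries', electricalSeries);    % electricalSeries: an existing types.core.ElectricalSeries
ecephysModule = types.core.ProcessingModule('description', 'extracellular electrophysiology');
ecephysModule.nwbdatainterface.set('LFP', lfp);
nwb.processing.set('ecephys', ecephysModule);                  % nwb: an existing NwbFile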
Class Details
Superclasses: types.core.NWBDataInterface, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
LFP - Constructor for LFP

Property Summary
electricalseries - REQUIRED (ElectricalSeries) ElectricalSeries object(s) containing LFP data for one or more channels.
Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: export, loadAll.
Validators: validate_electricalseries.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/LabMetaData.html b/doc/+types/+core/LabMetaData.html
deleted file mode 100644
index 3b27632e..00000000
--- a/doc/+types/+core/LabMetaData.html
+++ /dev/null
@@ -1,175 +0,0 @@
types.core.LabMetaData - MATLAB File Help

types.core.LabMetaData
  LabMetaData Lab-specific meta-data.
Class Details
Superclasses: types.core.NWBContainer, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
LabMetaData - Constructor for LabMetaData

Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: export, loadAll.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/MotionCorrection.html b/doc/+types/+core/MotionCorrection.html
deleted file mode 100644
index b243c40c..00000000
--- a/doc/+types/+core/MotionCorrection.html
+++ /dev/null
@@ -1,192 +0,0 @@
types.core.MotionCorrection - MATLAB File Help

types.core.MotionCorrection
  MotionCorrection An image stack where all frames are shifted (registered) to a common coordinate system, to account for movement and drift between frames. Note: each frame at each point in time is assumed to be 2-D (has only x & y dimensions).
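A hedged sketch, following the same anonymous-group pattern as other NWBDataInterface containers; correctedImageStack is assumed to be a previously built types.core.CorrectedImageStack:

motionCorrection = types.core.MotionCorrection( ...
    'CorrectedImageStack', correctedImageStack);   % the chosen name becomes the group name inside the container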
Class Details
Superclasses: types.core.NWBDataInterface, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
MotionCorrection - Constructor for MotionCorrection

Property Summary
correctedimagestack - REQUIRED (CorrectedImageStack) Results from motion correction of an image stack.
Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: export, loadAll.
Validators: validate_correctedimagestack.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/NWBContainer.html b/doc/+types/+core/NWBContainer.html
deleted file mode 100644
index bb766466..00000000
--- a/doc/+types/+core/NWBContainer.html
+++ /dev/null
@@ -1,175 +0,0 @@
types.core.NWBContainer - MATLAB File Help

types.core.NWBContainer
  NWBContainer An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers.
Class Details
Superclasses: types.hdmf_common.Container, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
NWBContainer - Constructor for NWBContainer

Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: export, loadAll.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/NWBData.html b/doc/+types/+core/NWBData.html
deleted file mode 100644
index bb28606d..00000000
--- a/doc/+types/+core/NWBData.html
+++ /dev/null
@@ -1,192 +0,0 @@
types.core.NWBData - MATLAB File Help

types.core.NWBData
  NWBData An abstract data type for a dataset.
Class Details
Superclasses: types.hdmf_common.Data, types.untyped.DatasetClass
Sealed: false
Construct on load: false

Constructor Summary
NWBData - Constructor for NWBData

Property Summary
data - REQUIRED (any)
Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: export, loadAll.
Validators: validate_data.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/NWBDataInterface.html b/doc/+types/+core/NWBDataInterface.html
deleted file mode 100644
index c084c98c..00000000
--- a/doc/+types/+core/NWBDataInterface.html
+++ /dev/null
@@ -1,175 +0,0 @@
types.core.NWBDataInterface - MATLAB File Help

types.core.NWBDataInterface
  NWBDataInterface An abstract data type for a generic container storing collections of data, as opposed to metadata.
Class Details
Superclasses: types.core.NWBContainer, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
NWBDataInterface - Constructor for NWBDataInterface

Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: export, loadAll.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/NWBFile.html b/doc/+types/+core/NWBFile.html
deleted file mode 100644
index 63e82d7f..00000000
--- a/doc/+types/+core/NWBFile.html
+++ /dev/null
@@ -1,804 +0,0 @@
types.core.NWBFile - MATLAB File Help

types.core.NWBFile
  NWBFile An NWB file storing cellular-based neurophysiology data from a single experimental session.
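In practice files are usually created through the NwbFile wrapper class; a minimal sketch covering the REQUIRED fields listed in the Property Summary below (identifier, session_description, session_start_time, timestamps_reference_time), with hypothetical values:

nwb = NwbFile( ...
    'identifier', 'Mouse5_Day3', ...
    'session_description', 'mouse in open exploration', ...
    'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
    'timestamps_reference_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
    'general_session_id', 'session_1234');
nwbExport(nwb, 'mouse5_day3.nwb');   % write the file to disk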
Class Details
Superclasses: types.core.NWBContainer, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
NWBFile - Constructor for NWBFile

Property Summary
acquisition - (DynamicTable|NWBDataInterface) Tabular data that is relevant to acquisition | Acquired, raw data.
analysis - (DynamicTable|NWBContainer) Tabular data that is relevant to data stored in analysis | Custom analysis results.
file_create_date - REQUIRED (datetime) A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.
general - (LabMetaData) Place-holder that can be extended so that lab-specific meta-data can be placed in /general.
general_data_collection - (char) Notes about data collection and analysis.
general_devices - (Device) Data acquisition devices.
general_experiment_description - (char) General description of the experiment.
general_experimenter - (char) Name of person(s) who performed the experiment. Can also specify roles of different people involved.
general_extracellular_ephys - (ElectrodeGroup) Physical group of electrodes.
general_extracellular_ephys_electrodes - (DynamicTable) A table of all electrodes (i.e. channels) used for recording.
general_institution - (char) Institution(s) where experiment was performed.
general_intracellular_ephys - (IntracellularElectrode) An intracellular electrode.
general_intracellular_ephys_experimental_conditions - (ExperimentalConditionsTable) A table for grouping different intracellular recording repetitions together that belong to the same experimental condition.
general_intracellular_ephys_filtering - (char) [DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.
general_intracellular_ephys_intracellular_recordings - (IntracellularRecordingsTable) A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. In this case, both the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used.
general_intracellular_ephys_repetitions - (RepetitionsTable) A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence.
general_intracellular_ephys_sequential_recordings - (SequentialRecordingsTable) A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where a sequence of stimuli of the same type with varying parameters has been presented in a sequence.
general_intracellular_ephys_simultaneous_recordings - (SimultaneousRecordingsTable) A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes.
general_intracellular_ephys_sweep_table - (SweepTable) [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by the IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.
general_keywords - (char) Terms to search over.
general_lab - (char) Laboratory where experiment was performed.
general_notes - (char) Notes about the experiment.
general_optogenetics - (OptogeneticStimulusSite) An optogenetic stimulation site.
general_optophysiology - (ImagingPlane) An imaging plane.
general_pharmacology - (char) Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc.
general_protocol - (char) Experimental protocol, if applicable. e.g., include IACUC protocol number.
general_related_publications - (char) Publication information. PMID, DOI, URL, etc.
general_session_id - (char) Lab-specific ID for the session.
general_slices - (char) Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.
general_source_script - (char) Script file or link to public source code used to create this NWB file.
general_source_script_file_name - (char) Name of script file.
general_stimulus - (char) Notes about stimuli, such as how and where they were presented.
general_subject - (Subject) Information about the animal or person from which the data was measured.
general_surgery - (char) Narrative description about surgery/surgeries, including date(s) and who performed surgery.
general_virus - (char) Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.
identifier - REQUIRED (char) A unique text identifier for the file. For example, concatenated lab name, file creation date/time and experimentalist, or a hash of these and/or other values. The goal is that the string should be unique to all other files.
intervals - (TimeIntervals) Optional additional table(s) for describing other experimental time intervals.
intervals_epochs - (TimeIntervals) Divisions in time marking experimental stages or sub-divisions of a single recording session.
intervals_invalid_times - (TimeIntervals) Time intervals that should be removed from analysis.
intervals_trials - (TimeIntervals) Repeated experimental events that have a logical grouping.
nwb_version - (char) File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers.
processing - (ProcessingModule) Intermediate analysis of acquired data.
scratch - (DynamicTable|NWBContainer|ScratchData) Any one-off tables | Any one-off containers | Any one-off datasets.
session_description - REQUIRED (char) A description of the experimental session and data in the file.
session_start_time - REQUIRED (datetime) Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds.
stimulus_presentation - (TimeSeries) TimeSeries objects containing data of presented stimuli.
stimulus_templates - (Images|TimeSeries) Images objects containing images of presented stimuli. | TimeSeries objects containing template data of presented stimuli.
timestamps_reference_time - REQUIRED (datetime) Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).
units - (Units) Data about sorted spike units.
Method Summary
Inherited handle methods: addlistener, delete, eq (==), findobj, findprop, ge (>=), gt (>), isvalid (Sealed), le (<=), listener, lt (<), ne (~=), notify.
Other methods: export, loadAll.
Validators: validate_acquisition, validate_analysis, validate_file_create_date, validate_general, validate_general_data_collection, validate_general_devices, validate_general_experiment_description, validate_general_experimenter, validate_general_extracellular_ephys, validate_general_extracellular_ephys_electrodes, validate_general_institution, validate_general_intracellular_ephys, validate_general_intracellular_ephys_experimental_conditions, validate_general_intracellular_ephys_filtering, validate_general_intracellular_ephys_intracellular_recordings, validate_general_intracellular_ephys_repetitions, validate_general_intracellular_ephys_sequential_recordings, validate_general_intracellular_ephys_simultaneous_recordings, validate_general_intracellular_ephys_sweep_table, validate_general_keywords, validate_general_lab, validate_general_notes, validate_general_optogenetics, validate_general_optophysiology, validate_general_pharmacology, validate_general_protocol, validate_general_related_publications, validate_general_session_id, validate_general_slices, validate_general_source_script, validate_general_source_script_file_name, validate_general_stimulus, validate_general_subject, validate_general_surgery, validate_general_virus, validate_identifier, validate_intervals, validate_intervals_epochs, validate_intervals_invalid_times, validate_intervals_trials, validate_processing, validate_scratch, validate_session_description, validate_session_start_time, validate_stimulus_presentation, validate_stimulus_templates, validate_timestamps_reference_time, validate_units.
Event Summary

\ No newline at end of file
diff --git a/doc/+types/+core/OnePhotonSeries.html b/doc/+types/+core/OnePhotonSeries.html
deleted file mode 100644
index aa0887af..00000000
--- a/doc/+types/+core/OnePhotonSeries.html
+++ /dev/null
@@ -1,534 +0,0 @@
types.core.OnePhotonSeries - MATLAB File Help

types.core.OnePhotonSeries
  OnePhotonSeries Image stack recorded over time from 1-photon microscope.
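A hedged sketch of constructing one from an acquired stack; the sizes, rates, and the imagingPlane variable are hypothetical, and the SoftLink follows the usual MatNWB linking pattern:

onePhoton = types.core.OnePhotonSeries( ...
    'data', rand(128, 128, 1000), ...                           % pixel values over time
    'data_unit', 'lumens', ...
    'imaging_plane', types.untyped.SoftLink(imagingPlane), ...  % imagingPlane: an existing types.core.ImagingPlane
    'starting_time', 0.0, ...
    'starting_time_rate', 30.0, ...                             % Hz
    'exposure_time', single(0.033), ...
    'binning', uint8(2));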
Class Details
Superclasses: types.core.ImageSeries, types.untyped.GroupClass
Sealed: false
Construct on load: false

Constructor Summary
OnePhotonSeries - Constructor for OnePhotonSeries

Property Summary
binning(uint8) Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc. 
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value - of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.  -
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
deviceDevice 
dimension(int32) Number of pixels on x, y, (and z) axes. 
exposure_time(single) Exposure time of the sample; often the inverse of the frequency. 
external_file(char) Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the - image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored - in another NWB file and that file is linked to this file.  -
external_file_starting_frame(int32) Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. 
format(char) Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image - files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not - present, then the default format='raw' case is assumed.  -
imaging_planeImagingPlane 
intensity(single) Intensity of the excitation in mW/mm^2, if known. 
pmt_gain(single) Photomultiplier gain. 
power(single) Power of the excitation in mW, if known. 
scan_line_rate(single) Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information - for analysis, and so good to be stored w/ the actual data.  -
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_binning 
-   - - validate_comments 
-   - - validate_control 
-   - - validate_control_description 
-   - - validate_data 
-   - - validate_data_continuity 
-   - - validate_data_conversion 
-   - - validate_data_offset 
-   - - validate_data_resolution 
-   - - validate_data_unit 
-   - - validate_description 
-   - - validate_device 
-   - - validate_dimension 
-   - - validate_exposure_time 
-   - - validate_external_file 
-   - - validate_external_file_starting_frame 
-   - - validate_format 
-   - - validate_imaging_plane 
-   - - validate_intensity 
-   - - validate_pmt_gain 
-   - - validate_power 
-   - - validate_scan_line_rate 
-   - - validate_starting_time 
-   - - validate_starting_time_rate 
-   - - validate_timestamps 
- -
Event Summary -
-
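For orientation, a minimal matnwb-style sketch of building a OnePhotonSeries from the fields listed above; the NwbFile object `nwb` and the ImagingPlane object `imaging_plane` are assumed to exist already, and all names and values are illustrative rather than taken from this changeset.

% Assumes `imaging_plane` was created beforehand and registered under
% nwb.general_optophysiology.
one_photon_series = types.core.OnePhotonSeries( ...
    'data', rand(64, 64, 1000), ...                         % placeholder pixel data, time last
    'data_unit', 'n.a.', ...
    'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
    'exposure_time', 0.02, ...                              % seconds (illustrative)
    'binning', uint8(2), ...
    'starting_time', 0.0, ...
    'starting_time_rate', 30.0);                            % Hz
nwb.acquisition.set('OnePhotonSeries', one_photon_series);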
\ No newline at end of file
diff --git a/doc/+types/+core/OpticalChannel.html b/doc/+types/+core/OpticalChannel.html
deleted file mode 100644
index 0469dbab..00000000
--- a/doc/+types/+core/OpticalChannel.html
+++ /dev/null
@@ -1,204 +0,0 @@
types.core.OpticalChannel - MATLAB File Help
-
types.core.OpticalChannel
-
  OpticalChannel An optical channel used to record from an imaging plane.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.NWBContainer, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
OpticalChannelConstructor for OpticalChannel 
- -
Property Summary -
- - - - - - - - - -
descriptionREQUIRED (char) Description or other notes about the channel. 
emission_lambdaREQUIRED (single) Emission wavelength for channel, in nm. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_description 
-   - - validate_emission_lambda 
- -
Event Summary -
-
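Both listed properties are required, so a constructor sketch is short; the description and wavelength below are placeholders, and the resulting channel is typically attached to an ImagingPlane afterwards.

optical_channel = types.core.OpticalChannel( ...
    'description', 'green emission channel', ...   % placeholder text
    'emission_lambda', 520.0);                     % emission wavelength in nm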
\ No newline at end of file
diff --git a/doc/+types/+core/OpticalSeries.html b/doc/+types/+core/OpticalSeries.html
deleted file mode 100644
index a84f0a81..00000000
--- a/doc/+types/+core/OpticalSeries.html
+++ /dev/null
@@ -1,484 +0,0 @@
types.core.OpticalSeries - MATLAB File Help
-
types.core.OpticalSeries
-
  OpticalSeries Image data that is presented or recorded. A stimulus template movie will be stored only as an image. When the image is presented as stimulus, additional data is required, such as field of view (e.g., how much of the visual field the image covers, or what area of the target is being imaged). If the OpticalSeries represents acquired imaging data, orientation is also important.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.ImageSeries, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
OpticalSeriesConstructor for OpticalSeries 
- -
Property Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. 
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
deviceDevice 
dimension(int32) Number of pixels on x, y, (and z) axes. 
distance(single) Distance from camera/monitor to target/eye. 
external_file(char) Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the - image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored - in another NWB file and that file is linked to this file.  -
external_file_starting_frame(int32) Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. 
field_of_view(single) Width, height and depth of image, or imaged area, in meters. 
format(char) Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image - files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not - present, then the default format='raw' case is assumed.  -
orientation(char) Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference. 
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_comments 
-   - - validate_control 
-   - - validate_control_description 
-   - - validate_data 
-   - - validate_data_continuity 
-   - - validate_data_conversion 
-   - - validate_data_offset 
-   - - validate_data_resolution 
-   - - validate_data_unit 
-   - - validate_description 
-   - - validate_device 
-   - - validate_dimension 
-   - - validate_distance 
-   - - validate_external_file 
-   - - validate_external_file_starting_frame 
-   - - validate_field_of_view 
-   - - validate_format 
-   - - validate_orientation 
-   - - validate_starting_time 
-   - - validate_starting_time_rate 
-   - - validate_timestamps 
- -
Event Summary -
-
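A hedged sketch of storing a presented stimulus movie as an OpticalSeries; the NwbFile `nwb` is assumed to exist, and the frame data, geometry, and group name are made up for illustration.

stimulus_movie = types.core.OpticalSeries( ...
    'data', uint8(randi(255, 240, 320, 60)), ...    % placeholder frames, time last
    'data_unit', 'n.a.', ...
    'field_of_view', [0.3; 0.2; 0.7], ...           % width, height, depth in meters (illustrative)
    'distance', 0.7, ...                            % monitor-to-eye distance in meters
    'orientation', 'rows run top to bottom of the visual field', ...
    'starting_time', 0.0, ...
    'starting_time_rate', 60.0);
nwb.stimulus_presentation.set('StimulusMovie', stimulus_movie);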
\ No newline at end of file
diff --git a/doc/+types/+core/OptogeneticSeries.html b/doc/+types/+core/OptogeneticSeries.html
deleted file mode 100644
index 9ee2fcc0..00000000
--- a/doc/+types/+core/OptogeneticSeries.html
+++ /dev/null
@@ -1,387 +0,0 @@
types.core.OptogeneticSeries - MATLAB File Help
-
types.core.OptogeneticSeries
-
  OptogeneticSeries An optogenetic stimulus.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.TimeSeries, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
OptogeneticSeriesConstructor for OptogeneticSeries 
- -
Property Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. 
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
siteOptogeneticStimulusSite 
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_comments 
-   - - validate_control 
-   - - validate_control_description 
-   - - validate_data 
-   - - validate_data_continuity 
-   - - validate_data_conversion 
-   - - validate_data_offset 
-   - - validate_data_resolution 
-   - - validate_data_unit 
-   - - validate_description 
-   - - validate_site 
-   - - validate_starting_time 
-   - - validate_starting_time_rate 
-   - - validate_timestamps 
- -
Event Summary -
-
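A minimal sketch of an applied light-power trace; it assumes an existing NwbFile `nwb` and an OptogeneticStimulusSite object `stim_site` (see the next file), and the powers and timestamps are illustrative.

ogen_series = types.core.OptogeneticSeries( ...
    'data', [0; 0.005; 0.005; 0], ...             % applied power per timestamp (illustrative)
    'data_unit', 'watts', ...
    'timestamps', [0; 1.0; 2.0; 3.0], ...         % seconds
    'site', types.untyped.SoftLink(stim_site), ...
    'description', 'example pulse train');
nwb.stimulus_presentation.set('OptogeneticSeries', ogen_series);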
\ No newline at end of file
diff --git a/doc/+types/+core/OptogeneticStimulusSite.html b/doc/+types/+core/OptogeneticStimulusSite.html
deleted file mode 100644
index 41239480..00000000
--- a/doc/+types/+core/OptogeneticStimulusSite.html
+++ /dev/null
@@ -1,230 +0,0 @@
types.core.OptogeneticStimulusSite - MATLAB File Help
-
types.core.OptogeneticStimulusSite
-
  OptogeneticStimulusSite A site of optogenetic stimulation.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.NWBContainer, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
OptogeneticStimulusSiteConstructor for OptogeneticStimulusSite 
- -
Property Summary -
- - - - - - - - - - - - - - - - - -
descriptionREQUIRED (char) Description of stimulation site. 
deviceDevice 
excitation_lambdaREQUIRED (single) Excitation wavelength, in nm. 
locationREQUIRED (char) Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic - coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.  -
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_description 
-   - - validate_device 
-   - - validate_excitation_lambda 
-   - - validate_location 
- -
Event Summary -
-
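A sketch of defining a stimulation site from the required fields above; the NwbFile `nwb` and the Device object `laser` are assumed to exist, and the group name under general_optogenetics is an arbitrary choice.

stim_site = types.core.OptogeneticStimulusSite( ...
    'description', 'chronically implanted fiber over M1', ...  % placeholder
    'device', types.untyped.SoftLink(laser), ...
    'excitation_lambda', 473.0, ...                             % nm
    'location', 'M1');
nwb.general_optogenetics.set('StimSite', stim_site);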
\ No newline at end of file
diff --git a/doc/+types/+core/PatchClampSeries.html b/doc/+types/+core/PatchClampSeries.html
deleted file mode 100644
index db29d7c8..00000000
--- a/doc/+types/+core/PatchClampSeries.html
+++ /dev/null
@@ -1,423 +0,0 @@
types.core.PatchClampSeries - MATLAB File Help
-
types.core.PatchClampSeries
-
  PatchClampSeries An abstract base class for patch-clamp data - stimulus or response, current or voltage.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.TimeSeries, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
PatchClampSeriesConstructor for PatchClampSeries 
- -
Property Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. 
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
electrodeIntracellularElectrode 
gain(single) Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). 
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
stimulus_description(char) Protocol/stimulus name for this patch-clamp dataset. 
sweep_number(uint32) Sweep number; allows grouping different PatchClampSeries together. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_comments 
-   - - validate_control 
-   - - validate_control_description 
-   - - validate_data 
-   - - validate_data_continuity 
-   - - validate_data_conversion 
-   - - validate_data_offset 
-   - - validate_data_resolution 
-   - - validate_data_unit 
-   - - validate_description 
-   - - validate_electrode 
-   - - validate_gain 
-   - - validate_starting_time 
-   - - validate_starting_time_rate 
-   - - validate_stimulus_description 
-   - - validate_sweep_number 
-   - - validate_timestamps 
- -
Event Summary -
-
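PatchClampSeries is described above as an abstract base class, so in practice one of its concrete subtypes is used; the shared fields nevertheless follow the usual constructor pattern, sketched here under the assumption of an existing NwbFile `nwb` and IntracellularElectrode object `electrode`.

pcs = types.core.PatchClampSeries( ...
    'data', randn(20000, 1), ...                        % placeholder samples
    'data_unit', 'volts', ...
    'electrode', types.untyped.SoftLink(electrode), ...
    'gain', 0.02, ...                                   % illustrative gain value
    'stimulus_description', 'StepCurrent_20pA', ...     % placeholder protocol name
    'sweep_number', uint32(1), ...
    'starting_time', 0.0, ...
    'starting_time_rate', 20000.0);
nwb.acquisition.set('Sweep1', pcs);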
\ No newline at end of file
diff --git a/doc/+types/+core/PlaneSegmentation.html b/doc/+types/+core/PlaneSegmentation.html
deleted file mode 100644
index b98c0e15..00000000
--- a/doc/+types/+core/PlaneSegmentation.html
+++ /dev/null
@@ -1,358 +0,0 @@
types.core.PlaneSegmentation - MATLAB File Help
-
types.core.PlaneSegmentation
-
  PlaneSegmentation Results from image segmentation of a specific imaging plane.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.DynamicTable, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
PlaneSegmentationConstructor for PlaneSegmentation 
- -
Property Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
colnames(char) The names of the columns in this table. This should be used to specify an order to the columns. 
description(char) Description of what is in this dynamic table. 
idREQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table. 
image_mask(VectorData) ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of - the ROI are finite non-zero.  -
imaging_planeImagingPlane 
pixel_mask(VectorData) Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing - of this dataset is maintained by the PlaneSegmentation  -
pixel_mask_index(VectorIndex) Index into pixel_mask. 
reference_images(ImageSeries) One or more image stacks that the masks apply to (can be one-element stack). 
vectordata(VectorData) Vector columns, including index columns, of this dynamic table. 
voxel_mask(VectorData) Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing - of this dataset is maintained by the PlaneSegmentation  -
voxel_mask_index(VectorIndex) Index into voxel_mask. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addColumn 
-   - - addRow 
-   - - addlistenerAdd listener for event. 
-   - - clear 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - getRow 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - toTable 
-   - - validate_colnames 
-   - - validate_description 
-   - - validate_id 
-   - - validate_image_mask 
-   - - validate_imaging_plane 
-   - - validate_pixel_mask 
-   - - validate_pixel_mask_index 
-   - - validate_reference_images 
-   - - validate_vectordata 
-   - - validate_voxel_mask 
-   - - validate_voxel_mask_index 
- -
Event Summary -
-
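A sketch of a PlaneSegmentation holding a single image_mask column; the ImagingPlane object `imaging_plane` is assumed to exist, and the masks are random placeholders.

n_rois = 10;
image_masks = double(rand(64, 64, n_rois) > 0.95);      % placeholder ROI masks
plane_segmentation = types.core.PlaneSegmentation( ...
    'colnames', {'image_mask'}, ...
    'description', 'example segmentation output', ...
    'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
    'image_mask', types.hdmf_common.VectorData( ...
        'data', image_masks, 'description', 'image masks'), ...
    'id', types.hdmf_common.ElementIdentifiers('data', int64(0:n_rois-1)'));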
\ No newline at end of file
diff --git a/doc/+types/+core/Position.html b/doc/+types/+core/Position.html
deleted file mode 100644
index be01a0b3..00000000
--- a/doc/+types/+core/Position.html
+++ /dev/null
@@ -1,192 +0,0 @@
types.core.Position - MATLAB File Help
-
types.core.Position
-
  Position Position data, whether along the x, x/y or x/y/z axis.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.NWBDataInterface, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
PositionConstructor for Position 
- -
Property Summary -
- - - - - -
spatialseriesREQUIRED (SpatialSeries) SpatialSeries object containing position data. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_spatialseries 
- -
Event Summary -
-
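Position is a thin container around one or more SpatialSeries; a minimal sketch, assuming an existing types.core.SpatialSeries object `spatial_series` (the name passed to the constructor becomes the key of the stored series).

position = types.core.Position('SpatialSeries', spatial_series);
% Position containers are usually placed in a 'behavior' ProcessingModule
% rather than directly in acquisition; see the ProcessingModule sketch below.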
\ No newline at end of file
diff --git a/doc/+types/+core/ProcessingModule.html b/doc/+types/+core/ProcessingModule.html
deleted file mode 100644
index bcd33e16..00000000
--- a/doc/+types/+core/ProcessingModule.html
+++ /dev/null
@@ -1,216 +0,0 @@
types.core.ProcessingModule - MATLAB File Help
-
types.core.ProcessingModule
-
  ProcessingModule A collection of processed data.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.NWBContainer, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
ProcessingModuleConstructor for ProcessingModule 
- -
Property Summary -
- - - - - - - - - - - - - -
description(char) Description of this collection of processed data. 
dynamictable(DynamicTable) Tables stored in this collection. 
nwbdatainterface(NWBDataInterface) Data objects stored in this collection. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_description 
-   - - validate_dynamictable 
-   - - validate_nwbdatainterface 
- -
Event Summary -
-
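A sketch of the usual pattern for processed data: create a module, add NWBDataInterface objects to it, and register it under nwb.processing; the NwbFile `nwb` and the `position` container from the Position sketch above are assumed.

behavior_module = types.core.ProcessingModule( ...
    'description', 'processed behavioral data');        % placeholder description
behavior_module.nwbdatainterface.set('Position', position);
nwb.processing.set('behavior', behavior_module);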
\ No newline at end of file
diff --git a/doc/+types/+core/PupilTracking.html b/doc/+types/+core/PupilTracking.html
deleted file mode 100644
index 287f2246..00000000
--- a/doc/+types/+core/PupilTracking.html
+++ /dev/null
@@ -1,192 +0,0 @@
types.core.PupilTracking - MATLAB File Help
-
types.core.PupilTracking
-
  PupilTracking Eye-tracking data, representing pupil size.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.NWBDataInterface, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
PupilTrackingConstructor for PupilTracking 
- -
Property Summary -
- - - - - -
timeseriesREQUIRED (TimeSeries) TimeSeries object containing time series data on pupil size. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_timeseries 
- -
Event Summary -
-
\ No newline at end of file
diff --git a/doc/+types/+core/RGBAImage.html b/doc/+types/+core/RGBAImage.html
deleted file mode 100644
index f07883e0..00000000
--- a/doc/+types/+core/RGBAImage.html
+++ /dev/null
@@ -1,216 +0,0 @@
types.core.RGBAImage - MATLAB File Help
-
types.core.RGBAImage
-
  RGBAImage A color image with transparency.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.Image, types.untyped.DatasetClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
RGBAImageConstructor for RGBAImage 
- -
Property Summary -
- - - - - - - - - - - - - -
dataREQUIRED any 
description(char) Description of the image. 
resolution(single) Pixel resolution of the image, in pixels per centimeter. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_data 
-   - - validate_description 
-   - - validate_resolution 
- -
Event Summary -
-
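A minimal sketch of wrapping a height x width x 4 array as an RGBAImage; the pixel values and resolution are placeholders.

rgba = uint8(randi(255, 480, 640, 4));       % placeholder image with an alpha channel
rgba_image = types.core.RGBAImage( ...
    'data', rgba, ...
    'description', 'example color image with transparency', ...
    'resolution', 70.0);                     % pixels per centimeter (illustrative)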
\ No newline at end of file
diff --git a/doc/+types/+core/RGBImage.html b/doc/+types/+core/RGBImage.html
deleted file mode 100644
index e589b76c..00000000
--- a/doc/+types/+core/RGBImage.html
+++ /dev/null
@@ -1,216 +0,0 @@
types.core.RGBImage - MATLAB File Help
-
types.core.RGBImage
-
  RGBImage A color image.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.Image, types.untyped.DatasetClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
RGBImageConstructor for RGBImage 
- -
Property Summary -
- - - - - - - - - - - - - -
dataREQUIRED any 
description(char) Description of the image. 
resolution(single) Pixel resolution of the image, in pixels per centimeter. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_data 
-   - - validate_description 
-   - - validate_resolution 
- -
Event Summary -
-
\ No newline at end of file
diff --git a/doc/+types/+core/RepetitionsTable.html b/doc/+types/+core/RepetitionsTable.html
deleted file mode 100644
index b4c7feff..00000000
--- a/doc/+types/+core/RepetitionsTable.html
+++ /dev/null
@@ -1,292 +0,0 @@
types.core.RepetitionsTable - MATLAB File Help
-
types.core.RepetitionsTable
-
  RepetitionsTable A table for grouping different sequential intracellular recordings together. With each sequential recording typically representing a particular type of stimulus, the RepetitionsTable is used to group sets of stimuli applied in sequence.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.DynamicTable, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
RepetitionsTableConstructor for RepetitionsTable 
- -
Property Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - -
colnames(char) The names of the columns in this table. This should be used to specify an order to the columns. 
description(char) Description of what is in this dynamic table. 
idREQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table. 
sequential_recordingsREQUIRED (DynamicTableRegion) A reference to one or more rows in the SequentialRecordingsTable table. 
sequential_recordings_indexREQUIRED (VectorIndex) Index dataset for the sequential_recordings column. 
vectordata(VectorData) Vector columns, including index columns, of this dynamic table. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addColumn 
-   - - addRow 
-   - - addlistenerAdd listener for event. 
-   - - clear 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - getRow 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - toTable 
-   - - validate_colnames 
-   - - validate_description 
-   - - validate_id 
-   - - validate_sequential_recordings 
-   - - validate_sequential_recordings_index 
-   - - validate_vectordata 
- -
Event Summary -
-
\ No newline at end of file
diff --git a/doc/+types/+core/RoiResponseSeries.html b/doc/+types/+core/RoiResponseSeries.html
deleted file mode 100644
index b6ae6977..00000000
--- a/doc/+types/+core/RoiResponseSeries.html
+++ /dev/null
@@ -1,389 +0,0 @@
types.core.RoiResponseSeries - MATLAB File Help
-
types.core.RoiResponseSeries
-
  RoiResponseSeries ROI responses over an imaging plane. The first dimension represents time. The second dimension, if present, represents ROIs.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.TimeSeries, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
RoiResponseSeriesConstructor for RoiResponseSeries 
- -
Property Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. 
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
roisREQUIRED (DynamicTableRegion) DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in - this timeseries.  -
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_comments 
-   - - validate_control 
-   - - validate_control_description 
-   - - validate_data 
-   - - validate_data_continuity 
-   - - validate_data_conversion 
-   - - validate_data_offset 
-   - - validate_data_resolution 
-   - - validate_data_unit 
-   - - validate_description 
-   - - validate_rois 
-   - - validate_starting_time 
-   - - validate_starting_time_rate 
-   - - validate_timestamps 
- -
Event Summary -
-
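A sketch of linking fluorescence traces to segmented ROIs through a DynamicTableRegion; the `plane_segmentation` object from the PlaneSegmentation sketch above is assumed, and the trace values are placeholders.

n_rois = 10;
roi_region = types.hdmf_common.DynamicTableRegion( ...
    'table', types.untyped.ObjectView(plane_segmentation), ...
    'description', 'all ROIs', ...
    'data', int64((0:n_rois-1)'));           % row indices into the ROI table
roi_response_series = types.core.RoiResponseSeries( ...
    'rois', roi_region, ...
    'data', rand(n_rois, 3000), ...          % placeholder traces
    'data_unit', 'lumens', ...
    'starting_time', 0.0, ...
    'starting_time_rate', 30.0);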
\ No newline at end of file
diff --git a/doc/+types/+core/ScratchData.html b/doc/+types/+core/ScratchData.html
deleted file mode 100644
index a7db6eb4..00000000
--- a/doc/+types/+core/ScratchData.html
+++ /dev/null
@@ -1,204 +0,0 @@
types.core.ScratchData - MATLAB File Help
-
types.core.ScratchData
-
  ScratchData Any one-off datasets
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.NWBData, types.untyped.DatasetClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
ScratchDataConstructor for ScratchData 
- -
Property Summary -
- - - - - - - - - -
dataREQUIRED any 
notes(char) Any notes the user has about the dataset being stored 
- -
Method Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_data 
-   - - validate_notes 
- -
Event Summary -
-
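A sketch of stashing a one-off result in the file's scratch space, assuming an existing NwbFile `nwb`; the key name and values are arbitrary placeholders.

scratch = types.core.ScratchData( ...
    'data', rand(10, 1), ...                              % placeholder analysis result
    'notes', 'per-trial summary used during exploration');
nwb.scratch.set('trial_summary', scratch);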
\ No newline at end of file
diff --git a/doc/+types/+core/SequentialRecordingsTable.html b/doc/+types/+core/SequentialRecordingsTable.html
deleted file mode 100644
index 78b79738..00000000
--- a/doc/+types/+core/SequentialRecordingsTable.html
+++ /dev/null
@@ -1,304 +0,0 @@
types.core.SequentialRecordingsTable - MATLAB File Help
-
types.core.SequentialRecordingsTable
-
  SequentialRecordingsTable A table for grouping different sequential recordings from the SimultaneousRecordingsTable together. This is typically used to group sequential recordings in which a sequence of stimuli of the same type, with varying parameters, has been presented.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.DynamicTable, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
SequentialRecordingsTableConstructor for SequentialRecordingsTable 
- -
Property Summary -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
colnames(char) The names of the columns in this table. This should be used to specify an order to the columns. 
description(char) Description of what is in this dynamic table. 
idREQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table. 
simultaneous_recordingsREQUIRED (DynamicTableRegion) A reference to one or more rows in the SimultaneousRecordingsTable table. 
simultaneous_recordings_indexREQUIRED (VectorIndex) Index dataset for the simultaneous_recordings column. 
stimulus_typeREQUIRED (VectorData) The type of stimulus used for the sequential recording. 
vectordata(VectorData) Vector columns, including index columns, of this dynamic table. 
Method Summary
    addColumn
    addRow
    addlistener            Add listener for event.
    clear
    delete                 Delete a handle object.
    eq                     == (EQ) Test handle equality.
    export
    findobj                Find objects matching specified conditions.
    findprop               Find property of MATLAB handle object.
    ge                     >= (GE) Greater than or equal relation for handles.
    getRow
    gt                     > (GT) Greater than relation for handles.
    isvalid (Sealed)       Test handle validity.
    le                     <= (LE) Less than or equal relation for handles.
    listener               Add listener for event without binding the listener to the source object.
    loadAll
    lt                     < (LT) Less than relation for handles.
    ne                     ~= (NE) Not equal relation for handles.
    notify                 Notify listeners of event.
    toTable
    validate_colnames
    validate_description
    validate_id
    validate_simultaneous_recordings
    validate_simultaneous_recordings_index
    validate_stimulus_type
    validate_vectordata

Event Summary
- - \ No newline at end of file diff --git a/doc/+types/+core/SimultaneousRecordingsTable.html b/doc/+types/+core/SimultaneousRecordingsTable.html deleted file mode 100644 index c5ae835a..00000000 --- a/doc/+types/+core/SimultaneousRecordingsTable.html +++ /dev/null @@ -1,292 +0,0 @@ - - - - - - types.core.SimultaneousRecordingsTable - MATLAB File Help - - - - - - - - - - -
types.core.SimultaneousRecordingsTable - MATLAB File Help
-
types.core.SimultaneousRecordingsTable
-
  SimultaneousRecordingsTable A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes.
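A rough construction sketch, assuming recordingsRegion and recordingsIndex are a pre-built DynamicTableRegion/VectorIndex pair referencing rows of the IntracellularRecordingsTable (illustrative names only):

    % Sketch only: recordingsRegion / recordingsIndex are assumed pre-built.
    simultaneous = types.core.SimultaneousRecordingsTable( ...
        'description', 'Intracellular recordings acquired at the same time', ...
        'colnames', {'recordings'}, ...
        'recordings', recordingsRegion, ...
        'recordings_index', recordingsIndex, ...
        'id', types.hdmf_common.ElementIdentifiers('data', int64(0)));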
Class Details
    Superclasses         types.hdmf_common.DynamicTable, types.untyped.GroupClass
    Sealed               false
    Construct on load    false

Constructor Summary
    SimultaneousRecordingsTable    Constructor for SimultaneousRecordingsTable

Property Summary
colnames(char) The names of the columns in this table. This should be used to specify an order to the columns. 
description(char) Description of what is in this dynamic table. 
idREQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table. 
recordingsREQUIRED (DynamicTableRegion) A reference to one or more rows in the IntracellularRecordingsTable table. 
recordings_indexREQUIRED (VectorIndex) Index dataset for the recordings column. 
vectordata(VectorData) Vector columns, including index columns, of this dynamic table. 
Method Summary
    addColumn
    addRow
    addlistener            Add listener for event.
    clear
    delete                 Delete a handle object.
    eq                     == (EQ) Test handle equality.
    export
    findobj                Find objects matching specified conditions.
    findprop               Find property of MATLAB handle object.
    ge                     >= (GE) Greater than or equal relation for handles.
    getRow
    gt                     > (GT) Greater than relation for handles.
    isvalid (Sealed)       Test handle validity.
    le                     <= (LE) Less than or equal relation for handles.
    listener               Add listener for event without binding the listener to the source object.
    loadAll
    lt                     < (LT) Less than relation for handles.
    ne                     ~= (NE) Not equal relation for handles.
    notify                 Notify listeners of event.
    toTable
    validate_colnames
    validate_description
    validate_id
    validate_recordings
    validate_recordings_index
    validate_vectordata

Event Summary
- - \ No newline at end of file diff --git a/doc/+types/+core/SpatialSeries.html b/doc/+types/+core/SpatialSeries.html deleted file mode 100644 index ed5b6d81..00000000 --- a/doc/+types/+core/SpatialSeries.html +++ /dev/null @@ -1,387 +0,0 @@ - - - - - - types.core.SpatialSeries - MATLAB File Help - - - - - - - - - - -
types.core.SpatialSeries - MATLAB File Help
-
types.core.SpatialSeries
-
  SpatialSeries Direction, e.g., of gaze or travel, or position. The TimeSeries::data field is a 2D array storing position or direction relative to some reference frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries has a text dataset reference_frame that indicates the zero-position, or the zero-axes for direction. For example, if representing gaze direction, 'straight-ahead' might be a specific pixel on the monitor, or some other point in space. For position data, the 0,0 point might be the top-left corner of an enclosure, as viewed from the tracking camera. The unit of data will indicate how to interpret SpatialSeries values.
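A minimal sketch of a 2-D position trace sampled at 30 Hz (xPositions and yPositions are assumed row vectors; whether MatNWB expects [dimensions x measurements] or its transpose follows the MATLAB/HDF5 dimension-ordering conventions, so verify against the written file):

    % Sketch: 2-D position sampled at 30 Hz; array orientation is an assumption.
    position = types.core.SpatialSeries( ...
        'data', [xPositions; yPositions], ...
        'reference_frame', '(0,0) is the top-left corner of the arena, as viewed from the tracking camera', ...
        'description', 'Animal position in the open field', ...
        'starting_time', 0.0, ...
        'starting_time_rate', 30.0);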
Class Details
    Superclasses         types.core.TimeSeries, types.untyped.GroupClass
    Sealed               false
    Construct on load    false

Constructor Summary
    SpatialSeries    Constructor for SpatialSeries

Property Summary
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value - of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.  -
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
reference_frame(char) Description defining what exactly 'straight-ahead' means. 
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
Method Summary
    addlistener            Add listener for event.
    delete                 Delete a handle object.
    eq                     == (EQ) Test handle equality.
    export
    findobj                Find objects matching specified conditions.
    findprop               Find property of MATLAB handle object.
    ge                     >= (GE) Greater than or equal relation for handles.
    gt                     > (GT) Greater than relation for handles.
    isvalid (Sealed)       Test handle validity.
    le                     <= (LE) Less than or equal relation for handles.
    listener               Add listener for event without binding the listener to the source object.
    loadAll
    lt                     < (LT) Less than relation for handles.
    ne                     ~= (NE) Not equal relation for handles.
    notify                 Notify listeners of event.
    validate_comments
    validate_control
    validate_control_description
    validate_data
    validate_data_continuity
    validate_data_conversion
    validate_data_offset
    validate_data_resolution
    validate_data_unit
    validate_description
    validate_reference_frame
    validate_starting_time
    validate_starting_time_rate
    validate_timestamps

Event Summary
- - \ No newline at end of file diff --git a/doc/+types/+core/SpikeEventSeries.html b/doc/+types/+core/SpikeEventSeries.html deleted file mode 100644 index 2835fe21..00000000 --- a/doc/+types/+core/SpikeEventSeries.html +++ /dev/null @@ -1,427 +0,0 @@ - - - - - - types.core.SpikeEventSeries - MATLAB File Help - - - - - - - - - - -
types.core.SpikeEventSeries - MATLAB File Help
-
types.core.SpikeEventSeries
-
  SpikeEventSeries Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode).
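A minimal sketch, assuming electrodesRegion is a pre-built types.hdmf_common.DynamicTableRegion pointing at rows of the file's electrodes table, and waveformSnippets / eventTimes are arrays produced by spike detection (all names illustrative):

    % Sketch only: electrodesRegion, waveformSnippets and eventTimes are assumed.
    snippets = types.core.SpikeEventSeries( ...
        'data', waveformSnippets, ...
        'electrodes', electrodesRegion, ...
        'timestamps', eventTimes, ...
        'description', 'Threshold crossings detected by the acquisition system');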
Class Details
    Superclasses         types.core.ElectricalSeries, types.untyped.GroupClass
    Sealed               false
    Construct on load    false

Constructor Summary
    SpikeEventSeries    Constructor for SpikeEventSeries

Property Summary
channel_conversion(single) Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis - (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data - values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and - per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data - acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for - all channels.  -
channel_conversion_axis(int32) The zero-indexed axis of the 'data' dataset that the channel-specific conversion factor corresponds to. This value - is fixed to 1.  -
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value - of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.  -
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
electrodesREQUIRED (DynamicTableRegion) DynamicTableRegion pointer to the electrodes that this time series was generated from. 
filtering(char) Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered - data (also known as AP Band), then this value could be "High-pass 4-pole Bessel filter at 500 Hz". If this ElectricalSeries - represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be "Low-pass filter at 300 - Hz". If a non-standard filter type is used, provide as much detail about the filter properties as possible.  -
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
Method Summary
    addlistener            Add listener for event.
    delete                 Delete a handle object.
    eq                     == (EQ) Test handle equality.
    export
    findobj                Find objects matching specified conditions.
    findprop               Find property of MATLAB handle object.
    ge                     >= (GE) Greater than or equal relation for handles.
    gt                     > (GT) Greater than relation for handles.
    isvalid (Sealed)       Test handle validity.
    le                     <= (LE) Less than or equal relation for handles.
    listener               Add listener for event without binding the listener to the source object.
    loadAll
    lt                     < (LT) Less than relation for handles.
    ne                     ~= (NE) Not equal relation for handles.
    notify                 Notify listeners of event.
    validate_channel_conversion
    validate_comments
    validate_control
    validate_control_description
    validate_data
    validate_data_continuity
    validate_data_conversion
    validate_data_offset
    validate_data_resolution
    validate_data_unit
    validate_description
    validate_electrodes
    validate_filtering
    validate_starting_time
    validate_starting_time_rate
    validate_timestamps

Event Summary
- - \ No newline at end of file diff --git a/doc/+types/+core/Subject.html b/doc/+types/+core/Subject.html deleted file mode 100644 index f78215a3..00000000 --- a/doc/+types/+core/Subject.html +++ /dev/null @@ -1,300 +0,0 @@ - - - - - - types.core.Subject - MATLAB File Help - - - - - - - - - - -
types.core.Subject - MATLAB File Help
-
types.core.Subject
-
  Subject Information about the animal or person from which the data was measured.
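A minimal sketch of a Subject entry, using the name-value constructor pattern generated by MatNWB (field values are illustrative):

    subject = types.core.Subject( ...
        'subject_id', 'M001', ...
        'species', 'Mus musculus', ...
        'sex', 'F', ...
        'age', 'P90D', ...
        'genotype', 'WT', ...
        'description', 'Wild-type mouse from the in-house breeding colony');
    % Typically attached to a file as: nwb.general_subject = subject;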
Class Details
    Superclasses         types.core.NWBContainer, types.untyped.GroupClass
    Sealed               false
    Construct on load    false

Constructor Summary
    Subject    Constructor for Subject

Property Summary
age(char) Age of subject. Can be supplied instead of 'date_of_birth'. 
age_reference(char) Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied. 
date_of_birth(datetime) Date of birth of subject. Can be supplied instead of 'age'. 
description(char) Description of subject and where subject came from (e.g., breeder, if animal). 
genotype(char) Genetic strain. If absent, assume Wild Type (WT). 
sex(char) Gender of subject. 
species(char) Species of subject. 
strain(char) Strain of subject. 
subject_id(char) ID of animal/person used/participating in experiment (lab convention). 
weight(char) Weight at time of experiment, at time of surgery and at other important times. 
Method Summary
    addlistener            Add listener for event.
    delete                 Delete a handle object.
    eq                     == (EQ) Test handle equality.
    export
    findobj                Find objects matching specified conditions.
    findprop               Find property of MATLAB handle object.
    ge                     >= (GE) Greater than or equal relation for handles.
    gt                     > (GT) Greater than relation for handles.
    isvalid (Sealed)       Test handle validity.
    le                     <= (LE) Less than or equal relation for handles.
    listener               Add listener for event without binding the listener to the source object.
    loadAll
    lt                     < (LT) Less than relation for handles.
    ne                     ~= (NE) Not equal relation for handles.
    notify                 Notify listeners of event.
    validate_age
    validate_age_reference
    validate_date_of_birth
    validate_description
    validate_genotype
    validate_sex
    validate_species
    validate_strain
    validate_subject_id
    validate_weight

Event Summary
- - \ No newline at end of file diff --git a/doc/+types/+core/SweepTable.html b/doc/+types/+core/SweepTable.html deleted file mode 100644 index da5a2d3f..00000000 --- a/doc/+types/+core/SweepTable.html +++ /dev/null @@ -1,304 +0,0 @@ - - - - - - types.core.SweepTable - MATLAB File Help - - - - - - - - - - -
types.core.SweepTable - MATLAB File Help
-
types.core.SweepTable
-
  SweepTable [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions tables provide enhanced support for experiment metadata.
Class Details
    Superclasses         types.hdmf_common.DynamicTable, types.untyped.GroupClass
    Sealed               false
    Construct on load    false

Constructor Summary
    SweepTable    Constructor for SweepTable

Property Summary
colnames(char) The names of the columns in this table. This should be used to specify an order to the columns. 
description(char) Description of what is in this dynamic table. 
idREQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table. 
seriesREQUIRED (VectorData) The PatchClampSeries with the sweep number in that row. 
series_indexREQUIRED (VectorIndex) Index for series. 
sweep_numberREQUIRED (VectorData) Sweep number of the PatchClampSeries in that row. 
vectordata(VectorData) Vector columns, including index columns, of this dynamic table. 
Method Summary
    addColumn
    addRow
    addlistener            Add listener for event.
    clear
    delete                 Delete a handle object.
    eq                     == (EQ) Test handle equality.
    export
    findobj                Find objects matching specified conditions.
    findprop               Find property of MATLAB handle object.
    ge                     >= (GE) Greater than or equal relation for handles.
    getRow
    gt                     > (GT) Greater than relation for handles.
    isvalid (Sealed)       Test handle validity.
    le                     <= (LE) Less than or equal relation for handles.
    listener               Add listener for event without binding the listener to the source object.
    loadAll
    lt                     < (LT) Less than relation for handles.
    ne                     ~= (NE) Not equal relation for handles.
    notify                 Notify listeners of event.
    toTable
    validate_colnames
    validate_description
    validate_id
    validate_series
    validate_series_index
    validate_sweep_number
    validate_vectordata

Event Summary
- - \ No newline at end of file diff --git a/doc/+types/+core/TimeIntervals.html b/doc/+types/+core/TimeIntervals.html deleted file mode 100644 index 2053b354..00000000 --- a/doc/+types/+core/TimeIntervals.html +++ /dev/null @@ -1,340 +0,0 @@ - - - - - - types.core.TimeIntervals - MATLAB File Help - - - - - - - - - - -
types.core.TimeIntervals - MATLAB File Help
-
types.core.TimeIntervals
-
  TimeIntervals A container for aggregating epoch data and the TimeSeries that each epoch applies to.
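A minimal sketch of a trials table built with addRow, following the name-value pattern used in the MatNWB tutorials (the 'correct' column is an illustrative user-defined column):

    trials = types.core.TimeIntervals( ...
        'description', 'Behavioral trials', ...
        'colnames', {'start_time', 'stop_time', 'correct'});
    trials.addRow('start_time', 0.0, 'stop_time', 1.5, 'correct', true);
    trials.addRow('start_time', 2.0, 'stop_time', 3.5, 'correct', false);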
Class Details
    Superclasses         types.hdmf_common.DynamicTable, types.untyped.GroupClass
    Sealed               false
    Construct on load    false

Constructor Summary
    TimeIntervals    Constructor for TimeIntervals

Property Summary
colnames(char) The names of the columns in this table. This should be used to specify an order to the columns. 
description(char) Description of what is in this dynamic table. 
idREQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table. 
start_timeREQUIRED (VectorData) Start time of epoch, in seconds. 
stop_timeREQUIRED (VectorData) Stop time of epoch, in seconds. 
tags(VectorData) User-defined tags that identify or categorize events. 
tags_index(VectorIndex) Index for tags. 
timeseries(TimeSeriesReferenceVectorData) An index into a TimeSeries object. 
timeseries_index(VectorIndex) Index for timeseries. 
vectordata(VectorData) Vector columns, including index columns, of this dynamic table. 
Method Summary
    addColumn
    addRow
    addlistener            Add listener for event.
    clear
    delete                 Delete a handle object.
    eq                     == (EQ) Test handle equality.
    export
    findobj                Find objects matching specified conditions.
    findprop               Find property of MATLAB handle object.
    ge                     >= (GE) Greater than or equal relation for handles.
    getRow
    gt                     > (GT) Greater than relation for handles.
    isvalid (Sealed)       Test handle validity.
    le                     <= (LE) Less than or equal relation for handles.
    listener               Add listener for event without binding the listener to the source object.
    loadAll
    lt                     < (LT) Less than relation for handles.
    ne                     ~= (NE) Not equal relation for handles.
    notify                 Notify listeners of event.
    toTable
    validate_colnames
    validate_description
    validate_id
    validate_start_time
    validate_stop_time
    validate_tags
    validate_tags_index
    validate_timeseries
    validate_timeseries_index
    validate_vectordata

Event Summary
- - \ No newline at end of file diff --git a/doc/+types/+core/TimeSeries.html b/doc/+types/+core/TimeSeries.html deleted file mode 100644 index 42274298..00000000 --- a/doc/+types/+core/TimeSeries.html +++ /dev/null @@ -1,375 +0,0 @@ - - - - - - types.core.TimeSeries - MATLAB File Help - - - - - - - - - - -
types.core.TimeSeries - MATLAB File Help
-
types.core.TimeSeries
-
  TimeSeries General purpose time series.
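A minimal sketch of a regularly sampled TimeSeries; the property names are the ones listed in the Property Summary below, and the signal itself is synthetic:

    ts = types.core.TimeSeries( ...
        'data', sin(2 * pi * (0:999) / 100), ...
        'data_unit', 'volts', ...
        'description', 'Synthetic example signal', ...
        'starting_time', 0.0, ...
        'starting_time_rate', 100.0);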
Class Details
    Superclasses         types.core.NWBDataInterface, types.untyped.GroupClass
    Sealed               false
    Construct on load    false

Constructor Summary
    TimeSeries    Constructor for TimeSeries

Property Summary
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value - of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.  -
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
Method Summary
    addlistener            Add listener for event.
    delete                 Delete a handle object.
    eq                     == (EQ) Test handle equality.
    export
    findobj                Find objects matching specified conditions.
    findprop               Find property of MATLAB handle object.
    ge                     >= (GE) Greater than or equal relation for handles.
    gt                     > (GT) Greater than relation for handles.
    isvalid (Sealed)       Test handle validity.
    le                     <= (LE) Less than or equal relation for handles.
    listener               Add listener for event without binding the listener to the source object.
    loadAll
    lt                     < (LT) Less than relation for handles.
    ne                     ~= (NE) Not equal relation for handles.
    notify                 Notify listeners of event.
    validate_comments
    validate_control
    validate_control_description
    validate_data
    validate_data_continuity
    validate_data_conversion
    validate_data_offset
    validate_data_resolution
    validate_data_unit
    validate_description
    validate_starting_time
    validate_starting_time_rate
    validate_timestamps

Event Summary
- - \ No newline at end of file diff --git a/doc/+types/+core/TimeSeriesReferenceVectorData.html b/doc/+types/+core/TimeSeriesReferenceVectorData.html deleted file mode 100644 index 6c413c25..00000000 --- a/doc/+types/+core/TimeSeriesReferenceVectorData.html +++ /dev/null @@ -1,204 +0,0 @@ - - - - - - types.core.TimeSeriesReferenceVectorData - MATLAB File Help - - - - - - - - - - -
types.core.TimeSeriesReferenceVectorData - MATLAB File Help
-
types.core.TimeSeriesReferenceVectorData
-
  TimeSeriesReferenceVectorData Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries.
Class Details
    Superclasses         types.hdmf_common.VectorData, types.untyped.DatasetClass
    Sealed               false
    Construct on load    false

Constructor Summary
    TimeSeriesReferenceVectorData    Constructor for TimeSeriesReferenceVectorData

Property Summary
    data           REQUIRED any
    description    (char) Description of what these vectors represent.
Method Summary
    addlistener            Add listener for event.
    delete                 Delete a handle object.
    eq                     == (EQ) Test handle equality.
    export
    findobj                Find objects matching specified conditions.
    findprop               Find property of MATLAB handle object.
    ge                     >= (GE) Greater than or equal relation for handles.
    gt                     > (GT) Greater than relation for handles.
    isvalid (Sealed)       Test handle validity.
    le                     <= (LE) Less than or equal relation for handles.
    listener               Add listener for event without binding the listener to the source object.
    loadAll
    lt                     < (LT) Less than relation for handles.
    ne                     ~= (NE) Not equal relation for handles.
    notify                 Notify listeners of event.
    validate_data
    validate_description

Event Summary
- - \ No newline at end of file diff --git a/doc/+types/+core/TwoPhotonSeries.html b/doc/+types/+core/TwoPhotonSeries.html deleted file mode 100644 index e4066f25..00000000 --- a/doc/+types/+core/TwoPhotonSeries.html +++ /dev/null @@ -1,498 +0,0 @@ - - - - - - types.core.TwoPhotonSeries - MATLAB File Help - - - - - - - - - - -
types.core.TwoPhotonSeries - MATLAB File Help
-
types.core.TwoPhotonSeries
-
  TwoPhotonSeries Image stack recorded over time from 2-photon microscope.
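A minimal sketch, assuming an ImagingPlane already exists at /general/optophysiology/imaging_plane in the file (the group name is an assumption) and imageStack is an acquired [height x width x frames] array:

    functional = types.core.TwoPhotonSeries( ...
        'data', imageStack, ...
        'data_unit', 'lumens', ...
        'imaging_plane', types.untyped.SoftLink('/general/optophysiology/imaging_plane'), ...
        'starting_time', 0.0, ...
        'starting_time_rate', 30.0);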
Class Details
    Superclasses         types.core.ImageSeries, types.untyped.GroupClass
    Sealed               false
    Construct on load    false

Constructor Summary
    TwoPhotonSeries    Constructor for TwoPhotonSeries

Property Summary
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value - of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.  -
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
deviceDevice 
dimension(int32) Number of pixels on x, y, (and z) axes. 
external_file(char) Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the - image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored - in another NWB file and that file is linked to this file.  -
external_file_starting_frame(int32) Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]. 
field_of_view(single) Width, height and depth of image, or imaged area, in meters. 
format(char) Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image - files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not - present, then the default format='raw' case is assumed.  -
imaging_planeImagingPlane 
pmt_gain(single) Photomultiplier gain. 
scan_line_rate(single) Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information - for analysis, and so good to be stored w/ the actual data.  -
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
Method Summary
    addlistener            Add listener for event.
    delete                 Delete a handle object.
    eq                     == (EQ) Test handle equality.
    export
    findobj                Find objects matching specified conditions.
    findprop               Find property of MATLAB handle object.
    ge                     >= (GE) Greater than or equal relation for handles.
    gt                     > (GT) Greater than relation for handles.
    isvalid (Sealed)       Test handle validity.
    le                     <= (LE) Less than or equal relation for handles.
    listener               Add listener for event without binding the listener to the source object.
    loadAll
    lt                     < (LT) Less than relation for handles.
    ne                     ~= (NE) Not equal relation for handles.
    notify                 Notify listeners of event.
    validate_comments
    validate_control
    validate_control_description
    validate_data
    validate_data_continuity
    validate_data_conversion
    validate_data_offset
    validate_data_resolution
    validate_data_unit
    validate_description
    validate_device
    validate_dimension
    validate_external_file
    validate_external_file_starting_frame
    validate_field_of_view
    validate_format
    validate_imaging_plane
    validate_pmt_gain
    validate_scan_line_rate
    validate_starting_time
    validate_starting_time_rate
    validate_timestamps

Event Summary
- - \ No newline at end of file diff --git a/doc/+types/+core/Units.html b/doc/+types/+core/Units.html deleted file mode 100644 index 7954b1f4..00000000 --- a/doc/+types/+core/Units.html +++ /dev/null @@ -1,429 +0,0 @@ - - - - - - types.core.Units - MATLAB File Help - - - - - - - - - - -
types.core.Units - MATLAB File Help
-
types.core.Units
-
  Units Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times.
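A rough sketch of a two-unit table with ragged spike_times, stored as a VectorData column plus a VectorIndex of cumulative row boundaries (here 3 and 5 spikes). The ObjectView call is given the target object directly, which newer MatNWB versions accept; older versions expect an HDF5 path instead, so treat this as an assumption:

    spikeTimes = types.hdmf_common.VectorData( ...
        'description', 'Spike times for each unit in seconds.', ...
        'data', [0.12, 0.34, 0.56, 0.20, 0.90]);
    spikeTimesIndex = types.hdmf_common.VectorIndex( ...
        'description', 'Index into the spike_times dataset.', ...
        'target', types.untyped.ObjectView(spikeTimes), ...
        'data', [3; 5]);
    units = types.core.Units( ...
        'colnames', {'spike_times'}, ...
        'description', 'Sorted single units', ...
        'spike_times', spikeTimes, ...
        'spike_times_index', spikeTimesIndex, ...
        'id', types.hdmf_common.ElementIdentifiers('data', int64([0; 1])));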
Class Details
    Superclasses         types.hdmf_common.DynamicTable, types.untyped.GroupClass
    Sealed               false
    Construct on load    false

Constructor Summary
    Units    Constructor for Units

Property Summary
colnames(char) The names of the columns in this table. This should be used to specify an order to the columns. 
description(char) Description of what is in this dynamic table. 
electrode_group(VectorData) Electrode group that each spike unit came from. 
electrodes(DynamicTableRegion) Electrode that each spike unit came from, specified using a DynamicTableRegion. 
electrodes_index(VectorIndex) Index into electrodes. 
idREQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table. 
obs_intervals(VectorData) Observation intervals for each unit. 
obs_intervals_index(VectorIndex) Index into the obs_intervals dataset. 
spike_times(VectorData) Spike times for each unit in seconds. 
spike_times_index(VectorIndex) Index into the spike_times dataset. 
vectordata(VectorData) Vector columns, including index columns, of this dynamic table. 
waveform_mean(VectorData) Spike waveform mean for each spike unit. 
waveform_sd(VectorData) Spike waveform standard deviation for each spike unit. 
waveforms(VectorData) Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' - column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded - from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike - events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements - of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' - column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds - to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 - elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first - spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays - for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated - with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. - The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used - to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event - should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for - each waveform must be the same.  -
waveforms_index(VectorIndex) Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail. 
waveforms_index_index(VectorIndex) Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more - detail.  -
Method Summary
    addColumn
    addRow
    addlistener            Add listener for event.
    clear
    delete                 Delete a handle object.
    eq                     == (EQ) Test handle equality.
    export
    findobj                Find objects matching specified conditions.
    findprop               Find property of MATLAB handle object.
    ge                     >= (GE) Greater than or equal relation for handles.
    getRow
    gt                     > (GT) Greater than relation for handles.
    isvalid (Sealed)       Test handle validity.
    le                     <= (LE) Less than or equal relation for handles.
    listener               Add listener for event without binding the listener to the source object.
    loadAll
    lt                     < (LT) Less than relation for handles.
    ne                     ~= (NE) Not equal relation for handles.
    notify                 Notify listeners of event.
    toTable
    validate_colnames
    validate_description
    validate_electrode_group
    validate_electrodes
    validate_electrodes_index
    validate_id
    validate_obs_intervals
    validate_obs_intervals_index
    validate_spike_times
    validate_spike_times_index
    validate_vectordata
    validate_waveform_mean
    validate_waveform_sd
    validate_waveforms
    validate_waveforms_index
    validate_waveforms_index_index

Event Summary
- - \ No newline at end of file diff --git a/doc/+types/+core/VoltageClampSeries.html b/doc/+types/+core/VoltageClampSeries.html deleted file mode 100644 index 272363c9..00000000 --- a/doc/+types/+core/VoltageClampSeries.html +++ /dev/null @@ -1,535 +0,0 @@ - - - - - - types.core.VoltageClampSeries - MATLAB File Help - - - - - - - - - - -
types.core.VoltageClampSeries - MATLAB File Help
-
types.core.VoltageClampSeries
-
  VoltageClampSeries Current data from an intracellular voltage-clamp recording. A corresponding VoltageClampStimulusSeries (stored separately as a stimulus) is used to store the voltage injected.
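A minimal sketch, assuming the recording electrode already exists at /general/intracellular_ephys/electrode_0 (the path is an assumption) and currentTrace holds the recorded current in amperes:

    vcs = types.core.VoltageClampSeries( ...
        'data', currentTrace, ...
        'data_unit', 'amperes', ...
        'electrode', types.untyped.SoftLink('/general/intracellular_ephys/electrode_0'), ...
        'gain', 1e9, ...
        'stimulus_description', 'voltage step protocol', ...
        'sweep_number', uint32(1), ...
        'starting_time', 0.0, ...
        'starting_time_rate', 20000.0);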
Class Details
    Superclasses         types.core.PatchClampSeries, types.untyped.GroupClass
    Sealed               false
    Construct on load    false

Constructor Summary
    VoltageClampSeries    Constructor for VoltageClampSeries

Property Summary
capacitance_fast(single) Fast capacitance, in farads. 
capacitance_fast_unit(char) Unit of measurement for capacitance_fast, which is fixed to 'farads'. 
capacitance_slow(single) Slow capacitance, in farads. 
capacitance_slow_unit(char) Unit of measurement for capacitance_slow, which is fixed to 'farads'. 
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition - system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the - data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 - range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then - the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.  -
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value - of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.  -
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
electrodeIntracellularElectrode 
gain(single) Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). 
resistance_comp_bandwidth(single) Resistance compensation bandwidth, in hertz. 
resistance_comp_bandwidth_unit(char) Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'. 
resistance_comp_correction(single) Resistance compensation correction, in percent. 
resistance_comp_correction_unit(char) Unit of measurement for resistance_comp_correction, which is fixed to 'percent'. 
resistance_comp_prediction(single) Resistance compensation prediction, in percent. 
resistance_comp_prediction_unit(char) Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'. 
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
stimulus_description(char) Protocol/stimulus name for this patch-clamp dataset. 
sweep_number(uint32) Sweep number, allows to group different PatchClampSeries together. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
whole_cell_capacitance_comp(single) Whole cell capacitance compensation, in farads. 
whole_cell_capacitance_comp_unit(char) Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'. 
whole_cell_series_resistance_comp(single) Whole cell series resistance compensation, in ohms. 
whole_cell_series_resistance_comp_unit(char) Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'. 
Method Summary
    addlistener            Add listener for event.
    delete                 Delete a handle object.
    eq                     == (EQ) Test handle equality.
    export
    findobj                Find objects matching specified conditions.
    findprop               Find property of MATLAB handle object.
    ge                     >= (GE) Greater than or equal relation for handles.
    gt                     > (GT) Greater than relation for handles.
    isvalid (Sealed)       Test handle validity.
    le                     <= (LE) Less than or equal relation for handles.
    listener               Add listener for event without binding the listener to the source object.
    loadAll
    lt                     < (LT) Less than relation for handles.
    ne                     ~= (NE) Not equal relation for handles.
    notify                 Notify listeners of event.
    validate_capacitance_fast
    validate_capacitance_slow
    validate_comments
    validate_control
    validate_control_description
    validate_data
    validate_data_continuity
    validate_data_conversion
    validate_data_offset
    validate_data_resolution
    validate_data_unit
    validate_description
    validate_electrode
    validate_gain
    validate_resistance_comp_bandwidth
    validate_resistance_comp_correction
    validate_resistance_comp_prediction
    validate_starting_time
    validate_starting_time_rate
    validate_stimulus_description
    validate_sweep_number
    validate_timestamps
    validate_whole_cell_capacitance_comp
    validate_whole_cell_series_resistance_comp

Event Summary
- - \ No newline at end of file diff --git a/doc/+types/+core/VoltageClampStimulusSeries.html b/doc/+types/+core/VoltageClampStimulusSeries.html deleted file mode 100644 index 4b61d805..00000000 --- a/doc/+types/+core/VoltageClampStimulusSeries.html +++ /dev/null @@ -1,423 +0,0 @@ - - - - - - types.core.VoltageClampStimulusSeries - MATLAB File Help - - - - - - - - - - -
types.core.VoltageClampStimulusSeries - MATLAB File Help
-
types.core.VoltageClampStimulusSeries
-
  VoltageClampStimulusSeries Stimulus voltage applied during a voltage clamp recording.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.PatchClampSeries, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
VoltageClampStimulusSeriesConstructor for VoltageClampStimulusSeries 
- -
Property Summary -
comments(char) Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, - or descriptive information if the primary description field is populated with a computer-readable string.  -
control(uint8) Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. - If present, the length of this array should be the same size as the first dimension of data.  -
control_description(char) Description of each control value. Must be present if control is present. If present, control_description[0] should - describe time points where control == 0.  -
dataREQUIRED (any) Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can - also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.  -
data_continuity(char) Optionally describe the continuity of the data. Can be "continuous", "instantaneous", or "step". For example, a voltage - trace would be "continuous", because samples are recorded from a continuous process. An array of lick times would be "instantaneous", - because the data represents distinct moments in time. Times of image presentations would be "step" because the picture remains - the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. - It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.  -
data_conversion(single) Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. E.g., if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9. A worked MATLAB sketch of this arithmetic appears right after this property summary. 
data_offset(single) Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common - examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and - (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.  -
data_resolution(single) Smallest meaningful difference between values in data, stored in the specified unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0. 
data_unit(char) Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. - To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.  -
description(char) Description of the time series. 
electrodeIntracellularElectrode 
gain(single) Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp). 
starting_time(double) Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample - can be specified and all subsequent ones calculated from the sampling rate attribute.  -
starting_time_rate(single) Sampling rate, in Hz. 
starting_time_unit(char) Unit of measurement for time, which is fixed to 'seconds'. 
stimulus_description(char) Protocol/stimulus name for this patch-clamp dataset. 
sweep_number(uint32) Sweep number; allows different PatchClampSeries to be grouped together. 
timestamps(double) Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. 
timestamps_interval(int32) Value is '1' 
timestamps_unit(char) Unit of measurement for timestamps, which is fixed to 'seconds'. 
- -
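As a concrete illustration of the 'conversion'/'offset' arithmetic described for data_conversion, data_offset, and data_unit above, the following is a sketch only; the raw samples and gain figures are the hypothetical values from the data_conversion description, not part of the class itself:
    % Hypothetical int16 samples from a DAQ with a +/-2.5 V input range and 8000x gain
    raw = int16([-32768, 0, 16384, 32767]);
    data_conversion = 2.5 / 32768 / 8000;   % ~9.5367e-9, as computed in the description above
    data_offset     = 0;                    % signed, already centered samples, so no shift needed
    volts = double(raw) * data_conversion + data_offset;   % values in the base unit ('volts')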
Method Summary -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_comments 
-   - - validate_control 
-   - - validate_control_description 
-   - - validate_data 
-   - - validate_data_continuity 
-   - - validate_data_conversion 
-   - - validate_data_offset 
-   - - validate_data_resolution 
-   - - validate_data_unit 
-   - - validate_description 
-   - - validate_electrode 
-   - - validate_gain 
-   - - validate_starting_time 
-   - - validate_starting_time_rate 
-   - - validate_stimulus_description 
-   - - validate_sweep_number 
-   - - validate_timestamps 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+hdmf_common/AlignedDynamicTable.html b/doc/+types/+hdmf_common/AlignedDynamicTable.html deleted file mode 100644 index c8924672..00000000 --- a/doc/+types/+hdmf_common/AlignedDynamicTable.html +++ /dev/null @@ -1,299 +0,0 @@ - - - - - - types.hdmf_common.AlignedDynamicTable - MATLAB File Help - - - - - - - - - - -
types.hdmf_common.AlignedDynamicTable - MATLAB File Help
-
types.hdmf_common.AlignedDynamicTable
-
  AlignedDynamicTable DynamicTable container that supports storing a collection of sub-tables. Each sub-table is a DynamicTable itself that is aligned with the main table by row index. I.e., all DynamicTables stored in this group MUST have the same number of rows. This type effectively defines a 2-level table in which the main data is stored in the main table implemented by this type and additional columns of the table are grouped into categories, with each category being represented by a separate DynamicTable stored within the group.
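  A rough construction sketch follows. It assumes that, as with ordinary DynamicTable columns, a category sub-table can be handed to the constructor as a name/value pair and that 'categories' accepts a cell array of names; check the generated class if those assumptions do not hold for your MatNWB version:
    % Sub-table holding one category of extra columns; it must have the same number of rows (3) as the main table.
    scores = types.hdmf_common.DynamicTable( ...
        'description', 'extra columns grouped under one category', ...
        'colnames', {'quality'}, ...
        'quality', types.hdmf_common.VectorData('description', 'unit quality', 'data', [0.9; 0.4; 0.7]), ...
        'id', types.hdmf_common.ElementIdentifiers('data', int64([0; 1; 2])));
    aligned = types.hdmf_common.AlignedDynamicTable( ...
        'description', 'main table plus one aligned category', ...
        'colnames', {'depth'}, ...
        'depth', types.hdmf_common.VectorData('description', 'depth in micrometers', 'data', [10; 20; 30]), ...
        'categories', {'scores'}, ...       % assumed: cellstr of category names
        'scores', scores, ...               % assumed: sub-table passed as a name/value pair
        'id', types.hdmf_common.ElementIdentifiers('data', int64([0; 1; 2])));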
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.DynamicTable, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
AlignedDynamicTableConstructor for AlignedDynamicTable 
- -
Property Summary -
categories(char) The names of the categories in this AlignedDynamicTable. Each category is represented by one DynamicTable stored in - the parent group. This attribute should be used to specify an order of categories and the category names must match the names - of the corresponding DynamicTable in the group.  -
colnames(char) The names of the columns in this table. This should be used to specify an order to the columns. 
description(char) Description of what is in this dynamic table. 
dynamictable(DynamicTable) A DynamicTable representing a particular category for columns in the AlignedDynamicTable parent container. - The table MUST be aligned with (i.e., have the same number of rows) as all other DynamicTables stored in the AlignedDynamicTable - parent container. The name of the category is given by the name of the DynamicTable and its description by the description - attribute of the DynamicTable.  -
idREQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table. 
vectordata(VectorData) Vector columns, including index columns, of this dynamic table. 
- -
Method Summary -
-   - - addColumn 
-   - - addRow 
-   - - addlistenerAdd listener for event. 
-   - - clear 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - getRow 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - toTable 
-   - - validate_categories 
-   - - validate_colnames 
-   - - validate_description 
-   - - validate_dynamictable 
-   - - validate_id 
-   - - validate_vectordata 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+hdmf_common/CSRMatrix.html b/doc/+types/+hdmf_common/CSRMatrix.html deleted file mode 100644 index 47ba9089..00000000 --- a/doc/+types/+hdmf_common/CSRMatrix.html +++ /dev/null @@ -1,228 +0,0 @@ - - - - - - types.hdmf_common.CSRMatrix - MATLAB File Help - - - - - - - - - - -
types.hdmf_common.CSRMatrix - MATLAB File Help
-
types.hdmf_common.CSRMatrix
-
  CSRMatrix A compressed sparse row matrix. Data are stored in the standard CSR format, where column indices for row i are stored in indices[indptr[i]:indptr[i+1]] and their corresponding values are stored in data[indptr[i]:indptr[i+1]].
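  A small sketch of the layout described above, encoding the dense matrix [0 2 0; 1 0 3]; the unsigned integer classes are a guess, the schema only requires unsigned integers for indices, indptr, and shape:
    csr = types.hdmf_common.CSRMatrix( ...
        'shape',   uint64([2, 3]), ...       % 2 rows, 3 columns
        'data',    [2, 1, 3], ...            % non-zero values, row by row
        'indices', uint64([1, 0, 2]), ...    % 0-based column index of each stored value
        'indptr',  uint64([0, 1, 3]));       % indices[indptr[i]:indptr[i+1]] are the columns of row i (0-based, half-open)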
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.Container, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
CSRMatrixConstructor for CSRMatrix 
- -
Property Summary -
- - - - - - - - - - - - - - - - - -
dataREQUIRED (any) The non-zero values in the matrix. 
indicesREQUIRED (uint) The column indices. 
indptrREQUIRED (uint) The row index pointer. 
shape(uint) The shape (number of rows, number of columns) of this sparse matrix. 
- -
Method Summary -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_data 
-   - - validate_indices 
-   - - validate_indptr 
-   - - validate_shape 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+hdmf_common/Container.html b/doc/+types/+hdmf_common/Container.html deleted file mode 100644 index b70d0886..00000000 --- a/doc/+types/+hdmf_common/Container.html +++ /dev/null @@ -1,175 +0,0 @@ - - - - - - types.hdmf_common.Container - MATLAB File Help - - - - - - - - - - -
types.hdmf_common.Container - MATLAB File Help
-
types.hdmf_common.Container
-
  Container An abstract data type for a group storing collections of data and metadata. Base type for all data and metadata containers.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.untyped.MetaClass, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
ContainerConstructor for Container 
- -
Method Summary -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+hdmf_common/Data.html b/doc/+types/+hdmf_common/Data.html deleted file mode 100644 index fb163661..00000000 --- a/doc/+types/+hdmf_common/Data.html +++ /dev/null @@ -1,192 +0,0 @@ - - - - - - types.hdmf_common.Data - MATLAB File Help - - - - - - - - - - -
types.hdmf_common.Data - MATLAB File Help
-
types.hdmf_common.Data
-
  Data An abstract data type for a dataset.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.untyped.MetaClass, types.untyped.DatasetClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
DataConstructor for Data 
- -
Property Summary -
- - - - - -
dataREQUIRED any 
- -
Method Summary -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_data 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+hdmf_common/DynamicTable.html b/doc/+types/+hdmf_common/DynamicTable.html deleted file mode 100644 index c61b63b8..00000000 --- a/doc/+types/+hdmf_common/DynamicTable.html +++ /dev/null @@ -1,268 +0,0 @@ - - - - - - types.hdmf_common.DynamicTable - MATLAB File Help - - - - - - - - - - -
types.hdmf_common.DynamicTable - MATLAB File Help
-
types.hdmf_common.DynamicTable
-
  DynamicTable A group containing multiple datasets that are aligned on the first dimension (Currently, this requirement is left up to APIs to check and enforce). These datasets represent different columns in the table. Apart from a column that contains unique identifiers for each row, there are no other required datasets. Users are free to add any number of custom VectorData objects (columns) here. DynamicTable also supports ragged array columns, where each element can be of a different size. To add a ragged array column, use a VectorIndex type to index the corresponding VectorData type. See documentation for VectorData and VectorIndex for more details. Unlike a compound data type, which is analogous to storing an array-of-structs, a DynamicTable can be thought of as a struct-of-arrays. This provides an alternative structure to choose from when optimizing storage for anticipated access patterns. Additionally, this type provides a way of creating a table without having to define a compound type up front. Although this convenience may be attractive, users should think carefully about how data will be accessed. DynamicTable is more appropriate for column-centric access, whereas a dataset with a compound type would be more appropriate for row-centric access. Finally, data size should also be taken into account. For small tables, performance loss may be an acceptable trade-off for the flexibility of a DynamicTable.
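  A minimal construction sketch; it assumes column objects passed as name/value pairs are routed into vectordata (as in the MatNWB tutorials), and uses toTable from the method summary below:
    trials = types.hdmf_common.DynamicTable( ...
        'description', 'trial summary', ...
        'colnames', {'start_time', 'outcome'}, ...
        'start_time', types.hdmf_common.VectorData('description', 'trial start, in seconds', 'data', [0.0; 1.5; 3.2]), ...
        'outcome',    types.hdmf_common.VectorData('description', 'trial outcome', 'data', {'hit'; 'miss'; 'hit'}), ...
        'id', types.hdmf_common.ElementIdentifiers('data', int64([0; 1; 2])));
    T = trials.toTable();   % column-centric access as a native MATLAB table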
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.Container, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
DynamicTableConstructor for DynamicTable 
- -
Property Summary -
- - - - - - - - - - - - - - - - - -
colnames(char) The names of the columns in this table. This should be used to specify an order to the columns. 
description(char) Description of what is in this dynamic table. 
idREQUIRED (ElementIdentifiers) Array of unique identifiers for the rows of this dynamic table. 
vectordata(VectorData) Vector columns, including index columns, of this dynamic table. 
- -
Method Summary -
-   - - addColumn 
-   - - addRow 
-   - - addlistenerAdd listener for event. 
-   - - clear 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - getRow 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - toTable 
-   - - validate_colnames 
-   - - validate_description 
-   - - validate_id 
-   - - validate_vectordata 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+hdmf_common/DynamicTableRegion.html b/doc/+types/+hdmf_common/DynamicTableRegion.html deleted file mode 100644 index 3f2ed8e9..00000000 --- a/doc/+types/+hdmf_common/DynamicTableRegion.html +++ /dev/null @@ -1,216 +0,0 @@ - - - - - - types.hdmf_common.DynamicTableRegion - MATLAB File Help - - - - - - - - - - -
types.hdmf_common.DynamicTableRegion - MATLAB File Help
-
types.hdmf_common.DynamicTableRegion
-
  DynamicTableRegion DynamicTableRegion provides a link from one table to an index or region of another. The `table` attribute is a link to another `DynamicTable`, indicating which table is referenced, and the data is int(s) indicating the row(s) (0-indexed) of the target array. `DynamicTableRegion`s can be used to associate rows with repeated meta-data without data duplication. They can also be used to create hierarchical relationships between multiple `DynamicTable`s. `DynamicTableRegion` objects may be paired with a `VectorIndex` object to create ragged references, so a single cell of a `DynamicTable` can reference many rows of another `DynamicTable`.
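  A short sketch, assuming an existing DynamicTable already sits in the variable electrodes; in MatNWB the object reference carried by the table attribute is typically expressed with types.untyped.ObjectView:
    region = types.hdmf_common.DynamicTableRegion( ...
        'table', types.untyped.ObjectView(electrodes), ...    % which DynamicTable the row indices refer to (electrodes is assumed)
        'description', 'the electrodes this series was recorded from', ...
        'data', [0; 2]);                                      % 0-based row indices into that table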
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.VectorData, types.untyped.DatasetClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
DynamicTableRegionConstructor for DynamicTableRegion 
- -
Property Summary -
- - - - - - - - - - - - - -
dataREQUIRED any 
description(char) Description of what these vectors represent. 
table(Object Reference to DynamicTable) Reference to the DynamicTable object that this region applies to. 
- -
Method Summary -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_data 
-   - - validate_description 
-   - - validate_tableReference to type `DynamicTable` 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+hdmf_common/ElementIdentifiers.html b/doc/+types/+hdmf_common/ElementIdentifiers.html deleted file mode 100644 index c40c623a..00000000 --- a/doc/+types/+hdmf_common/ElementIdentifiers.html +++ /dev/null @@ -1,192 +0,0 @@ - - - - - - types.hdmf_common.ElementIdentifiers - MATLAB File Help - - - - - - - - - - -
types.hdmf_common.ElementIdentifiers - MATLAB File Help
-
types.hdmf_common.ElementIdentifiers
-
  ElementIdentifiers A list of unique identifiers for values within a dataset, e.g. rows of a DynamicTable.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.Data, types.untyped.DatasetClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
ElementIdentifiersConstructor for ElementIdentifiers 
- -
Property Summary -
- - - - - -
dataREQUIRED any 
- -
Method Summary -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_data 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+hdmf_common/SimpleMultiContainer.html b/doc/+types/+hdmf_common/SimpleMultiContainer.html deleted file mode 100644 index c97cebd3..00000000 --- a/doc/+types/+hdmf_common/SimpleMultiContainer.html +++ /dev/null @@ -1,204 +0,0 @@ - - - - - - types.hdmf_common.SimpleMultiContainer - MATLAB File Help - - - - - - - - - - -
types.hdmf_common.SimpleMultiContainer - MATLAB File Help
-
types.hdmf_common.SimpleMultiContainer
-
  SimpleMultiContainer A simple Container for holding onto multiple containers.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.Container, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
SimpleMultiContainerConstructor for SimpleMultiContainer 
- -
Property Summary -
- - - - - - - - - -
container(Container) Container objects held within this SimpleMultiContainer. 
data(Data) Data objects held within this SimpleMultiContainer. 
- -
Method Summary -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_container 
-   - - validate_data 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+hdmf_common/VectorData.html b/doc/+types/+hdmf_common/VectorData.html deleted file mode 100644 index ec011b76..00000000 --- a/doc/+types/+hdmf_common/VectorData.html +++ /dev/null @@ -1,204 +0,0 @@ - - - - - - types.hdmf_common.VectorData - MATLAB File Help - - - - - - - - - - -
types.hdmf_common.VectorData - MATLAB File Help
-
types.hdmf_common.VectorData
-
  VectorData An n-dimensional dataset representing a column of a DynamicTable. If used without an accompanying VectorIndex, first dimension is along the rows of the DynamicTable and each step along the first dimension is a cell of the larger table. VectorData can also be used to represent a ragged array if paired with a VectorIndex. This allows for storing arrays of varying length in a single cell of the DynamicTable by indexing into this VectorData. The first vector is at VectorData[0:VectorIndex[0]]. The second vector is at VectorData[VectorIndex[0]:VectorIndex[1]], and so on.
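  The slicing convention in the last two sentences, written out with plain MATLAB arrays; this is purely illustrative, the 0-based, end-exclusive indexing is the on-disk convention:
    vdata  = [11 12 13 14 15];   % VectorData contents
    vindex = [2 5];              % VectorIndex contents
    % cell 1: VectorData[0:VectorIndex[0]]              -> vdata(1:2) == [11 12]
    % cell 2: VectorData[VectorIndex[0]:VectorIndex[1]] -> vdata(3:5) == [13 14 15]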
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.Data, types.untyped.DatasetClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
VectorDataConstructor for VectorData 
- -
Property Summary -
- - - - - - - - - -
dataREQUIRED any 
description(char) Description of what these vectors represent. 
- -
Method Summary -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_data 
-   - - validate_description 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+hdmf_common/VectorIndex.html b/doc/+types/+hdmf_common/VectorIndex.html deleted file mode 100644 index edd7d53c..00000000 --- a/doc/+types/+hdmf_common/VectorIndex.html +++ /dev/null @@ -1,216 +0,0 @@ - - - - - - types.hdmf_common.VectorIndex - MATLAB File Help - - - - - - - - - - -
types.hdmf_common.VectorIndex - MATLAB File Help
-
types.hdmf_common.VectorIndex
-
  VectorIndex Used with VectorData to encode a ragged array. An array of indices into the first dimension of the target VectorData, and forming a map between the rows of a DynamicTable and the indices of the VectorData. The name of the VectorIndex is expected to be the name of the target VectorData object followed by "_index".
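  A construction sketch for a ragged column and its index; the '_index' naming and the ObjectView-based target reference are the only assumptions beyond the description above:
    spike_times = types.hdmf_common.VectorData( ...
        'description', 'spike times for all units, concatenated', ...
        'data', [0.1 0.2 0.9 1.4 1.7]);
    spike_times_index = types.hdmf_common.VectorIndex( ...
        'description', 'index into spike_times', ...
        'target', types.untyped.ObjectView(spike_times), ...   % assumed: ObjectView built from the target object
        'data', [2; 5]);                                       % unit 0 -> first 2 entries, unit 1 -> next 3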
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.VectorData, types.untyped.DatasetClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
VectorIndexConstructor for VectorIndex 
- -
Property Summary -
- - - - - - - - - - - - - -
dataREQUIRED any 
description(char) Description of what these vectors represent. 
target(Object Reference to VectorData) Reference to the target dataset that this index applies to. 
- -
Method Summary -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_data 
-   - - validate_description 
-   - - validate_targetReference to type `VectorData` 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+hdmf_experimental/EnumData.html b/doc/+types/+hdmf_experimental/EnumData.html deleted file mode 100644 index cb7d0569..00000000 --- a/doc/+types/+hdmf_experimental/EnumData.html +++ /dev/null @@ -1,216 +0,0 @@ - - - - - - types.hdmf_experimental.EnumData - MATLAB File Help - - - - - - - - - - -
types.hdmf_experimental.EnumData - MATLAB File Help
-
types.hdmf_experimental.EnumData
-
  EnumData Data that come from a fixed set of values. A data value of i corresponds to the i-th value in the VectorData referenced by the 'elements' attribute.
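  A decoding sketch of the i-th-value rule; the label set and codes are made up, and the ObjectView-based elements reference is an assumption:
    labels = types.hdmf_common.VectorData( ...
        'description', 'the allowed values', ...
        'data', {'left'; 'right'; 'center'});
    codes = types.hdmf_experimental.EnumData( ...
        'description', 'stimulus side per trial', ...
        'elements', types.untyped.ObjectView(labels), ...   % assumed: ObjectView built from the labels object
        'data', uint8([0 1 0 2]));                          % 0 -> 'left', 1 -> 'right', 2 -> 'center'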
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.VectorData, types.untyped.DatasetClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
EnumDataConstructor for EnumData 
- -
Property Summary -
- - - - - - - - - - - - - -
dataREQUIRED any 
description(char) Description of what these vectors represent. 
elements(Object Reference to VectorData) Reference to the VectorData object that contains the enumerable elements 
- -
Method Summary -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_data 
-   - - validate_description 
-   - - validate_elementsReference to type `VectorData` 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+hdmf_experimental/ExternalResources.html b/doc/+types/+hdmf_experimental/ExternalResources.html deleted file mode 100644 index 9e122751..00000000 --- a/doc/+types/+hdmf_experimental/ExternalResources.html +++ /dev/null @@ -1,240 +0,0 @@ - - - - - - types.hdmf_experimental.ExternalResources - MATLAB File Help - - - - - - - - - - -
types.hdmf_experimental.ExternalResources - MATLAB File Help
-
types.hdmf_experimental.ExternalResources
-
  ExternalResources A set of four tables for tracking external resource references in a file. NOTE: this data type is in beta testing and is subject to change in a later version.
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.hdmf_common.Container, types.untyped.GroupClass
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
ExternalResourcesConstructor for ExternalResources 
- -
Property Summary -
- - - - - - - - - - - - - - - - - - - - - -
entitiesREQUIRED (Data) A table for mapping user terms (i.e., keys) to resource entities. 
keysREQUIRED (Data) A table for storing user terms that are used to refer to external resources. 
object_keysREQUIRED (Data) A table for identifying which objects use which keys. 
objectsREQUIRED (Data) A table for identifying which objects in a file contain references to external resources. 
resourcesREQUIRED (Data) A table for mapping user terms (i.e., keys) to resource entities. 
- -
Method Summary -
-   - - addlistenerAdd listener for event. 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - export 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - loadAll 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
-   - - validate_entities 
-   - - validate_keys 
-   - - validate_object_keys 
-   - - validate_objects 
-   - - validate_resources 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+untyped/+datapipe/+dynamic/Filter.html b/doc/+types/+untyped/+datapipe/+dynamic/Filter.html deleted file mode 100644 index 2bf6ae54..00000000 --- a/doc/+types/+untyped/+datapipe/+dynamic/Filter.html +++ /dev/null @@ -1,1326 +0,0 @@ - - - - - - types.untyped.datapipe.dynamic.Filter - MATLAB File Help - - - - - - - - - - -
types.untyped.datapipe.dynamic.Filter - MATLAB File Help
-
types.untyped.datapipe.dynamic.Filter
-
 Filter Compression filter registered to HDF5
-  as defined by (https://portal.hdfgroup.org/display/support/Filters)
-  Submit an issue if we're missing one you wish to use!
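 A usage sketch with DataPipe follows. It assumes DataPipe accepts a 'filters' argument taking types.untyped.datapipe.properties.* objects and that the chosen third-party filter plugin is installed where HDF5 can find it; see the DynamicFilter class below for the constructor signature:
    import types.untyped.datapipe.dynamic.Filter
    import types.untyped.datapipe.properties.DynamicFilter
    zstd = DynamicFilter(Filter.ZStandard);   % optional filter parameters can be passed as a second argument
    pipe = types.untyped.DataPipe('data', rand(1000, 1), 'filters', zstd);   % 'filters' argument is an assumption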
- -
Class Details
- - - - - - - - - - - - - -
Superclassesuint64
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
FilterCompression filter registered to HDF5 
- -
Enumeration Summary -
APAX 
B3D 
BLOSC 
BZIP2 
BitGroom 
BitShuffle 
CBF 
CCSDS_123 
FAPEC 
FCIDECOMP 
FPZip 
GBR (Granular BitRound) 
JPEG 
JPEG_LS 
JPEG_XR 
LPC_Rice 
LZ4 
LZF 
LZO 
MAFISC 
SPDP 
SZ 
SZ3 
Snappy 
VBZ 
ZFP 
ZStandard 
- -
Method Summary -
-   - - abs 
-   - - accumarray 
-   - - all 
-   - - and 
-   - - any 
-   - - balance 
-   - - bandwidth 
-   - - bitand 
-   - - bitcmp 
-   - - bitget 
-   - - bitor 
-   - - bitset 
-   - - bitshift 
-   - - bitxor 
-   - - bsxfun 
-   - - cat 
-   - - ceil 
-   - - cell 
-   - - cellstr 
-   - - char 
-   - - chol 
-   - - cholupdate 
-   - - complex 
-   - - conj 
-   - - conv2 
-   - - ctranspose 
-   - - cummax 
-   - - cummin 
-   - - cumprod 
-   - - cumsum 
-   - - diag 
-   - - diff 
-   - - double 
-   - - eig 
-   - - end 
-   - - eq 
-   - - fft 
-   - - fftn 
-   - - filter 
-   - - find 
-   - - fix 
-   - - floor 
-   - - full 
-   - - function_handle 
-   - - ge 
-   - - gt 
-   - - horzcat 
-   - - ifft 
-   - - ifftn 
-   - - imag 
-   - - int16 
-   - - int32 
-   - - int64 
-   - - int8 
-   - - intersect 
-   - - isempty 
-   - - isequal 
-   - - isequaln 
-   - - isequalwithequalnans 
-   - - isfinite 
-   - - isfloat 
-   - - isinf 
-   - - isinteger 
-   - - islogical 
-   - - ismember 
-   - - isnan 
-   - - isnumeric 
-   - - isreal 
-   - - isscalar 
-   - - issorted 
-   - - issortedrows 
-   - - issparse 
-   - - isvector 
-   - - ldivide 
-   - - ldl 
-   - - le 
-   - - length 
-   - - linsolve 
-   - - logical 
-   - - lt 
-   - - lu 
-   - - max 
-   - - maxk 
-   - - min 
-   - - mink 
-   - - minus 
-   - - mldivide 
-   - - mod 
-   - - mpower 
-   - - mrdivide 
-   - - mtimes 
-   - - ndims 
-   - - ne 
-   - - nnz 
-   - - nonzeros 
-   - - norm 
-   - - not 
-   - - numel 
-   - - nzmax 
-   - - or 
-   - - ordqz 
-   - - ordschur 
-   - - permute 
-   - - plot 
-   - - plus 
-   - - power 
-   - - prod 
-   - - qr 
-   - - qz 
-   - - rdivide 
-   - - real 
-   - - rem 
-   - - reshape 
-   - - round 
-   - - schur 
-   - - setdiff 
-   - - setxor 
-   - - sign 
-   - - single 
-   - - size 
-   - - sort 
-   - - sortrowsc 
-   - - strcmp 
-   - - strcmpi 
-   - - string 
-   - - strncmp 
-   - - strncmpi 
-   - - subsasgn 
-   - - subsindex 
-   - - subsref 
-   - - sum 
-   - - svd 
-   - - times 
-   - - transpose 
-   - - tril 
-   - - triu 
-   - - uint16 
-   - - uint32 
-   - - uint8 
-   - - uminus 
-   - - union 
-   - - uplus 
-   - - vecnorm 
-   - - vertcat 
-   - - xor 
- - \ No newline at end of file diff --git a/doc/+types/+untyped/+datapipe/+properties/DynamicFilter.html b/doc/+types/+untyped/+datapipe/+properties/DynamicFilter.html deleted file mode 100644 index bec8e035..00000000 --- a/doc/+types/+untyped/+datapipe/+properties/DynamicFilter.html +++ /dev/null @@ -1,213 +0,0 @@ - - - - - - types.untyped.datapipe.properties.DynamicFilter - MATLAB File Help - - - - - - - - - - -
types.untyped.datapipe.properties.DynamicFilter - MATLAB File Help
-
types.untyped.datapipe.properties.DynamicFilter
-
types.untyped.datapipe.properties.DynamicFilter is a class.
-    obj = DynamicFilter(filter, parameters)
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.untyped.datapipe.Property
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
DynamicFilter 
- -
Property Summary -
- - - - - - - - - -
dynamicFilter 
parameters 
- -
Method Summary -
-   - - addTo 
-   - - addlistenerAdd listener for event. 
Sealed -   - - catConcatenation for heterogeneous arrays 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - horzcatHorizontal concatenation for heterogeneous arrays 
-   - - isInDcpl 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
Sealed -   - - vertcatVertical concatenation for heterogeneous arrays 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/+types/+untyped/+datapipe/+properties/Shuffle.html b/doc/+types/+untyped/+datapipe/+properties/Shuffle.html deleted file mode 100644 index 5faa95da..00000000 --- a/doc/+types/+untyped/+datapipe/+properties/Shuffle.html +++ /dev/null @@ -1,208 +0,0 @@ - - - - - - types.untyped.datapipe.properties.Shuffle - MATLAB File Help - - - - - - - - - - -
types.untyped.datapipe.properties.Shuffle - MATLAB File Help
-
types.untyped.datapipe.properties.Shuffle
-
 Shuffle Shuffle Filter
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.untyped.datapipe.Property
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
ShuffleShuffle Filter 
- -
Property Summary -
- - - - - -
FILTER_NAME 
- -
Method Summary -
-   - - addTo 
-   - - addlistenerAdd listener for event. 
Sealed -   - - catConcatenation for heterogeneous arrays 
-   - - deleteDelete a handle object. 
-   - - eq== (EQ) Test handle equality. 
-   - - findobjFind objects matching specified conditions. 
-   - - findpropFind property of MATLAB handle object. 
-   - - ge>= (GE) Greater than or equal relation for handles. 
-   - - gt> (GT) Greater than relation for handles. 
Sealed -   - - horzcatHorizontal concatenation for heterogeneous arrays 
Static -   - - isInDcpl 
Sealed -   - - isvalidTest handle validity. 
-   - - le<= (LE) Less than or equal relation for handles. 
-   - - listenerAdd listener for event without binding the listener to the source object. 
-   - - lt< (LT) Less than relation for handles. 
-   - - ne~= (NE) Not equal relation for handles. 
-   - - notifyNotify listeners of event. 
Sealed -   - - vertcatVertical concatenation for heterogeneous arrays 
- -
Event Summary -
-
- - \ No newline at end of file diff --git a/doc/NwbFile.html b/doc/NwbFile.html deleted file mode 100644 index e793d6fd..00000000 --- a/doc/NwbFile.html +++ /dev/null @@ -1,829 +0,0 @@ - - - - - - NwbFile - MATLAB File Help - - - - - - - - - - -
NwbFile - MATLAB File Help
-
NwbFile
-
  NwbFile Root object representing data read from an NWB file.
- 
-  Requires that core and extension NWB types have been generated
-  and reside in a 'types' package on the matlab path.
- 
-  Example. Construct an object from scratch for export:
-     nwb = NwbFile;
-     nwb.epochs = types.core.Epochs;
-     nwbExport(nwb, 'epoch.nwb');
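   The read-side counterpart (nwbRead is MatNWB's reader; the file name simply reuses the export example above):
      nwb = nwbRead('epoch.nwb');
      disp(nwb)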
See also
- -
Class Details
- - - - - - - - - - - - - -
Superclassestypes.core.NWBFile
Sealedfalse
Construct on loadfalse
- -
Constructor Summary -
- - - - - -
NwbFileRoot object representing data read from an NWB file. 
- -
Property Summary -
acquisition(DynamicTable|NWBDataInterface) Tabular data that is relevant to acquisition | Acquired, raw data. 
analysis(DynamicTable|NWBContainer) Tabular data that is relevant to data stored in analysis | Custom analysis results. 
file_create_dateREQUIRED (datetime) A record of the date the file was created and of subsequent modifications. The date is stored in UTC with - local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" - with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this - may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array.  -
general(LabMetaData) Place-holder that can be extended so that lab-specific meta-data can be placed in /general. 
general_data_collection(char) Notes about data collection and analysis. 
general_devices(Device) Data acquisition devices. 
general_experiment_description(char) General description of the experiment. 
general_experimenter(char) Name of person(s) who performed the experiment. Can also specify roles of different people involved. 
general_extracellular_ephys(ElectrodeGroup) Physical group of electrodes. 
general_extracellular_ephys_electrodes(DynamicTable) A table of all electrodes (i.e. channels) used for recording. 
general_institution(char) Institution(s) where experiment was performed. 
general_intracellular_ephys(IntracellularElectrode) An intracellular electrode. 
general_intracellular_ephys_experimental_conditions(ExperimentalConditionsTable) A table for grouping different intracellular recording repetitions together that belong to the same experimental conditions. 
general_intracellular_ephys_filtering(char) [DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and - parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute - for each TimeSeries.  -
general_intracellular_ephys_intracellular_recordings(IntracellularRecordingsTable) A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. In this case, both the stimulus and response will point to the same TimeSeries, while the idx_start and count of the invalid column will be set to -1, thus indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated with an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used. 
general_intracellular_ephys_repetitions(RepetitionsTable) A table for grouping different sequential intracellular recordings together. With each SequentialRecording - typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli - applied in sequence.  -
general_intracellular_ephys_sequential_recordings(SequentialRecordingsTable) A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where a sequence of stimuli of the same type with varying parameters has been presented in sequence. 
general_intracellular_ephys_simultaneous_recordings(SimultaneousRecordingsTable) A table for grouping different intracellular recordings from the IntracellularRecordingsTable - table together that were recorded simultaneously from different electrodes  -
general_intracellular_ephys_sweep_table(SweepTable) [DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by the IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata. 
general_keywords(char) Terms to search over. 
general_lab(char) Laboratory where experiment was performed. 
general_notes(char) Notes about the experiment. 
general_optogenetics(OptogeneticStimulusSite) An optogenetic stimulation site. 
general_optophysiology(ImagingPlane) An imaging plane. 
general_pharmacology(char) Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus - dosage, concentration, etc.  -
general_protocol(char) Experimental protocol, if applicable. e.g., include IACUC protocol number. 
general_related_publications(char) Publication information. PMID, DOI, URL, etc. 
general_session_id(char) Lab-specific ID for the session. 
general_slices(char) Description of slices, including information about preparation thickness, orientation, temperature, and bath solution. 
general_source_script(char) Script file or link to public source code used to create this NWB file. 
general_source_script_file_name(char) Name of script file. 
general_stimulus(char) Notes about stimuli, such as how and where they were presented. 
general_subject(Subject) Information about the animal or person from which the data was measured. 
general_surgery(char) Narrative description about surgery/surgeries, including date(s) and who performed surgery. 
general_virus(char) Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, - etc.  -
identifierREQUIRED (char) A unique text identifier for the file. For example, concatenated lab name, file creation date/time and experimentalist, - or a hash of these and/or other values. The goal is that the string should be unique to all other files.  -
intervals(TimeIntervals) Optional additional table(s) for describing other experimental time intervals. 
intervals_epochs(TimeIntervals) Divisions in time marking experimental stages or sub-divisions of a single recording session. 
intervals_invalid_times(TimeIntervals) Time intervals that should be removed from analysis. 
intervals_trials(TimeIntervals) Repeated experimental events that have a logical grouping. 
nwb_version(char) File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, - minor and patch numbers.  -
processing(ProcessingModule) Intermediate analysis of acquired data. 
scratch(DynamicTable|NWBContainer|ScratchData) Any one-off tables | Any one-off containers | Any one-off datasets 
session_descriptionREQUIRED (char) A description of the experimental session and data in the file. 
session_start_timeREQUIRED (datetime) Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as - ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone offset. - Date accuracy is up to milliseconds.  -
stimulus_presentation(TimeSeries) TimeSeries objects containing data of presented stimuli. 
stimulus_templates(Images|TimeSeries) Images objects containing images of presented stimuli. | TimeSeries objects containing template data of - presented stimuli.  -
timestamps_reference_timeREQUIRED (datetime) Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone - offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in "Z" with no timezone - offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero).  -
units(Units) Data about sorted spike units. 
- -
Method Summary -
addlistener - Add listener for event.
delete - Delete a handle object.
eq - == (EQ) Test handle equality.
export - add to file create date
findobj - Find objects matching specified conditions.
findprop - Find property of MATLAB handle object.
ge - >= (GE) Greater than or equal relation for handles.
gt - > (GT) Greater than relation for handles.
isvalid (Sealed) - Test handle validity.
le - <= (LE) Less than or equal relation for handles.
listener - Add listener for event without binding the listener to the source object.
loadAll
lt - < (LT) Less than relation for handles.
ne - ~= (NE) Not equal relation for handles.
notify - Notify listeners of event.
resolve
searchFor - Searches this NwbFile object for a given typename.
validate_acquisition
validate_analysis
validate_file_create_date
validate_general
validate_general_data_collection
validate_general_devices
validate_general_experiment_description
validate_general_experimenter
validate_general_extracellular_ephys
validate_general_extracellular_ephys_electrodes
validate_general_institution
validate_general_intracellular_ephys
validate_general_intracellular_ephys_experimental_conditions
validate_general_intracellular_ephys_filtering
validate_general_intracellular_ephys_intracellular_recordings
validate_general_intracellular_ephys_repetitions
validate_general_intracellular_ephys_sequential_recordings
validate_general_intracellular_ephys_simultaneous_recordings
validate_general_intracellular_ephys_sweep_table
validate_general_keywords
validate_general_lab
validate_general_notes
validate_general_optogenetics
validate_general_optophysiology
validate_general_pharmacology
validate_general_protocol
validate_general_related_publications
validate_general_session_id
validate_general_slices
validate_general_source_script
validate_general_source_script_file_name
validate_general_stimulus
validate_general_subject
validate_general_surgery
validate_general_virus
validate_identifier
validate_intervals
validate_intervals_epochs
validate_intervals_invalid_times
validate_intervals_trials
validate_processing
validate_scratch
validate_session_description
validate_session_start_time
validate_stimulus_presentation
validate_stimulus_templates
validate_timestamps_reference_time
validate_units
Event Summary
- - \ No newline at end of file diff --git a/doc/alpha.png b/doc/alpha.png deleted file mode 100644 index c73de7b0..00000000 Binary files a/doc/alpha.png and /dev/null differ diff --git a/doc/c++.png b/doc/c++.png deleted file mode 100644 index 24f56e62..00000000 Binary files a/doc/c++.png and /dev/null differ diff --git a/doc/c.png b/doc/c.png deleted file mode 100644 index c39fbf0e..00000000 Binary files a/doc/c.png and /dev/null differ diff --git a/doc/demoicon.gif b/doc/demoicon.gif deleted file mode 100644 index c89e7acb..00000000 Binary files a/doc/demoicon.gif and /dev/null differ diff --git a/doc/down.png b/doc/down.png deleted file mode 100644 index d41104a2..00000000 Binary files a/doc/down.png and /dev/null differ diff --git a/doc/doxysearch.php b/doc/doxysearch.php deleted file mode 100644 index 36112a42..00000000 --- a/doc/doxysearch.php +++ /dev/null @@ -1,329 +0,0 @@ -$word, - "match"=>$w, - "index"=>$statIdx, - "full"=>strlen($w)==strlen($word), - "docs"=>array() - ); - } - $w = readString($file); - } - $totalFreq=0; - for ($count=$start;$count$idx,"freq"=>$freq,"rank"=>0.0); - $totalFreq+=$freq; - if ($statInfo["full"]) $totalfreq+=$freq; - } - // read name an url info for the doc - for ($i=0;$i<$numDocs;$i++) - { - fseek($file,$docInfo[$i]["idx"]); - $docInfo[$i]["name"]=readString($file); - $docInfo[$i]["url"]=readString($file); - } - $statInfo["docs"]=$docInfo; - } - for ($count=$start;$count$key, - "name"=>$di["name"], - "rank"=>$rank - ); - } - $docs[$key]["words"][] = array( - "word"=>$wordInfo["word"], - "match"=>$wordInfo["match"], - "freq"=>$di["freq"] - ); - } - } - return $docs; -} - -function normalize_ranking(&$docs) -{ - $maxRank = 0.0000001; - // compute maximal rank - foreach ($docs as $doc) - { - if ($doc["rank"]>$maxRank) - { - $maxRank=$doc["rank"]; - } - } - reset($docs); - // normalize rankings - while (list ($key, $val) = each ($docs)) - { - $docs[$key]["rank"]*=100/$maxRank; - } -} - -function filter_results($docs,&$requiredWords,&$forbiddenWords) -{ - $filteredDocs=array(); - while (list ($key, $val) = each ($docs)) - { - $words = &$docs[$key]["words"]; - $copy=1; // copy entry by default - if (sizeof($requiredWords)>0) - { - foreach ($requiredWords as $reqWord) - { - $found=0; - foreach ($words as $wordInfo) - { - $found = $wordInfo["word"]==$reqWord; - if ($found) break; - } - if (!$found) - { - $copy=0; // document contains none of the required words - break; - } - } - } - if (sizeof($forbiddenWords)>0) - { - foreach ($words as $wordInfo) - { - if (in_array($wordInfo["word"],$forbiddenWords)) - { - $copy=0; // document contains a forbidden word - break; - } - } - } - if ($copy) $filteredDocs[$key]=$docs[$key]; - } - return $filteredDocs; -} - -function compare_rank($a,$b) -{ - return ($a["rank"]>$b["rank"]) ? -1 : 1; -} - -function sort_results($docs,&$sorted) -{ - $sorted = $docs; - usort($sorted,"compare_rank"); - return $sorted; -} - -function report_results(&$docs) -{ - echo "\n"; - echo " \n"; - echo " \n"; - echo " \n"; - $numDocs = sizeof($docs); - if ($numDocs==0) - { - echo " \n"; - echo " \n"; - echo " \n"; - } - else - { - echo " \n"; - echo " \n"; - echo " \n"; - $num=1; - foreach ($docs as $doc) - { - echo " \n"; - echo " "; - echo "\n"; - echo " \n"; - echo " \n"; - echo " \n"; - $num++; - } - } - echo "

Search Results

".matches_text(0)."
".matches_text($numDocs); - echo "\n"; - echo "
$num.".$doc["name"]."
Matches: "; - foreach ($doc["words"] as $wordInfo) - { - $word = $wordInfo["word"]; - $matchRight = substr($wordInfo["match"],strlen($word)); - echo "$word$matchRight(".$wordInfo["freq"].") "; - } - echo "
\n"; -} - -function matches_text($num) -{ - if ($num==0) - { - return 'Sorry, no documents matching your query.'; - } - else if ($num==1) - { - return 'Found 1 document matching your query.'; - } - else // $num>1 - { - return 'Found '.$num.' documents matching your query. Showing best matches first.'; - } -} - -function main($idxfile) -{ - if(strcmp('4.1.0', phpversion()) > 0) - { - die("Error: PHP version 4.1.0 or above required!"); - } - if (!($file=fopen($idxfile,"rb"))) - { - die("Error: Search index file could NOT be opened!"); - } - if (readHeader($file)!="DOXS") - { - die("Error: Header of index file is invalid!"); - } - $query=""; - if (array_key_exists("query", $_GET)) - { - $query=$_GET["query"]; - } - $results = array(); - $requiredWords = array(); - $forbiddenWords = array(); - $foundWords = array(); - $word=strtolower(strtok($query," ")); - while ($word) // for each word in the search query - { - if (($word{0}=='+')) { $word=substr($word,1); $requiredWords[]=$word; } - if (($word{0}=='-')) { $word=substr($word,1); $forbiddenWords[]=$word; } - if (!in_array($word,$foundWords)) - { - $foundWords[]=$word; - search($file,$word,$results); - } - $word=strtolower(strtok(" ")); - } - $docs = array(); - combine_results($results,$docs); - // filter out documents with forbidden word or that do not contain - // required words - $filteredDocs = filter_results($docs,$requiredWords,$forbiddenWords); - // normalize rankings so they are in the range [0-100] - normalize_ranking($filteredDocs); - // sort the results based on rank - $sorted = array(); - sort_results($filteredDocs,$sorted); - // report results to the user - report_results($sorted); - fclose($file); -} - -?> diff --git a/doc/fortran.png b/doc/fortran.png deleted file mode 100644 index 350c572e..00000000 Binary files a/doc/fortran.png and /dev/null differ diff --git a/doc/generateCore.html b/doc/generateCore.html deleted file mode 100644 index aefc5dd1..00000000 --- a/doc/generateCore.html +++ /dev/null @@ -1,99 +0,0 @@ - - - - Description of generateCore - - - - - - - - - -
Home > . > generateCore.m
- - - -

generateCore -

- -

PURPOSE ^

-
GENERATECORE Generate Matlab classes from NWB core schema files
- -

SYNOPSIS ^

-
function generateCore(varargin)
- -

DESCRIPTION ^

-
 GENERATECORE Generate Matlab classes from NWB core schema files
-   GENERATECORE()  Generate classes (Matlab m-files) from the
-   NWB:N core namespace file.
-
-   GENERATECORE(core_or_extension_paths,...)  Generate classes for the
-   core namespace as well as one or more extensions.  Each input filename
-   should be an NWB namespace file.
-
-   A cache of schema data is generated in the 'namespaces' subdirectory in
-   the current working directory.  This is for allowing cross-referencing
-   classes between multiple namespaces.
-
-   Output files are generated and placed in a '+types' subdirectory in the
-   current working directory.
-
-   Example:
-      generateCore();
-      generateCore('schema/core/nwb.namespace.yaml');
-      generateCore('schema/my_ext/myext.namespace.yaml');
-
-   See also GENERATEEXTENSION
- - -

CROSS-REFERENCE INFORMATION ^

-This function calls: -
    -
-This function is called by: -
    -
- - - - -

SOURCE CODE ^

-
0001 function generateCore(varargin)
-0002 % GENERATECORE Generate Matlab classes from NWB core schema files
-0003 %   GENERATECORE()  Generate classes (Matlab m-files) from the
-0004 %   NWB:N core namespace file.
-0005 %
-0006 %   GENERATECORE(core_or_extension_paths,...)  Generate classes for the
-0007 %   core namespace as well as one or more extensions.  Each input filename
-0008 %   should be an NWB namespace file.
-0009 %
-0010 %   A cache of schema data is generated in the 'namespaces' subdirectory in
-0011 %   the current working directory.  This is for allowing cross-referencing
-0012 %   classes between multiple namespaces.
-0013 %
-0014 %   Output files are generated and placed in a '+types' subdirectory in the
-0015 %   current working directory.
-0016 %
-0017 %   Example:
-0018 %      generateCore();
-0019 %      generateCore('schema/core/nwb.namespace.yaml');
-0020 %      generateCore('schema/my_ext/myext.namespace.yaml');
-0021 %
-0022 %   See also GENERATEEXTENSION
-0023 if nargin == 0
-0024     [nwbLocation, ~, ~] = fileparts(mfilename('fullpath'));
-0025     namespacePath = fullfile(nwbLocation, 'schema', 'core', 'nwb.namespace.yaml');
-0026     generateExtension(namespacePath);
-0027 else
-0028     for i=1:length(varargin)
-0029         generateExtension(varargin{i});
-0030     end
-0031 end
-0032 end
-
Generated on Fri 09-Aug-2019 14:27:49 by m2html © 2005
- - \ No newline at end of file diff --git a/doc/generateExtension.html b/doc/generateExtension.html deleted file mode 100644 index c88cad81..00000000 --- a/doc/generateExtension.html +++ /dev/null @@ -1,97 +0,0 @@ - - - - Description of generateExtension - - - - - - - - - -
Home > . > generateExtension.m
- - - -

generateExtension -

- -

PURPOSE ^

-
GENERATEEXTENSION Generate Matlab classes from NWB extension schema file
- -

SYNOPSIS ^

-
function generateExtension(source)
- -

DESCRIPTION ^

-
 GENERATEEXTENSION Generate Matlab classes from NWB extension schema file
-   GENERATEEXTENSION(extension_path...)  Generate classes 
-   (Matlab m-files) from one or more NWB:N schema extension namespace 
-   files.  A registry of already generated core types is used to resolve 
-   dependent types.
-   
-   A cache of schema data is generated in the 'namespaces' subdirectory in
-   the current working directory.  This is for allowing cross-referencing
-   classes between multiple namespaces.
-
-   Output files are generated and placed in a '+types' subdirectory in the
-   current working directory.
-   
-   Example:
-      generateCore('schema\core\nwb.namespace.yaml');
-      generateExtension('schema\myext\myextension.namespace.yaml')
- 
-   See also GENERATECORE
- - -

CROSS-REFERENCE INFORMATION ^

-This function calls: -
    -
-This function is called by: -
    -
- - - - -

SOURCE CODE ^

-
0001 function generateExtension(source)
-0002 % GENERATEEXTENSION Generate Matlab classes from NWB extension schema file
-0003 %   GENERATEEXTENSION(extension_path...)  Generate classes
-0004 %   (Matlab m-files) from one or more NWB:N schema extension namespace
-0005 %   files.  A registry of already generated core types is used to resolve
-0006 %   dependent types.
-0007 %
-0008 %   A cache of schema data is generated in the 'namespaces' subdirectory in
-0009 %   the current working directory.  This is for allowing cross-referencing
-0010 %   classes between multiple namespaces.
-0011 %
-0012 %   Output files are generated and placed in a '+types' subdirectory in the
-0013 %   current working directory.
-0014 %
-0015 %   Example:
-0016 %      generateCore('schema\core\nwb.namespace.yaml');
-0017 %      generateExtension('schema\myext\myextension.namespace.yaml')
-0018 %
-0019 %   See also GENERATECORE
-0020 validateattributes(source, {'char', 'string'}, {'scalartext'});
-0021 
-0022 %find jar from source and generate Schema
-0023 schema = spec.loadSchema();
-0024 
-0025 [localpath, ~, ~] = fileparts(source);
-0026 assert(2 == exist(source, 'file'),...
-0027     'MATNWB:FILE', 'Path to file `%s` could not be found.', source);
-0028 fid = fopen(source);
-0029 namespace_map = schema.read(fread(fid, '*char') .');
-0030 fclose(fid);
-0031 
-0032 spec.generate(namespace_map, localpath);
-0033 end
-
Generated on Fri 09-Aug-2019 14:27:49 by m2html © 2005
- - \ No newline at end of file diff --git a/doc/helpwin.css b/doc/helpwin.css deleted file mode 100644 index 53f47d6c..00000000 --- a/doc/helpwin.css +++ /dev/null @@ -1,149 +0,0 @@ -body { - font-family: Helvetica, Arial, sans-serif; - font-size: 14px; -} - -a { - color:#000099; -} - -a:visited { - color:#840084; -} - -a:active { - color:#000099; -} - -a:hover { - color:#000033; -} - -hr { - border-style: solid; - border-width: 5px 0px 0px 0px; - height: 4px; - width: 100%; - color: #0080ff; -} - -p { - margin-left: 5px; -} - -td { - font-family: Helvetica, Arial, sans-serif; - font-size: 12px; -} - -.title { - color:#990000; - font-size:24px; - padding:2px 0px 2px 0px; - margin:10px 0px 0px 5px; - vertical-align: middle; -} - -.sectiontitle { - color:#990000; - font-size:16px; - font-weight: bold; - width:100%; - padding:2px 0px 2px 5px; - margin: 15px 0px 0px 0px; -} - -.headertitle { - font-weight:bold; - padding: 3px 5px 3px 5px; -} - -.helptopic { - font-weight:bold; - color:#990000; -} - -.helptext { - margin-left: 5px; - font-family: monospace; - white-space: pre; - font-size: 13px; -} - -.subheader { - background-color: #E7EBF7; - width: 100%; -} - -.subheader-left { - padding: 3px 5px 3px 5px; -} - -.subheader-right { - text-align: right; - padding: 3px 5px 3px 5px; -} - -.footerlinktitle { - margin: 15px 0px 0px 5px; - color:#990000; - font-size: 16px; -} - -.footerlink { - padding: 3px 5px 3px 15px; -} - -.topiclinks { - margin: 5px 0px 0px 5px; -} - -a.topiclink { - padding-right: 10px; -} - -.class-details { - border-width: 0px 0px 0px 0px; - margin: 0px 0px 0px 5px; -} - -.class-detail-label { - font-weight: bold; - padding: 0px 10px 0px 0px; - spacing: 0px 0px 0px 0px; -} -.name { - color:#663d00; - font-weight: bold; -} -.summary-list .m-help { - white-space: nowrap; -} - -.summary-list { - margin: 0px 15px 0px 15px; -} - -.summary-list .summary-item .name { - border-width: 0px 0px 1px 0px; - border-style: solid; - border-color:#E7EBF7; - padding: 2px 15px 2px 5px; -} - -.summary-list .summary-item .m-help { - border-width: 0px 0px 1px 0px; - border-style: solid; - border-color:#E7EBF7; - padding: 2px 5px 2px 5px; - width: 95%; - white-space: normal; -} - -.summary-list .summary-item .attributes { - border-width: 0px 0px 1px 0px; - border-style: solid; - border-color:#E7EBF7; - padding: 2px 15px 2px 5px; - white-space: nowrap; -} diff --git a/doc/hp.png b/doc/hp.png deleted file mode 100644 index d09f988f..00000000 Binary files a/doc/hp.png and /dev/null differ diff --git a/doc/index.html b/doc/index.html deleted file mode 100644 index c0f21dcd..00000000 --- a/doc/index.html +++ /dev/null @@ -1,130 +0,0 @@ - - - - Index for Directory . - - - - - - - - - - -
< Master indexIndex for MatNWB >
- -

Index for MatNWB

- -

Matlab files in this directory:

- -
 NwbFileNWBFILE NWB:N file object representation
 generateCoreGENERATECORE Generate Matlab classes from NWB core schema files
 generateExtensionGENERATEEXTENSION Generate Matlab classes from NWB extension schema file
 nwbExportNWBEXPORT Writes an NWB file.
 nwbReadNWBREAD Reads an NWB file.
- - - - -

Class Directories:

-

Core:

- -

HDMF Common:

- -

HDMF Experimental:

- -

DataPipe Properties:

- -

DataPipe:

- -
Generated on Thu 27-May-2021 14:13:15 by m2html © 2005
- - diff --git a/doc/left.png b/doc/left.png deleted file mode 100644 index 404df045..00000000 Binary files a/doc/left.png and /dev/null differ diff --git a/doc/linux.png b/doc/linux.png deleted file mode 100644 index 42c0c328..00000000 Binary files a/doc/linux.png and /dev/null differ diff --git a/doc/m2html.css b/doc/m2html.css deleted file mode 100644 index 030a84b5..00000000 --- a/doc/m2html.css +++ /dev/null @@ -1,90 +0,0 @@ -body { - background: white; - color: black; - font-family: arial,sans-serif; - margin: 0; - padding: 1ex; -} - -div.fragment { - width: 98%; - border: 1px solid #CCCCCC; - background-color: #f5f5f5; - padding-left: 4px; - margin: 4px; -} - -div.box { - width: 98%; - background-color: #f5f5f5; - border: 1px solid #CCCCCC; - color: black; - padding: 4px; -} - -.comment { - color: #228B22; -} -.string { - color: #B20000; -} -.keyword { - color: #0000FF; -} - -.keywordtype { color: #604020; } -.keywordflow { color: #e08000; } -.preprocessor { color: #806020; } -.stringliteral { color: #002080; } -.charliteral { color: #008080; } - -a { - text-decoration: none; -} - -a:hover { - background-color: #006699; - color:#FFFFFF; -} - -a.code { - font-weight: normal; - color: #A020F0; -} - -a.code:hover { - background-color: #FF0000; - color: #FFFFFF; -} - -h1 { - background: transparent; - color: #006699; - font-size: x-large; - text-align: center; -} - -h2 { - background: transparent; - color: #006699; - font-size: large; -} - -address { - font-size:small; -} - -form.search { - margin-bottom: 0px; - margin-top: 0px; -} -input.search { - font-size: 75%; - color: #000080; - font-weight: normal; - background-color: #eeeeff; -} - -li { - padding-left:5px; -} \ No newline at end of file diff --git a/doc/matlabicon.gif b/doc/matlabicon.gif deleted file mode 100644 index 7c2b5e6e..00000000 Binary files a/doc/matlabicon.gif and /dev/null differ diff --git a/doc/mex.png b/doc/mex.png deleted file mode 100644 index 396f1bc9..00000000 Binary files a/doc/mex.png and /dev/null differ diff --git a/doc/nwbExport.html b/doc/nwbExport.html deleted file mode 100644 index 35812094..00000000 --- a/doc/nwbExport.html +++ /dev/null @@ -1,98 +0,0 @@ - - - - Description of nwbExport - - - - - - - - - -
Home > . > nwbExport.m
- - - -

nwbExport -

- -

PURPOSE ^

-
NWBEXPORT Writes an NWB file.
- -

SYNOPSIS ^

-
function nwbExport(nwb, filenames)
- -

DESCRIPTION ^

-
NWBEXPORT Writes an NWB file.
-  nwbExport(nwb, filename) Writes the nwb object to a file at filename.
-
-  Example:
-    % Generate Matlab code for the NWB objects from the core schema.
-    % This only needs to be done once.
-    generateCore('schema\core\nwb.namespace.yaml');
-    % Create some fake data and write
-    nwb = NwbFile;
-    nwb.session_start_time = datetime('now');
-    nwb.identifier = 'EMPTY';
-    nwb.session_description = 'empty test file';
-    nwbExport(nwb, 'empty.nwb');
-
-  See also GENERATECORE, GENERATEEXTENSION, NWBFILE, NWBREAD
- - -

CROSS-REFERENCE INFORMATION ^

-This function calls: -
    -
-This function is called by: -
    -
- - - - -

SOURCE CODE ^

-
0001 function nwbExport(nwb, filenames)
-0002 %NWBEXPORT Writes an NWB file.
-0003 %  nwbExport(nwb, filename) Writes the nwb object to a file at filename.
-0004 %
-0005 %  Example:
-0006 %    % Generate Matlab code for the NWB objects from the core schema.
-0007 %    % This only needs to be done once.
-0008 %    generateCore('schema\core\nwb.namespace.yaml');
-0009 %    % Create some fake data and write
-0010 %    nwb = NwbFile;
-0011 %    nwb.session_start_time = datetime('now');
-0012 %    nwb.identifier = 'EMPTY';
-0013 %    nwb.session_description = 'empty test file';
-0014 %    nwbExport(nwb, 'empty.nwb');
-0015 %
-0016 %  See also GENERATECORE, GENERATEEXTENSION, NWBFILE, NWBREAD
-0017 validateattributes(nwb, {'NwbFile'}, {'nonempty'});
-0018 validateattributes(filenames, {'cell', 'string', 'char'}, {'nonempty'});
-0019 if iscell(filenames)
-0020     assert(iscellstr(filenames), 'filename cell array must consist of strings');
-0021 end
-0022 if ~isscalar(nwb)
-0023     assert(~ischar(filenames) && length(filenames) == length(nwb), ...
-0024         'NwbFile and filename array dimensions must match.');
-0025 end
-0026 
-0027 for i=1:length(nwb)
-0028     if iscellstr(filenames)
-0029         filename = filenames{i};
-0030     elseif isstring(filenames)
-0031         filename = filenames(i);
-0032     else
-0033         filename = filenames;
-0034     end
-0035     export(nwb(i), filename);
-0036 end
-0037 end
-
Generated on Fri 09-Aug-2019 14:27:49 by m2html © 2005
- - \ No newline at end of file diff --git a/doc/nwbRead.html b/doc/nwbRead.html deleted file mode 100644 index 170a7454..00000000 --- a/doc/nwbRead.html +++ /dev/null @@ -1,156 +0,0 @@ - - - - Description of nwbRead - - - - - - - - - -
Home > . > nwbRead.m
- - - -

nwbRead -

- -

PURPOSE ^

-
NWBREAD Reads an NWB file.
- -

SYNOPSIS ^

-
function nwb = nwbRead(filename, varargin)
- -

DESCRIPTION ^

-
NWBREAD Reads an NWB file.
-  nwb = nwbRead(filename) Reads the nwb file at filename and returns an
-  NWBFile object representing its contents.
-
-  Requires that core and extension NWB types have been generated
-  and reside in a 'types' package on the matlab path.
-
-  Example:
-    %Generate Matlab code for the NWB objects from the core schema.
-    %This only needs to be done once.
-    generateCore('schema\core\nwb.namespace.yaml');
-    %Now we can read nwb files!
-    nwb=nwbRead('data.nwb');
-
-  See also GENERATECORE, GENERATEEXTENSION, NWBFILE, NWBEXPORT
- - -

CROSS-REFERENCE INFORMATION ^

-This function calls: -
    -
-This function is called by: -
    -
- - -

SUBFUNCTIONS ^

- - -

SOURCE CODE ^

-
0001 function nwb = nwbRead(filename, varargin)
-0002 %NWBREAD Reads an NWB file.
-0003 %  nwb = nwbRead(filename) Reads the nwb file at filename and returns an
-0004 %  NWBFile object representing its contents.
-0005 %
-0006 %  Requires that core and extension NWB types have been generated
-0007 %  and reside in a 'types' package on the matlab path.
-0008 %
-0009 %  Example:
-0010 %    %Generate Matlab code for the NWB objects from the core schema.
-0011 %    %This only needs to be done once.
-0012 %    generateCore('schema\core\nwb.namespace.yaml');
-0013 %    %Now we can read nwb files!
-0014 %    nwb=nwbRead('data.nwb');
-0015 %
-0016 %  See also GENERATECORE, GENERATEEXTENSION, NWBFILE, NWBEXPORT
-0017 ignorecache = ~isempty(varargin) && ischar(varargin{1}) &&...
-0018     strcmp('ignorecache', varargin{1});
-0019 if ischar(filename)
-0020     validateattributes(filename, {'char'}, {'scalartext', 'nonempty'});
-0021     info = h5info(filename);
-0022     try
-0023         %check for .specloc
-0024         fid = H5F.open(filename);
-0025         attr_id = H5A.open(fid, '.specloc');
-0026         ref_data = H5A.read(attr_id);
-0027         blacklist = H5R.get_name(attr_id, 'H5R_OBJECT', ref_data);
-0028         if ~ignorecache
-0029             generateSpec(fid, h5info(filename, blacklist));
-0030             rehash(); %required if we want parseGroup to read the right files.
-0031         end
-0032         info.Attributes(strcmp('.specloc', {info.Attributes.Name})) = [];
-0033         H5A.close(attr_id);
-0034         H5F.close(fid);
-0035     catch ME
-0036         if ~strcmp(ME.identifier, 'MATLAB:imagesci:hdf5lib:libraryError')
-0037             rethrow(ME);
-0038         end
-0039         blacklist = '';
-0040     end
-0041     nwb = io.parseGroup(filename, info, blacklist);
-0042     return;
-0043 elseif isstring(filename)
-0044     validateattributes(filename, {'string'}, {'nonempty'});
-0045 else
-0046     validateattributes(filename, {'cell'}, {'nonempty'});
-0047     assert(iscellstr(filename));
-0048 end
-0049 nwb = NwbFile.empty(length(filename), 0);
-0050 isStringArray = isstring(filename);
-0051 for i=1:length(filename)
-0052     if isStringArray
-0053         fnm = filename(i);
-0054     else
-0055         fnm = filename{i};
-0056     end
-0057     info = h5info(fnm);
-0058     nwb(i) = io.parseGroup(fnm, info);
-0059 end
-0060 end
-0061 
-0062 function generateSpec(fid, specinfo)
-0063 schema = spec.loadSchema();
-0064 
-0065 for i=1:length(specinfo.Groups)
-0066     location = specinfo.Groups(i).Groups(1);
-0067     
-0068     namespace_name = split(specinfo.Groups(i).Name, '/');
-0069     namespace_name = namespace_name{end};
-0070     
-0071     filenames = {location.Datasets.Name};
-0072     if ~any(strcmp('namespace', filenames))
-0073         warning('MATNWB:INVALIDCACHE',...
-0074         'Couldn''t find a `namespace` in namespace `%s`.  Skipping cache generation.',...
-0075         namespace_name);
-0076         return;
-0077     end
-0078     source_names = {location.Datasets.Name};
-0079     file_loc = strcat(location.Name, '/', source_names);
-0080     schema_map = containers.Map;
-0081     for j=1:length(file_loc)
-0082         did = H5D.open(fid, file_loc{j});
-0083         if strcmp('namespace', source_names{j})
-0084             namespace_map = schema.read(H5D.read(did));
-0085         else
-0086             schema_map(source_names{j}) = H5D.read(did);    
-0087         end
-0088         H5D.close(did);
-0089     end
-0090     
-0091     spec.generate(namespace_map, schema_map);
-0092 end
-0093 end
-
Generated on Fri 09-Aug-2019 14:27:49 by m2html © 2005
- - \ No newline at end of file diff --git a/doc/pcode.png b/doc/pcode.png deleted file mode 100644 index 6801bd9a..00000000 Binary files a/doc/pcode.png and /dev/null differ diff --git a/doc/right.png b/doc/right.png deleted file mode 100644 index 067c5baf..00000000 Binary files a/doc/right.png and /dev/null differ diff --git a/doc/sgi.png b/doc/sgi.png deleted file mode 100644 index 20052bc5..00000000 Binary files a/doc/sgi.png and /dev/null differ diff --git a/doc/simulinkicon.gif b/doc/simulinkicon.gif deleted file mode 100644 index 1386ddd4..00000000 Binary files a/doc/simulinkicon.gif and /dev/null differ diff --git a/doc/solaris.png b/doc/solaris.png deleted file mode 100644 index e31e8a2a..00000000 Binary files a/doc/solaris.png and /dev/null differ diff --git a/doc/up.png b/doc/up.png deleted file mode 100644 index b348b9a1..00000000 Binary files a/doc/up.png and /dev/null differ diff --git a/doc/windows.png b/doc/windows.png deleted file mode 100644 index 6cec95b8..00000000 Binary files a/doc/windows.png and /dev/null differ diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 00000000..4456a5f1 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,4 @@ +sphinx +sphinx-rtd-theme +sphinx-copybutton +sphinxcontrib-matlabdomain diff --git a/docs/source/_config/tutorial_config.json b/docs/source/_config/tutorial_config.json new file mode 100644 index 00000000..da0a4384 --- /dev/null +++ b/docs/source/_config/tutorial_config.json @@ -0,0 +1,25 @@ +{ + "titles": { + "intro": "Getting Started with MatNWB", + "read_demo": "Reading NWB Files with MatNWB", + "basicUsage": "Basic Usage of MatNWB", + "dimensionMapNoDataPipes": "Mapping Dimensions without DataPipes", + "dimensionMapWithDataPipes": "Mapping Dimensions with DataPipes", + "convertTrials": "Converting Trials to NWB Format", + "dynamic_tables": "Using Dynamic Tables in MatNWB", + "scratch": "Working with Scratch Space in MatNWB", + "behavior": "Behavior Data", + "ecephys": "Extracellular Electrophysiology", + "icephys": "Intracellular Electrophysiology", + "images": "Image Data", + "ogen": "Optogenetics", + "ophys": "Calcium Imaging", + "dataPipe": "Advanced Writing Using DataPipes", + "dynamically_loaded_filters": "Implementing Dynamically Loaded Filters", + "remote_read": "Reading NWB Files from Remote Locations" + }, + "youtube": { + "ecephys": "https://www.youtube.com/watch?v=W8t4_quIl1k&ab_channel=NeurodataWithoutBorders", + "ophys": "https://www.youtube.com/watch?v=OBidHdocnTc&ab_channel=NeurodataWithoutBorders" + } +} diff --git a/docs/source/_static/css/custom.css b/docs/source/_static/css/custom.css new file mode 100644 index 00000000..680bd1af --- /dev/null +++ b/docs/source/_static/css/custom.css @@ -0,0 +1,74 @@ +/* arg formatting by line, taken from https://github.com/sphinx-doc/sphinx/issues/1514#issuecomment-742703082 */ + +/* For general themes */ +div.body, .wy-nav-content { + max-width: 1000px; /* Set the content width */ + margin: 0; /* Remove auto-centering */ + padding-left: 30; /* Optional: Adjust padding */ +} + +/* For Read the Docs theme specifically */ +.wy-nav-content { + margin: 0; /* Remove centering (auto) */ + padding-left: 30px; /* Align content to the left */ +} + + +/*Newlines (\a) and spaces (\20) before each parameter*/ +dl.class em:not([class])::before { + content: "\a\20\20\20\20\20\20\20\20\20\20\20\20\20\20\20\20"; + white-space: pre; +} + +/*Newline after the last parameter (so the closing bracket is on a new line)*/ +dl.class 
em:not([class]):last-of-type::after { + content: "\a"; + white-space: pre; +} + +/*To have blue background of width of the block (instead of width of content)*/ +dl.class > dt:first-of-type { + display: block !important; +} + +.rst-content code.literal, .rst-content tt.literal { + color: #2b417e; /* Replace with your desired color */ +} +.rst-content div[class^=highlight], .rst-content pre.literal-block { + margin: 1px 0 14px +} + +.rst-content .section ol li>*, .rst-content .section ul li>*, .rst-content .toctree-wrapper ol li>*, .rst-content .toctree-wrapper ul li>*, .rst-content section ol li>*, .rst-content section ul li>* { + margin-top: 0px; +} + +/* Ensure there is 10px spacing between nested list items at different levels*/ +.rst-content li > dl > dt { + margin-bottom: 10px; +} +.rst-content dd > ul > li { + margin-bottom: 10px; +} +.rst-content .section ol.simple li>*, .rst-content .section ol.simple li ol, .rst-content .section ol.simple li ul, .rst-content .section ul.simple li>*, .rst-content .section ul.simple li ol, .rst-content .section ul.simple li ul, .rst-content .toctree-wrapper ol.simple li>*, .rst-content .toctree-wrapper ol.simple li ol, .rst-content .toctree-wrapper ol.simple li ul, .rst-content .toctree-wrapper ul.simple li>*, .rst-content .toctree-wrapper ul.simple li ol, .rst-content .toctree-wrapper ul.simple li ul, .rst-content section ol.simple li>*, .rst-content section ol.simple li ol, .rst-content section ol.simple li ul, .rst-content section ul.simple li>*, .rst-content section ul.simple li ol, .rst-content section ul.simple li ul{ + margin-bottom: 10px; +} + +/* Improve padding and margins for function docstring section titles */ +.rst-content dd > dl > dt { + padding-left: 5px; + margin-top: 20px; + margin-bottom: 10px; +} +html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt, html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{ + margin-top: 28px; + margin-bottom: 10px; +} + +button.copybtn { + height:25px; + width:25px; + opacity: 0.5; + padding: 0; + border: none; + background: none; +} diff --git a/tutorials/html/UnitTimes.png b/docs/source/_static/html/tutorials/UnitTimes.png similarity index 100% rename from tutorials/html/UnitTimes.png rename to docs/source/_static/html/tutorials/UnitTimes.png diff --git a/tutorials/html/basicUsage.html b/docs/source/_static/html/tutorials/basicUsage.html similarity index 83% rename from tutorials/html/basicUsage.html rename to docs/source/_static/html/tutorials/basicUsage.html index c9cab5d2..60d0d32d 100644 --- a/tutorials/html/basicUsage.html +++ b/docs/source/_static/html/tutorials/basicUsage.html @@ -12,7 +12,7 @@ .S9 { margin-left: 56px; line-height: 21px; min-height: 0px; text-align: left; white-space: pre-wrap; } .S10 { margin: 10px 10px 9px 4px; padding: 0px; line-height: 21px; min-height: 0px; white-space: pre-wrap; color: rgb(0, 0, 0); font-family: Helvetica, Arial, sans-serif; font-style: normal; font-size: 14px; font-weight: 400; text-align: left; } .S11 { border-left: 1px solid rgb(233, 233, 233); border-right: 1px solid rgb(233, 233, 233); border-top: 0px none rgb(0, 0, 0); border-bottom: 1px solid rgb(233, 233, 233); border-radius: 0px; padding: 0px 45px 4px 13px; line-height: 17.234px; min-height: 18px; white-space: nowrap; 
color: rgb(0, 0, 0); font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; } -.S12 { color: rgb(64, 64, 64); padding: 10px 0px 6px 17px; background: rgb(255, 255, 255) none repeat scroll 0% 0% / auto padding-box border-box; font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; overflow-x: hidden; line-height: 17.234px; }

Using NWB Data

last updated: February 9, 2021
In this tutorial, we demonstrate the reading and usage of the NWB file produced in the File Conversion Tutorial. The output is a near-reproduction of Figure 1e from the Li et al publication, showing raster and peristimulus time histogram (PSTH) plots for neural recordings from anterior lateral motor cortex (ALM). This figure illustrates the main finding of the publication, showing the robustness of motor planning behavior and neural dynamics following short unilateral network silencing via optogenetic inhibition.

Reading NWB Files

NWB files can be read in using the nwbRead() function. This function returns an NwbFile object, which is the in-memory representation of the NWB file structure.
nwb = nwbRead('out\ANM255201_20141124.nwb');

Constrained Sets

Analyzed data in NWB is placed under the analysis property, which is a Constrained Set. A constrained set consists of an arbitrary number of key-value pairs, similar to Map containers in MATLAB or a dictionary in Python. Unlike those, however, constrained sets can also validate their own properties, much as a typed object does.
You can get/set values in constrained sets using their respective .get()/.set() methods and retrieve all Set properties using the keys() method, like in a containers.Map.
unit_names = keys(nwb.analysis);

Dynamic Tables

nwb.intervals_trials returns a unique type of table called a Dynamic Table. Dynamic tables inherit from the NWB type types.hdmf_common.DynamicTable and allow for a table-like interface in NWB. In the case below, we grab the special column start_time. Dynamic Tables allow adding your own vectors using the vectordata property, which are Constrained Sets. All columns are represented by either a types.hdmf_common.VectorData or a types.hdmf_common.VectorIndex type.

Data Stubs

The data property of the column id in nwb.units is a types.untyped.DataStub. This object is a representation of a dataset that is not loaded in memory, and is what allows MatNWB to lazily load its file data. To load the data into memory, use the .load() method which extracts all data from the NWB file. Alternatively, you can index into the DataStub directly using conventional MATLAB syntax.

Jagged Arrays in Dynamic Tables

With the addition of addRow and getRow to Dynamic Tables, you rarely need to work with jagged arrays directly unless you have specific data format concerns or are doing low-level NWB tool development. The paragraph below is retained in its entirety from its original form as purely informational.
All data in a Dynamic Table must be aligned by row and column, but not all data fits into this paradigm neatly. In order to represent variable amounts of data that is localised to each row and column, NWB uses a concept called Jagged Arrays. These arrays consist of two column types: the familiar types.core.VectorData, and the new types.core.VectorIndex. A Vector Index holds no data, instead holding a reference to another Vector Data and a vector of indices that align to the Dynamic Table dimensions. The indices represent the last index boundary in the Vector Data object for the Vector Index row. As an example, an index of three in the first row of the Vector Index column points to the first three values in the referenced Vector Data column. Subsequently, if the next index were a five, it would indicate the fourth and fifth elements in the referenced Vector Data column.
The jagged arrays serve to represent multiple trials and spike times associated to each unit by id. A convenient way to represent these in MATLAB is to use Map containers where each unit's data is indexed directly by its unit id. Below, we utilize getRow in order to build the same Map.
unit_ids = nwb.units.id.data.load(); % array of unit ids represented within this
% Initialize trials & times Map containers indexed by unit_ids
unit_trials = containers.Map('KeyType',class(unit_ids),'ValueType','any');
unit_times = containers.Map('KeyType',class(unit_ids),'ValueType','any');
last_idx = 0;
for i = 1:length(unit_ids)
unit_id = unit_ids(i);
row = nwb.units.getRow(unit_id, 'useId', true, 'columns', {'spike_times', 'trials'});
unit_trials(unit_id) = row.trials{1};
unit_times(unit_id) = row.spike_times{1};
end

Process Units

We now do the following for each Unit:
  • Filter out invalid trials
  • Separate datasets based on resulting mouse behavior (right/left licks).
  • Derive "sample", "delay", and "response" times for this analyzed neuron.
  • Compose a peristimulus time histogram from the data.
sorted_ids = sort(unit_ids);
Photostim = struct(...
'ind', true,... % mask into xs and ys for this photostim
'period', 'none',...
'duration', 0,... % in seconds
'ramp_offset', 0); % in seconds
% Initialize Map container of plotting data for each unit, stored as structure
Unit = containers.Map('KeyType',class(unit_ids),'ValueType','any');
unit_struct = struct(...
'id', [],...
'xs', [],...
'ys', [],...
'xlim', [-Inf Inf],...
'sample', 0,...
'delay', 0,...
'response', 0,...
'left_scatter', false,...
'right_scatter', false,...
'photostim', Photostim); % can have multiple photostim
for unit_id = unit_ids'
We first extract trial IDs from the Unit IDs.
unit_trial_id = unit_trials(unit_id);
Then filter out outliers from the Sample, Delay, and Response time points with which we derive a "good enough" estimate.
trial = nwb.intervals_trials.getRow(unit_trial_id, 'useId', true,...
'columns', {'PoleInTime', 'PoleOutTime', 'CueTime', 'GoodTrials'});
unit_sample = trial.PoleInTime;
unit_delay = trial.PoleOutTime;
unit_response = trial.CueTime;
unit_good_trials = trial.GoodTrials;
% Subjective parameters
delay_threshold = 0.064;
response_threshold = 0.43;
expected_delay_offset = 1.3; % determined from figure 1a
expected_response_offset = 1.3;
expected_delay = unit_sample + expected_delay_offset;
expected_response = unit_delay + expected_response_offset;
good_delay = (unit_delay > expected_delay - delay_threshold) &...
(unit_delay < expected_delay + delay_threshold);
good_response = (unit_response > expected_response - response_threshold) &...
(unit_response < expected_response + response_threshold);
avg_sample = mean(unit_sample(good_delay & good_response));
avg_delay = mean(unit_delay(good_delay & good_response));
avg_response = mean(unit_response(good_delay & good_response));
Filter the rest of the data by "good" trials.
unit_good_trials = unit_good_trials & good_delay & good_response;
unit_trial_id = unit_trial_id(unit_good_trials);
unit_spike_time = unit_times(unit_id);
unit_spike_time = unit_spike_time(unit_good_trials);
Retrieve good trial data and organize by stimulation type.
trial = nwb.intervals_trials.getRow(unit_trial_id, 'useId', true,...
'columns', {'start_time', 'HitR', 'HitL', 'StimTrials', 'PhotostimulationType'});
unit_is_photostim = logical(trial.StimTrials);
unit_stim_type = trial.PhotostimulationType;
unit_no_stim = ~unit_is_photostim & 0 == unit_stim_type;
unit_sample_stim = unit_is_photostim & 1 == unit_stim_type;
unit_early_stim = unit_is_photostim & 2 == unit_stim_type;
unit_middle_stim = unit_is_photostim & 3 == unit_stim_type;
Compose Scatter Plots and the Peristimulus Time Histogram zeroed on the Response time.
xs = unit_spike_time - trial.start_time - avg_response;
ys = unit_trial_id;
curr_unit = unit_struct;
curr_unit.xs = xs;
curr_unit.ys = ys;
curr_unit.left_scatter = logical(trial.HitL);
curr_unit.right_scatter = logical(trial.HitR);
curr_unit.sample = avg_sample - avg_response;
curr_unit.delay = avg_delay - avg_response;
curr_unit.response = 0;
% Photostim periods
curr_unit.photostim.ind = unit_no_stim;
% Sample
if any(unit_sample_stim)
SampleStim = Photostim;
SampleStim.ind = unit_sample_stim;
SampleStim.period = 'Sample';
SampleStim.duration = 0.5;
SampleStim.ramp_offset = 0.1;
curr_unit.photostim(end+1) = SampleStim;
end
% Early Delay
if any(unit_early_stim)
early_stim_types = unique(unit_stim_type(unit_early_stim));
for i_early_types=1:length(early_stim_types)
early_type = early_stim_types(i_early_types);
EarlyStim = Photostim;
EarlyStim.period = 'Early Delay';
EarlyStim.ind = early_type == unit_stim_type & unit_early_stim;
if early_type == 2
EarlyStim.duration = 0.5;
EarlyStim.ramp_offset = 0.1;
else
EarlyStim.duration = 0.8;
EarlyStim.ramp_offset = 0.2;
end
curr_unit.photostim(end+1) = EarlyStim;
end
end
% Middle Delay
if any(unit_middle_stim)
MiddleStim = Photostim;
MiddleStim.ind = unit_middle_stim;
MiddleStim.period = 'Middle Delay';
MiddleStim.duration = 0.5;
MiddleStim.ramp_offset = 0.1;
curr_unit.photostim(end+1) = MiddleStim;
end
Unit(unit_id) = curr_unit;
end

Plot Example Neurons

neuron_labels = [2, 3]; % neuron labels from Figure 1e
neuron_ids = [11, 2]; % neuron unit IDs corresponding to the Fig 1e labels
num_conditions = 4; % photostim conditions: nostim, sample, early, middle if applicable
num_neurons = length(neuron_ids);
% Initialize data structures for each summary plot of categorized neural spike data at specified stimulus condition
RasterPlot = struct(...
'xs', 0,...
'ys', 0);
ConditionPlot = struct(...
'label', '',...
'xlim', 0,...
'sample', 0,...
'delay', 0,...
'response', 0,...
'right_scatter', RasterPlot,...
'left_scatter', RasterPlot,...
'psth_bin_window', 0,...
'stim_type', '');
fig = figure;
% Plot neural spike data for each neuron and stimulus condition in a subplot array: num_neurons (rows) x num_conditions (columns)
for nn=1:num_neurons
Neuron = Unit(neuron_ids(nn));
% Initialize structure with neural + stimulus condition data
CurrPlot = ConditionPlot;
CurrPlot.xlim = [min(Neuron.xs) max(Neuron.xs)];
CurrPlot.sample = Neuron.sample;
CurrPlot.delay = Neuron.delay;
CurrPlot.response = Neuron.response;
% Plot each neuron/condition
plot_row = (nn - 1) * num_conditions;
for cc=1:num_conditions
ax = subplot(num_neurons, num_conditions, plot_row + cc, 'Parent', fig);
Stim = Neuron.photostim(cc);
CurrPlot.stim_type = Stim.period;
if strcmp(Stim.period, 'none')
CurrPlot.label = sprintf('Neuron %d', neuron_labels(nn));
CurrPlot.psth_bin_window = 9;
else
CurrPlot.label = Stim.period;
CurrPlot.psth_bin_window = 2;
end
stim_left_scatter_ind = Stim.ind & Neuron.left_scatter;
stim_left_scatter_trials = Neuron.ys(stim_left_scatter_ind);
CurrPlot.left_scatter.xs = Neuron.xs(stim_left_scatter_ind);
[~,CurrPlot.left_scatter.ys] = ismember(stim_left_scatter_trials,unique(stim_left_scatter_trials));
stim_right_scatter_ind = Stim.ind & Neuron.right_scatter;
stim_right_scatter_trials = Neuron.ys(stim_right_scatter_ind);
CurrPlot.right_scatter.xs = Neuron.xs(stim_right_scatter_ind);
[~,CurrPlot.right_scatter.ys] = ismember(stim_right_scatter_trials,unique(stim_right_scatter_trials));
plot_condition(ax, CurrPlot);
end
end
+.S12 { color: rgb(64, 64, 64); padding: 10px 0px 6px 17px; background: rgb(255, 255, 255) none repeat scroll 0% 0% / auto padding-box border-box; font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; overflow-x: hidden; line-height: 17.234px; }

Using NWB Data

last updated: February 9, 2021
In this tutorial, we demonstrate the reading and usage of the NWB file produced in the File Conversion Tutorial. The output is a near-reproduction of Figure 1e from the Li et al publication, showing raster and peristimulus time histogram (PSTH) plots for neural recordings from anterior lateral motor cortex (ALM). This figure illustrates the main finding of the publication, showing the robustness of motor planning behavior and neural dynamics following short unilateral network silencing via optogenetic inhibition.

Reading NWB Files

NWB files can be read in using the nwbRead() function. This function returns an NwbFile object, which is the in-memory representation of the NWB file structure.
nwb = nwbRead('out\ANM255201_20141124.nwb');
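If the type specifications cached inside the file have already been generated on your MATLAB path, you can skip regeneration of the embedded specifications by passing the 'ignorecache' flag (shown here with the same file as above):
nwb = nwbRead('out\ANM255201_20141124.nwb', 'ignorecache'); % skip regenerating cached specifications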

Constrained Sets

Analyzed data in NWB is placed under the analysis property, which is a Constrained Set. A constrained set consists of an arbitrary number of key-value pairs, similar to Map containers in MATLAB or a dictionary in Python. Unlike those, however, constrained sets can also validate their own properties, much as a typed object does.
You can get/set values in constrained sets using their respective .get()/.set() methods and retrieve all Set properties using the keys() method, like in a containers.Map.
unit_names = keys(nwb.analysis);
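As a brief illustration (assuming this file contains at least one analyzed unit), individual entries can be read back with get() and written with set():
first_unit = nwb.analysis.get(unit_names{1}); % retrieve one entry by its key
nwb.analysis.set(unit_names{1}, first_unit); % store (or overwrite) an entry under a key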

Dynamic Tables

nwb.intervals_trials returns a unique type of table called a Dynamic Table. Dynamic tables inherit from the NWB type types.hdmf_common.DynamicTable and allow for a table-like interface in NWB. In the case below, we grab the special column start_time. Dynamic Tables allow adding your own vectors using the vectordata property, which are Constrained Sets. All columns are represented by either a types.hdmf_common.VectorData or a types.hdmf_common.VectorIndex type.
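As a short sketch based on the trials table used later in this tutorial, the built-in start_time column can be accessed as a property, while user-defined columns such as GoodTrials live in the vectordata set:
trial_start_times = nwb.intervals_trials.start_time.data.load(); % built-in column
good_trials = nwb.intervals_trials.vectordata.get('GoodTrials').data.load(); % user-defined column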

Data Stubs

The data property of the column id in nwb.units is a types.untyped.DataStub. This object is a representation of a dataset that is not loaded in memory, and is what allows MatNWB to lazily load its file data. To load the data into memory, use the .load() method which extracts all data from the NWB file. Alternatively, you can index into the DataStub directly using conventional MATLAB syntax.
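For example, the id column of the units table can be loaded in full or read in part (the range 1:5 is arbitrary and only for illustration):
all_unit_ids = nwb.units.id.data.load(); % load the entire dataset into memory
first_unit_ids = nwb.units.id.data(1:5); % read only the first five values from disk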

Jagged Arrays in Dynamic Tables

With the addition of addRow and getRow to Dynamic Tables, you rarely need to work with jagged arrays directly unless you have specific data format concerns or are doing low-level NWB tool development. The paragraph below is retained in its entirety from its original form as purely informational.
All data in a Dynamic Table must be aligned by row and column, but not all data fits into this paradigm neatly. In order to represent variable amounts of data that is localised to each row and column, NWB uses a concept called Jagged Arrays. These arrays consist of two column types: the familiar types.core.VectorData, and the new types.core.VectorIndex. A Vector Index holds no data, instead holding a reference to another Vector Data and a vector of indices that align to the Dynamic Table dimensions. The indices represent the last index boundary in the Vector Data object for the Vector Index row. As an example, an index of three in the first row of the Vector Index column points to the first three values in the referenced Vector Data column. Subsequently, if the next index were a five, it would indicate the fourth and fifth elements in the referenced Vector Data column.
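The indexing scheme can be illustrated with plain MATLAB arrays (illustration only; these are ordinary arrays, not NWB types):
vector_data = [11 12 13 21 22 31 32 33 34]; % all column values, concatenated across rows
vector_index = [3 5 9]; % end boundary of each table row within vector_data
row_two = vector_data(vector_index(1)+1:vector_index(2)); % values belonging to the second row: [21 22]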
The jagged arrays serve to represent multiple trials and spike times associated to each unit by id. A convenient way to represent these in MATLAB is to use Map containers where each unit's data is indexed directly by its unit id. Below, we utilize getRow in order to build the same Map.
unit_ids = nwb.units.id.data.load(); % array of unit ids represented within this
% Initialize trials & times Map containers indexed by unit_ids
unit_trials = containers.Map('KeyType',class(unit_ids),'ValueType','any');
unit_times = containers.Map('KeyType',class(unit_ids),'ValueType','any');
last_idx = 0;
for i = 1:length(unit_ids)
unit_id = unit_ids(i);
row = nwb.units.getRow(unit_id, 'useId', true, 'columns', {'spike_times', 'trials'});
unit_trials(unit_id) = row.trials{1};
unit_times(unit_id) = row.spike_times{1};
end

Process Units

We now do the following for each Unit:
  • Filter out invalid trials
  • Separate datasets based on resulting mouse behavior (right/left licks).
  • Derive "sample", "delay", and "response" times for this analyzed neuron.
  • Compose a peristimulus time histogram from the data.
sorted_ids = sort(unit_ids);
Photostim = struct(...
'ind', true,... % mask into xs and ys for this photostim
'period', 'none',...
'duration', 0,... % in seconds
'ramp_offset', 0); % in seconds
% Initialize Map container of plotting data for each unit, stored as structure
Unit = containers.Map('KeyType',class(unit_ids),'ValueType','any');
unit_struct = struct(...
'id', [],...
'xs', [],...
'ys', [],...
'xlim', [-Inf Inf],...
'sample', 0,...
'delay', 0,...
'response', 0,...
'left_scatter', false,...
'right_scatter', false,...
'photostim', Photostim); % can have multiple photostim
for unit_id = unit_ids'
We first extract trial IDs from the Unit IDs.
unit_trial_id = unit_trials(unit_id);
Then filter out outliers from the Sample, Delay, and Response time points with which we derive a "good enough" estimate.
trial = nwb.intervals_trials.getRow(unit_trial_id, 'useId', true,...
'columns', {'PoleInTime', 'PoleOutTime', 'CueTime', 'GoodTrials'});
unit_sample = trial.PoleInTime;
unit_delay = trial.PoleOutTime;
unit_response = trial.CueTime;
unit_good_trials = trial.GoodTrials;
% Subjective parameters
delay_threshold = 0.064;
response_threshold = 0.43;
expected_delay_offset = 1.3; % determined from figure 1a
expected_response_offset = 1.3;
expected_delay = unit_sample + expected_delay_offset;
expected_response = unit_delay + expected_response_offset;
good_delay = (unit_delay > expected_delay - delay_threshold) &...
(unit_delay < expected_delay + delay_threshold);
good_response = (unit_response > expected_response - response_threshold) &...
(unit_response < expected_response + response_threshold);
avg_sample = mean(unit_sample(good_delay & good_response));
avg_delay = mean(unit_delay(good_delay & good_response));
avg_response = mean(unit_response(good_delay & good_response));
Filter the rest of the data by "good" trials.
unit_good_trials = unit_good_trials & good_delay & good_response;
unit_trial_id = unit_trial_id(unit_good_trials);
unit_spike_time = unit_times(unit_id);
unit_spike_time = unit_spike_time(unit_good_trials);
Retrieve good trial data and organize by stimulation type.
trial = nwb.intervals_trials.getRow(unit_trial_id, 'useId', true,...
'columns', {'start_time', 'HitR', 'HitL', 'StimTrials', 'PhotostimulationType'});
unit_is_photostim = logical(trial.StimTrials);
unit_stim_type = trial.PhotostimulationType;
unit_no_stim = ~unit_is_photostim & 0 == unit_stim_type;
unit_sample_stim = unit_is_photostim & 1 == unit_stim_type;
unit_early_stim = unit_is_photostim & 2 == unit_stim_type;
unit_middle_stim = unit_is_photostim & 3 == unit_stim_type;
Compose Scatter Plots and the Peristimulus Time Histogram zeroed on the Response time.
xs = unit_spike_time - trial.start_time - avg_response;
ys = unit_trial_id;
curr_unit = unit_struct;
curr_unit.xs = xs;
curr_unit.ys = ys;
curr_unit.left_scatter = logical(trial.HitL);
curr_unit.right_scatter = logical(trial.HitR);
curr_unit.sample = avg_sample - avg_response;
curr_unit.delay = avg_delay - avg_response;
curr_unit.response = 0;
% Photostim periods
curr_unit.photostim.ind = unit_no_stim;
% Sample
if any(unit_sample_stim)
SampleStim = Photostim;
SampleStim.ind = unit_sample_stim;
SampleStim.period = 'Sample';
SampleStim.duration = 0.5;
SampleStim.ramp_offset = 0.1;
curr_unit.photostim(end+1) = SampleStim;
end
% Early Delay
if any(unit_early_stim)
early_stim_types = unique(unit_stim_type(unit_early_stim));
for i_early_types=1:length(early_stim_types)
early_type = early_stim_types(i_early_types);
EarlyStim = Photostim;
EarlyStim.period = 'Early Delay';
EarlyStim.ind = early_type == unit_stim_type & unit_early_stim;
if early_type == 2
EarlyStim.duration = 0.5;
EarlyStim.ramp_offset = 0.1;
else
EarlyStim.duration = 0.8;
EarlyStim.ramp_offset = 0.2;
end
curr_unit.photostim(end+1) = EarlyStim;
end
end
% Middle Delay
if any(unit_middle_stim)
MiddleStim = Photostim;
MiddleStim.ind = unit_middle_stim;
MiddleStim.period = 'Middle Delay';
MiddleStim.duration = 0.5;
MiddleStim.ramp_offset = 0.1;
curr_unit.photostim(end+1) = MiddleStim;
end
Unit(unit_id) = curr_unit;
end

Plot Example Neurons

neuron_labels = [2, 3]; % neuron labels from Figure 1e
neuron_ids = [11, 2]; % neuron unit IDs corresponding to the Fig 1e labels
num_conditions = 4; % photostim conditions: nostim, sample, early, middle if applicable
num_neurons = length(neuron_ids);
% Initialize data structures for each summary plot of categorized neural spike data at specified stimulus condition
RasterPlot = struct(...
'xs', 0,...
'ys', 0);
ConditionPlot = struct(...
'label', '',...
'xlim', 0,...
'sample', 0,...
'delay', 0,...
'response', 0,...
'right_scatter', RasterPlot,...
'left_scatter', RasterPlot,...
'psth_bin_window', 0,...
'stim_type', '');
fig = figure;
% Plot neural spike data for each neuron and stimulus condition in a subplot array: num_neurons (rows) x num_conditions (columns)
for nn=1:num_neurons
Neuron = Unit(neuron_ids(nn));
% Initialize structure with neural + stimulus condition data
CurrPlot = ConditionPlot;
CurrPlot.xlim = [min(Neuron.xs) max(Neuron.xs)];
CurrPlot.sample = Neuron.sample;
CurrPlot.delay = Neuron.delay;
CurrPlot.response = Neuron.response;
% Plot each neuron/condition
plot_row = (nn - 1) * num_conditions;
for cc=1:num_conditions
ax = subplot(num_neurons, num_conditions, plot_row + cc, 'Parent', fig);
Stim = Neuron.photostim(cc);
CurrPlot.stim_type = Stim.period;
if strcmp(Stim.period, 'none')
CurrPlot.label = sprintf('Neuron %d', neuron_labels(nn));
CurrPlot.psth_bin_window = 9;
else
CurrPlot.label = Stim.period;
CurrPlot.psth_bin_window = 2;
end
stim_left_scatter_ind = Stim.ind & Neuron.left_scatter;
stim_left_scatter_trials = Neuron.ys(stim_left_scatter_ind);
CurrPlot.left_scatter.xs = Neuron.xs(stim_left_scatter_ind);
[~,CurrPlot.left_scatter.ys] = ismember(stim_left_scatter_trials,unique(stim_left_scatter_trials));
stim_right_scatter_ind = Stim.ind & Neuron.right_scatter;
stim_right_scatter_trials = Neuron.ys(stim_right_scatter_ind);
CurrPlot.right_scatter.xs = Neuron.xs(stim_right_scatter_ind);
[~,CurrPlot.right_scatter.ys] = ismember(stim_right_scatter_trials,unique(stim_right_scatter_trials));
plot_condition(ax, CurrPlot);
end
end

Helper Functions

PSTH helper function
function [psth_xs, psth_ys] = calculate_psth(xs, bin_window, bin_width)
[bin_counts, edges] = histcounts(xs, 'BinWidth', bin_width);
psth_xs = edges(1:end-1) + (bin_width / 2);
moving_avg_b = (1/bin_window) * ones(1,bin_window);
psth_ys = filter(moving_avg_b, 1, bin_counts);
end
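For reference, a call using the tutorial's 200 ms smoothing (a bin_window of 9 bins with a bin width of 0.2/9 seconds) would look like the commented sketch below; relative_spike_times is a hypothetical vector of spike times zeroed on the response cue.
% [psth_xs, psth_ys] = calculate_psth(relative_spike_times, 9, 0.2/9);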
Plotter function for each stimulus condition
function plot_condition(ax, ConditionPlot)
left_cdata = [1 0 0]; % red
right_cdata = [0 0 1]; % blue
hist_margin = 50;
scatter_margin = 10;
% Calculate PSTH values
% moving average over 200 ms as per figure 1e
hist_bin_width = 0.2 / ConditionPlot.psth_bin_window;
[left_psth_xs, left_psth_ys] =...
calculate_psth(ConditionPlot.left_scatter.xs, ConditionPlot.psth_bin_window, hist_bin_width);
[right_psth_xs, right_psth_ys] =...
calculate_psth(ConditionPlot.right_scatter.xs, ConditionPlot.psth_bin_window, hist_bin_width);
right_scatter_offset = min(ConditionPlot.right_scatter.ys);
right_scatter_height = max(ConditionPlot.right_scatter.ys) - right_scatter_offset;
left_scatter_offset = min(ConditionPlot.left_scatter.ys);
left_scatter_height = max(ConditionPlot.left_scatter.ys) - left_scatter_offset;
psth_height = max([left_psth_ys right_psth_ys]);
left_y_offset = hist_margin...
+ psth_height...
- left_scatter_offset;
right_y_offset = scatter_margin...
+ left_y_offset...
+ left_scatter_offset...
+ left_scatter_height...
- right_scatter_offset;
subplot_height = right_y_offset...
+ right_scatter_offset...
+ right_scatter_height;
hold(ax, 'on');
% PSTH
plot(ax, left_psth_xs, left_psth_ys, 'Color', left_cdata);
plot(ax, right_psth_xs, right_psth_ys, 'Color', right_cdata);
% Scatter Plot
scatter(ax,...
ConditionPlot.left_scatter.xs,...
left_y_offset + ConditionPlot.left_scatter.ys,...
'Marker', '.',...
'CData', left_cdata,...
'SizeData', 1);
scatter(ax,...
ConditionPlot.right_scatter.xs,...
right_y_offset + ConditionPlot.right_scatter.ys,...
'Marker', '.',...
'CData', right_cdata,...
'SizeData', 1);
% sample, delay, response lines
line(ax, repmat(ConditionPlot.sample, 1, 2), [0 subplot_height],...
'Color', 'k', 'LineStyle', '--');
line(ax, repmat(ConditionPlot.delay, 1, 2), [0 subplot_height],...
'Color', 'k', 'LineStyle', '--');
line(ax, repmat(ConditionPlot.response, 1, 2), [0 subplot_height],...
'Color', 'k', 'LineStyle', '--');
% blue bar for photoinhibition period
if ~strcmp(ConditionPlot.stim_type, 'none')
stim_height = subplot_height;
stim_width = 0.5; % seconds
% end time relative to 'go' cue as described in the paper.
switch ConditionPlot.stim_type
case 'Sample'
end_offset = 1.6;
case 'Early Delay'
end_offset = 0.8;
case 'Middle Delay'
end_offset = 0.3;
otherwise
error('Invalid photostim period `%s`', ConditionPlot.stim_type);
end
stim_offset = ConditionPlot.response - stim_width - end_offset;
patch_vertices = [...
stim_offset, 0;...
stim_offset, stim_height;...
stim_offset+stim_width, stim_height;...
stim_offset+stim_width, 0];
patch(ax,...
'Faces', 1:4,...
'Vertices', patch_vertices,...
'FaceColor', '#B3D3EC',... % light blue shading
'EdgeColor', 'none',...
'FaceAlpha', 0.8);
end
title(ax, ConditionPlot.label);
xlabel(ax, 'Time (Seconds)');
ylabel(ax, 'Spikes s^{-1}')
xticks(ax, [-2 0 2]);
yticks(ax, [0 max(10, round(psth_height, -1))]);
% legend(ax, [scatter_left_plot, scatter_right_plot], {'Left Lick', 'Right Lick'},...
% 'location', 'northwestoutside');
ax.TickDir = 'out';
ax.XLim = ConditionPlot.xlim;
ax.YLim = [0 subplot_height];
hold(ax, 'off');
end
diff --git a/docs/source/_static/html/tutorials/behavior.html b/docs/source/_static/html/tutorials/behavior.html
new file mode 100644

Behavior Data

This tutorial will guide you in writing behavioral data to NWB.

Creating an NWB File

Create an NWBFile object with the required fields (session_description, identifier, and session_start_time) and additional metadata.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'general_experimenter', 'My Name', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', 'DOI:10.1016/j.neuron.2016.12.011'); % optional
nwb
nwb =
NwbFile with properties:

    nwb_version: '2.8.0'
    file_create_date: []
    identifier: 'Mouse5_Day3'
    session_description: 'mouse in open exploration'
    session_start_time: {[2018-04-25T02:30:03.000000+02:00]}
    timestamps_reference_time: []
    acquisition: [0×1 types.untyped.Set]
    analysis: [0×1 types.untyped.Set]
    general: [0×1 types.untyped.Set]
    general_data_collection: ''
    general_devices: [0×1 types.untyped.Set]
    general_experiment_description: ''
    general_experimenter: 'My Name'
    general_extracellular_ephys: [0×1 types.untyped.Set]
    general_extracellular_ephys_electrodes: []
    general_institution: 'University of My Institution'
    general_intracellular_ephys: [0×1 types.untyped.Set]
    general_intracellular_ephys_experimental_conditions: []
    general_intracellular_ephys_filtering: ''
    general_intracellular_ephys_intracellular_recordings: []
    general_intracellular_ephys_repetitions: []
    general_intracellular_ephys_sequential_recordings: []
    general_intracellular_ephys_simultaneous_recordings: []
    general_intracellular_ephys_sweep_table: []
    general_keywords: ''
    general_lab: ''
    general_notes: ''
    general_optogenetics: [0×1 types.untyped.Set]
    general_optophysiology: [0×1 types.untyped.Set]
    general_pharmacology: ''
    general_protocol: ''
    general_related_publications: 'DOI:10.1016/j.neuron.2016.12.011'
    general_session_id: 'session_1234'
    general_slices: ''
    general_source_script: ''
    general_source_script_file_name: ''
    general_stimulus: ''
    general_subject: []
    general_surgery: ''
    general_virus: ''
    general_was_generated_by: ''
    intervals: [0×1 types.untyped.Set]
    intervals_epochs: []
    intervals_invalid_times: []
    intervals_trials: []
    processing: [0×1 types.untyped.Set]
    scratch: [0×1 types.untyped.Set]
    stimulus_presentation: [0×1 types.untyped.Set]
    stimulus_templates: [0×1 types.untyped.Set]
    units: []

Warning: The following required properties are missing for instance for type "NwbFile":
    timestamps_reference_time

SpatialSeries: Storing continuous spatial data

SpatialSeries is a subclass of TimeSeries that represents data in space, such as the direction of gaze or travel, or the position of an animal over time.
Create data that corresponds to x, y position over time.
position_data = [linspace(0, 10, 50); linspace(0, 8, 50)]; % 2 x nT array
In SpatialSeries data, the first dimension is always time (in seconds), the second dimension represents the x, y position. However, as described in the dimensionMapNoDataPipes tutorial, when a MATLAB array is exported to HDF5, the array is transposed. Therefore, in order to correctly export the data, in MATLAB the last dimension of an array should be time. SpatialSeries data should be stored as one continuous stream as it is acquired, not by trials as is often reshaped for analysis. Data can be trial-aligned on-the-fly using the trials table. See the trials tutorial for further information.
For position data, reference_frame indicates the zero-position; e.g., the (0,0) point might be the bottom-left corner of an enclosure, as viewed from the tracking camera.
timestamps = linspace(0, 50, 50)/ 200;
position_spatial_series = types.core.SpatialSeries( ...
'description', 'Position (x, y) in an open field.', ...
'data', position_data, ...
'timestamps', timestamps, ...
'reference_frame', '(0,0) is the bottom left corner.' ...
)
position_spatial_series =
SpatialSeries with properties:

    reference_frame: '(0,0) is the bottom left corner.'
    starting_time_unit: 'seconds'
    timestamps_interval: 1
    timestamps_unit: 'seconds'
    data: [2×50 double]
    comments: 'no comments'
    control: []
    control_description: ''
    data_continuity: ''
    data_conversion: 1
    data_offset: 0
    data_resolution: -1
    data_unit: 'meters'
    description: 'Position (x, y) in an open field.'
    starting_time: []
    starting_time_rate: []
    timestamps: [0 0.0051 0.0102 0.0153 0.0204 0.0255 0.0306 0.0357 0.0408 0.0459 … ] (1×50 double)

Position: Storing position measured over time

To help data analysis and visualization tools know that this SpatialSeries object represents the position of the subject, store the SpatialSeries object inside a Position object, which can hold one or more SpatialSeries objects.
position = types.core.Position();
position.spatialseries.set('SpatialSeries', position_spatial_series);

Create a Behavior Processing Module

Create a processing module called "behavior" for storing behavioral data in the NWBFile, then add the Position object to the processing module.
behavior_processing_module = types.core.ProcessingModule('description', 'stores behavioral data.');
behavior_processing_module.nwbdatainterface.set("Position", position);
nwb.processing.set("behavior", behavior_processing_module);

CompassDirection: Storing view angle measured over time

Analogous to how position can be stored, we can create a SpatialSeries object for representing the view angle of the subject.
For direction data, reference_frame indicates the zero direction; in this case, "straight ahead" is 0 radians.
view_angle_data = linspace(0, 4, 50);
direction_spatial_series = types.core.SpatialSeries( ...
'description', 'View angle of the subject measured in radians.', ...
'data', view_angle_data, ...
'timestamps', timestamps, ...
'reference_frame', 'straight ahead', ...
'data_unit', 'radians' ...
);
direction = types.core.CompassDirection();
direction.spatialseries.set('spatial_series', direction_spatial_series);
We can add a CompassDirection object to the behavior processing module the same way we have added the position data.
%behavior_processing_module = types.core.ProcessingModule("stores behavioral data."); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('CompassDirection', direction);
%nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it

BehavioralTimeSeries: Storing continuous behavior data

BehavioralTimeSeries is an interface for storing continuous behavior data, such as the speed of a subject.
speed_data = linspace(0, 0.4, 50);
 
speed_time_series = types.core.TimeSeries( ...
'data', speed_data, ...
'starting_time', 1.0, ... % NB: Important to set starting_time when using starting_time_rate
'starting_time_rate', 10.0, ... % Hz
'description', 'The speed of the subject measured over time.', ...
'data_unit', 'm/s' ...
);
 
behavioral_time_series = types.core.BehavioralTimeSeries();
behavioral_time_series.timeseries.set('speed', speed_time_series);
 
%behavior_processing_module = types.core.ProcessingModule("stores behavioral data."); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('BehavioralTimeSeries', behavioral_time_series);
%nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it

BehavioralEvents: Storing behavioral events

BehavioralEvents is an interface for storing behavioral events. We can use it for storing the timing and amount of rewards (e.g. water amount) or lever press times.
reward_amount = [1.0, 1.5, 1.0, 1.5];
event_timestamps = [1.0, 2.0, 5.0, 6.0];
 
time_series = types.core.TimeSeries( ...
'data', reward_amount, ...
'timestamps', event_timestamps, ...
'description', 'The water amount the subject received as a reward.', ...
'data_unit', 'ml' ...
);
 
behavioral_events = types.core.BehavioralEvents();
behavioral_events.timeseries.set('lever_presses', time_series);
 
%behavior_processing_module = types.core.ProcessingModule("stores behavioral data."); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('BehavioralEvents', behavioral_events);
%nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it
Storing only the timestamps of the events is possible with the ndx-events NWB extension. You can also add labels associated with the events with this extension. You can find information about installation and example usage here.

BehavioralEpochs: Storing intervals of behavior data

BehavioralEpochs is for storing intervals of behavior data. BehavioralEpochs uses IntervalSeries to represent the time intervals. Create an IntervalSeries object that represents the time intervals when the animal was running. IntervalSeries uses 1 to indicate the beginning of an interval and -1 to indicate the end.
run_intervals = types.core.IntervalSeries( ...
'description', 'Intervals when the animal was running.', ...
'data', [1, -1, 1, -1, 1, -1], ...
'timestamps', [0.5, 1.5, 3.5, 4.0, 7.0, 7.3] ...
);
 
behavioral_epochs = types.core.BehavioralEpochs();
behavioral_epochs.intervalseries.set('running', run_intervals);
You can add more than one IntervalSeries to a BehavioralEpochs object.
sleep_intervals = types.core.IntervalSeries( ...
'description', 'Intervals when the animal was sleeping', ...
'data', [1, -1, 1, -1], ...
'timestamps', [15.0, 30.0, 60.0, 95.0] ...
);
behavioral_epochs.intervalseries.set('sleeping', sleep_intervals);
 
% behavior_processing_module = types.core.ProcessingModule("stores behavioral data.");
% behavior_processing_module.nwbdatainterface.set('BehavioralEvents', behavioral_events);
% nwb.processing.set('behavior', behavior_processing_module);

Another approach: TimeIntervals

Using TimeIntervals to represent time intervals is often preferred over BehavioralEpochs and IntervalSeries. TimeIntervals is a subclass of DynamicTable, which offers flexibility for tabular data by allowing the addition of optional columns which are not defined in the standard DynamicTable class.
sleep_intervals = types.core.TimeIntervals( ...
'description', 'Intervals when the animal was sleeping.', ...
'colnames', {'start_time', 'stop_time', 'stage'} ...
);
 
sleep_intervals.addRow('start_time', 0.3, 'stop_time', 0.35, 'stage', 1);
sleep_intervals.addRow('start_time', 0.7, 'stop_time', 0.9, 'stage', 2);
sleep_intervals.addRow('start_time', 1.3, 'stop_time', 3.0, 'stage', 3);
 
nwb.intervals.set('sleep_intervals', sleep_intervals);
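Because TimeIntervals is a subclass of DynamicTable, the rows just added can be viewed as a MATLAB table (shown here purely for illustration):
sleep_intervals.toTable()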

EyeTracking: Storing continuous eye-tracking data of gaze direction

EyeTracking is for storing eye-tracking data which represents direction of gaze as measured by an eye tracking algorithm. An EyeTracking object holds one or more SpatialSeries objects that represent the gaze direction over time extracted from a video.
eye_position_data = [linspace(-20, 30, 50); linspace(30, -20, 50)];
 
right_eye_position = types.core.SpatialSeries( ...
'description', 'The position of the right eye measured in degrees.', ...
'data', eye_position_data, ...
'starting_time', 1.0, ... % NB: Important to set starting_time when using starting_time_rate
'starting_time_rate', 50.0, ... % Hz
'reference_frame', '(0,0) is middle', ...
'data_unit', 'degrees' ...
);
 
left_eye_position = types.core.SpatialSeries( ...
'description', 'The position of the left eye measured in degrees.', ...
'data', eye_position_data, ...
'starting_time', 1.0, ... % NB: Important to set starting_time when using starting_time_rate
'starting_time_rate', 50.0, ... % Hz
'reference_frame', '(0,0) is middle', ...
'data_unit', 'degrees' ...
);
 
eye_tracking = types.core.EyeTracking();
eye_tracking.spatialseries.set('right_eye_position', right_eye_position);
eye_tracking.spatialseries.set('left_eye_position', left_eye_position);
 
% behavior_processing_module = types.core.ProcessingModule("stores behavioral data.");
behavior_processing_module.nwbdatainterface.set('EyeTracking', eye_tracking);
% nwb.processing.set('behavior', behavior_processing_module);

PupilTracking: Storing continuous eye-tracking data of pupil size

PupilTracking is for storing eye-tracking data which represents pupil size. PupilTracking holds one or more TimeSeries objects that can represent different features such as the dilation of the pupil measured over time by a pupil tracking algorithm.
pupil_diameter = types.core.TimeSeries( ...
'description', 'Pupil diameter extracted from the video of the right eye.', ...
'data', linspace(0.001, 0.002, 50), ...
'starting_time', 1.0, ... % NB: Important to set starting_time when using starting_time_rate
'starting_time_rate', 20.0, ... % Hz
'data_unit', 'meters' ...
);
 
pupil_tracking = types.core.PupilTracking();
pupil_tracking.timeseries.set('pupil_diameter', pupil_diameter);
 
% behavior_processing_module = types.core.ProcessingModule("stores behavioral data.");
behavior_processing_module.nwbdatainterface.set('PupilTracking', pupil_tracking);
% nwb.processing.set('behavior', behavior_processing_module);

Writing the behavior data to an NWB file

All of the above commands build an NWBFile object in memory. To write this file to disk, use nwbExport.
% Save to tutorials/tutorial_nwb_files folder
nwbFilePath = misc.getTutorialNwbFilePath('behavior_tutorial.nwb');
nwbExport(nwb, nwbFilePath);
fprintf('Exported NWB file to "%s"\n', 'behavior_tutorial.nwb')
Exported NWB file to "behavior_tutorial.nwb"
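As a quick sanity check (a minimal sketch using the objects defined above), the exported file can be read back and the position data loaded into MATLAB:
read_nwb = nwbRead(nwbFilePath, 'ignorecache');
behavior_module = read_nwb.processing.get('behavior');
position_interface = behavior_module.nwbdatainterface.get('Position');
position_series_read = position_interface.spatialseries.get('SpatialSeries');
position_loaded = position_series_read.data.load(); % 2 x nT double, as written above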
diff --git a/tutorials/html/convertTrials.html b/docs/source/_static/html/tutorials/convertTrials.html
similarity index 99%
rename from tutorials/html/convertTrials.html
rename to docs/source/_static/html/tutorials/convertTrials.html

Hashes


To better understand how spike_times_index and spike_times map to each other, refer to this diagram from the Extracellular Electrophysiology Tutorial.

Raw Acquisition Data

Each ALM-3 session is associated with a large number of raw voltage data grouped by trial ID. To map this data to NWB, each trial is created as its own ElectricalSeries object under the name 'trial n' where 'n' is the trial ID. The trials are then linked to the trials dynamic table for easy referencing.

fprintf('Processing Raw Acquisition Data from `%s` (will take a while)\n', rawdata_loc);
@@ -1023,7 +1023,7 @@ 

Export

diff --git a/tutorials/html/dataPipe.html b/docs/source/_static/html/tutorials/dataPipe.html
similarity index 100%
rename from tutorials/html/dataPipe.html
rename to docs/source/_static/html/tutorials/dataPipe.html
diff --git a/tutorials/html/dimensionMapNoDataPipes.html b/docs/source/_static/html/tutorials/dimensionMapNoDataPipes.html
similarity index 99%
rename from tutorials/html/dimensionMapNoDataPipes.html
rename to docs/source/_static/html/tutorials/dimensionMapNoDataPipes.html
diff --git a/tutorials/html/dimensionMapWithDataPipes.html b/docs/source/_static/html/tutorials/dimensionMapWithDataPipes.html
similarity index 99%
rename from tutorials/html/dimensionMapWithDataPipes.html
rename to docs/source/_static/html/tutorials/dimensionMapWithDataPipes.html
diff --git a/docs/source/_static/html/tutorials/dynamic_tables.html b/docs/source/_static/html/tutorials/dynamic_tables.html
new file mode 100644

DynamicTables Tutorial

This is a user guide to interacting with DynamicTable objects in MatNWB.

MatNWB Setup

Start by setting up your MATLAB workspace. The code below adds the directory containing the MatNWB package to the MATLAB search path. MatNWB works by automatically creating API classes based on a defined schema.
%{
path_to_matnwb = '~/Repositories/matnwb'; % change to your own path location
addpath(genpath(path_to_matnwb));
%}

Constructing a table with initialized columns

The DynamicTable class represents a column-based table to which you can add custom columns. It consists of a description, a list of columns, and a list of row IDs. You can create a DynamicTable by first defining the VectorData objects that will make up the columns of the table. Each VectorData object must contain the same number of rows. A list of row IDs may be passed to the DynamicTable using the id argument. Row IDs are a useful way to access row information independent of row location index. The list of row IDs must be cast as an ElementIdentifiers object before being passed to the DynamicTable object. If no value is passed to id, an ElementIdentifiers object with 0-indexed row IDs will be created for you automatically.
MATLAB Syntax Note: Using column vectors is crucial to properly build vectors and tables. When defining individual values, make sure to use a semicolon (;) instead of a comma (,) when defining the data fields of these objects.
col1 = types.hdmf_common.VectorData( ...
'description', 'column #1', ...
'data', [1;2] ...
);
 
col2 = types.hdmf_common.VectorData( ...
'description', 'column #2', ...
'data', {'a';'b'} ...
);
 
my_table = types.hdmf_common.DynamicTable( ...
'description', 'an example table', ...
'colnames', {'col1', 'col2'}, ...
'col1', col1, ...
'col2', col2, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0;1]) ... % 0-indexed, for compatibility with Python
);
my_table
my_table =
DynamicTable with properties:

    id: [1×1 types.hdmf_common.ElementIdentifiers]
    colnames: {'col1' 'col2'}
    description: 'an example table'
    vectordata: [2×1 types.untyped.Set]

Adding rows

You can add rows to an existing DynamicTable using the object's addRow method. One way of using this method is to pass in the names of columns as parameter names followed by the elements to append. The data type of the appended elements must match the data type of the existing elements in that column.
my_table.addRow('col1', 3, 'col2', {'c'}, 'id', 2);

Adding columns

You can add new columns to an existing DynamicTable object using the addColumn method. One way of using this method is to pass in the names of each new column followed by the corresponding values for each new column. The height of the new columns must match the height of the table.
col3 = types.hdmf_common.VectorData('description', 'column #3', ...
'data', [100; 200; 300]);
col4 = types.hdmf_common.VectorData('description', 'column #4', ...
'data', {'a1'; 'b2'; 'c3'});
 
my_table.addColumn('col3', col3,'col4', col4);
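After this call, the new columns are registered in the table's colnames property (a quick check, shown for illustration):
disp(my_table.colnames) % {'col1'}  {'col2'}  {'col3'}  {'col4'}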

Create MATLAB table and convert to dynamic table

As an alternative to building a dynamic table using the DynamicTable and VectorData data types, it is also possible to create a MATLAB table and convert it to a dynamic table. Let's create the same table as before, but using MATLAB's table class:
% Create a table with two variables (columns):
T = table([1;2], {'a';'b'}, 'VariableNames', {'col1', 'col2'});
T.Properties.VariableDescriptions = {'column #1', 'column #2'};

Adding rows

T(end+1, :) = {3, 'c'};

Adding variables (columns)

T = addvars(T, [100;200;300], 'NewVariableNames',{'col3'});
T.Properties.VariableDescriptions{3} = 'column #3';
 
% Alternatively, a new variable can be added directly using dot syntax.
T.col4 = {'a1'; 'b2'; 'c3'};
T.Properties.VariableDescriptions{4} = 'column #4';
T
T = 3×4 table
         col1    col2    col3    col4
    1     1      'a'     100     'a1'
    2     2      'b'     200     'b2'
    3     3      'c'     300     'c3'

Convert to dynamic table

dynamic_table = util.table2nwb(T, 'A MATLAB table that was converted to a dynamic table')
dynamic_table =
DynamicTable with properties:

    id: [1×1 types.hdmf_common.ElementIdentifiers]
    colnames: {'col1' 'col2' 'col3' 'col4'}
    description: 'A MATLAB table that was converted to a dynamic table'
    vectordata: [4×1 types.untyped.Set]

Enumerated (categorical) data

EnumData is a special type of column for storing an enumerated data type. This way each unique value is stored once, and the data references those values by index. Using this method is more efficient than storing a single value many times, and has the advantage of communicating to downstream tools that the data is categorical in nature.

Warning Regarding EnumData

EnumData is currently an experimental feature and as such should not be used in a production environment.
CellTypeElements = types.hdmf_common.VectorData(...
'description', 'fixed set of elements referenced by cell_type' ...
, 'data', {'aa', 'bb', 'cc'} ... % the enumerated elements
);
CellType = types.hdmf_experimental.EnumData( ...
'description', 'this column holds categorical variables' ... % properties derived from VectorData
, 'data', [0, 1, 2, 1, 0] ... % zero-indexed offset to elements.
, 'elements', types.untyped.ObjectView(CellTypeElements) ...
);
 
MyTable = types.hdmf_common.DynamicTable('description', 'an example table');
MyTable.vectordata.set('cell_type_elements', CellTypeElements); % the *_elements format is required for compatibility with pynwb
MyTable.addColumn('cell_type', CellType);
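To map the stored zero-based indices back to their categorical labels, you can index the elements data directly (an illustrative sketch; the +1 accounts for MATLAB's 1-based indexing):
cell_type_labels = CellTypeElements.data(CellType.data + 1) % {'aa'}  {'bb'}  {'cc'}  {'bb'}  {'aa'}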

Ragged array columns

A table column with a different number of elements for each row is called a "ragged array column." To define a table with a ragged array column, pass both the VectorData and the corresponding VectorIndex as columns of the DynamicTable object. The VectorData columns will contain the data values. The VectorIndex column serves to indicate how to arrange the data across rows. By convention, the VectorIndex object corresponding to a particular column must have the same name with the addition of the '_index' suffix.
Below, the VectorIndex values indicate to place the 1st to 3rd (inclusive) elements of the VectorData into the first row and 4th element into the second row. The resulting table will have the cell {'1a'; '1b'; '1c'} in the first row and the cell {'2a'} in the second row.
 
col1 = types.hdmf_common.VectorData( ...
'description', 'column #1', ...
'data', {'1a'; '1b'; '1c'; '2a'} ...
);
 
col1_index = types.hdmf_common.VectorIndex( ...
'description', 'column #1 index', ...
'target',types.untyped.ObjectView(col1), ... % object view of target column
'data', [3; 4] ...
);
 
table_ragged_col = types.hdmf_common.DynamicTable( ...
'description', 'an example table', ...
'colnames', {'col1'}, ...
'col1', col1, ...
'col1_index', col1_index, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0; 1]) ... % 0-indexed, for compatibility with Python
);

Adding ragged array rows

You can add a new row to the ragged array column. Under the hood, the addRow method will add the appropriate value to the VectorIndex column to maintain proper formatting.
table_ragged_col.addRow('col1', {'3a'; '3b'; '3c'}, 'id', 2);

Accessing row elements

You can access data from entire rows of a DynamicTable object by calling the getRow method for the corresponding object. You can supply either an individual row number or a list of row numbers.
my_table.getRow(1)
ans = 1×4 table
         col1    col2    col3    col4
    1     1      'a'     100     'a1'
If you want to access values for just a subset of columns, you can pass in the 'columns' argument along with a cell array of the desired column names.
my_table.getRow(1:3, 'columns', {'col1'})
ans = 3×1 table
         col1
    1     1
    2     2
    3     3
You can also access specific rows by their corresponding row IDs, if they have been defined, by supplying a true Boolean to the 'useId' parameter.
my_table.getRow(1, 'useId', true)
ans = 1×4 table
         col1    col2    col3    col4
    1     2      'b'     200     'b2'
For ragged array columns, the getRow method returns a cell array with a different number of elements for each row.
table_ragged_col.getRow(1:2)
ans = 2×1 table
         col1
    1     [{'1a'};{'1b'};{'1c'}]
    2     1×1 cell

Accessing column elements

To access all rows from a particular column, use the get method on the vectordata field of the DynamicTable object.
 
my_table.vectordata.get('col2').data
ans = 3×1 cell
'a'
'b'
'c'

Referencing rows of other tables

You can create a column that references rows of other tables by adding a DynamicTableRegion object as a column of a DynamicTable. This is analogous to a foreign key in a relational database. The DynamicTableRegion class takes in an ObjectView object as argument. ObjectView objects create links from one object type referencing another.
dtr_col = types.hdmf_common.DynamicTableRegion( ...
'description', 'references multiple rows of earlier table', ...
'data', [0; 1; 1; 0], ... # 0-indexed
'table',types.untyped.ObjectView(my_table) ... % object view of target table
);
 
data_col = types.hdmf_common.VectorData( ...
'description', 'data column', ...
'data', {'a'; 'b'; 'c'; 'd'} ...
);
 
dtr_table = types.hdmf_common.DynamicTable( ...
'description', 'test table with DynamicTableRegion', ...
'colnames', {'data_col', 'dtr_col'}, ...
'dtr_col', dtr_col, ...
'data_col',data_col, ...
'id',types.hdmf_common.ElementIdentifiers('data', [0; 1; 2; 3]) ...
);

Converting a DynamicTable to a MATLAB table

You can convert a DynamicTable object to a MATLAB table by making use of the object's toTable method. This is a useful way to view the whole table in a human-readable format.
my_table.toTable()
ans = 3×5 table
         id    col1    col2    col3    col4
    1    0      1      'a'     100     'a1'
    2    1      2      'b'     200     'b2'
    3    2      3      'c'     300     'c3'
When the DynamicTable object contains a column that references other tables, you can pass in a Boolean to indicate whether to include just the row indices of the referenced table. Passing in false will result in inclusion of the referenced rows as nested tables.
dtr_table.toTable(false)
ans = 4×3 table
         id    data_col    dtr_col
    1    0     'a'         1×4 table
    2    1     'b'         1×4 table
    3    2     'c'         1×4 table
    4    3     'd'         1×4 table

Creating an expandable table

When using the default HDF5 backend, each column of these tables is an HDF5 Dataset, which by default has a fixed size. This means that once a file is written, it is not possible to add a new row. If you want to be able to save this file, load it, and add more rows to the table, you will need to set this up when you create the VectorData and ElementIdentifiers columns of a DynamicTable. Specifically, you must wrap the column data with a DataPipe object. The DataPipe class takes maxSize and axis as arguments to indicate the maximum desired size for each axis and the axis along which to append, respectively. For example, creating a DataPipe object with a maxSize value equal to [Inf, 1] indicates that the number of rows may grow indefinitely. In contrast, setting maxSize equal to [8, 1] would allow the column to grow to a maximum height of 8.
% create NwbFile object with required fields
file= NwbFile( ...
'session_start_time', datetime('2021-01-01 00:00:00', 'TimeZone', 'local'), ...
'identifier', 'ident1', ...
'session_description', 'ExpandableTableTutorial' ...
);
 
% create VectorData objects with DataPipe objects
start_time_exp = types.hdmf_common.VectorData( ...
'description', 'start times column', ...
'data', types.untyped.DataPipe( ...
'data', [1, 2], ... # data must be numerical
'maxSize', Inf ...
) ...
);
 
stop_time_exp = types.hdmf_common.VectorData( ...
'description', 'stop times column', ...
'data', types.untyped.DataPipe( ...
'data', [2, 3], ... #data must be numerical
'maxSize', Inf ...
) ...
);
 
random_exp = types.hdmf_common.VectorData( ...
'description', 'random data column', ...
'data', types.untyped.DataPipe( ...
'data', rand(5, 2), ... #data must be numerical
'maxSize', [5, Inf], ...
'axis', 2 ...
) ...
);
 
ids_exp = types.hdmf_common.ElementIdentifiers( ...
'data', types.untyped.DataPipe( ...
'data', int32([0; 1]), ... # data must be numerical
'maxSize', Inf ...
) ...
);
% create expandable table
colnames = {'start_time', 'stop_time', 'randomvalues'};
file.intervals_trials = types.core.TimeIntervals( ...
'description', 'test expandable dynamic table', ...
'colnames', colnames, ...
'start_time', start_time_exp, ...
'stop_time', stop_time_exp, ...
'randomvalues', random_exp, ...
'id', ids_exp ...
);
% export file
nwbExport(file, 'expandableTableTestFile.nwb');
Now, you can read in the file, add more rows, and save again to file
readFile = nwbRead('expandableTableTestFile.nwb', 'ignorecache');
readFile.intervals_trials.addRow( ...
'start_time', 3, ...
'stop_time', 4, ...
'randomvalues', rand(5,1), ...
'id', 2 ...
)
nwbExport(readFile, 'expandableTableTestFile.nwb');
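To confirm that the appended row was written, the file can be read back once more and displayed as a MATLAB table (a minimal check, shown for illustration):
verifyFile = nwbRead('expandableTableTestFile.nwb', 'ignorecache');
verifyFile.intervals_trials.toTable() % now contains three rows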
Note: DataPipe objects change how the dimension of the datasets for each column map onto the shape of HDF5 datasets. See README for more details.

Multidimensional Columns

The order of dimensions of multidimensional columns in MatNWB is reversed relative to the Python HDMF package (see README for detailed explanation). Therefore, the height of a multidimensional column belonging to a DynamicTable object is defined by the shape of its last dimension. A valid DynamicTable must have matched height across columns.

Constructing multidimensional columns

% Define 1D column
simple_col = types.hdmf_common.VectorData( ...
'description', '1D column',...
'data', rand(10,1) ...
);
% Define ND column
multi_col = types.hdmf_common.VectorData( ...
'description', 'multidimensional column',...
'data', rand(3,2,10) ...
);
% construct table
multi_dim_table = types.hdmf_common.DynamicTable( ...
'description','test table', ...
'colnames', {'simple','multi'}, ...
'simple', simple_col, ...
'multi', multi_col, ...
'id', types.hdmf_common.ElementIdentifiers('data', (0:9)') ... % 0-indexed, for compatibility with Python
);
 

Multidimensional ragged array columns

DynamicTable objects with multidimensional ragged array columns can be constructed by passing in the corresponding VectorIndex column
% Define column with data
multi_ragged_col = types.hdmf_common.VectorData( ...
'description', 'multidimensional ragged array column',...
'data', rand(2,3,5) ...
);
% Define column with VectorIndex
multi_ragged_index = types.hdmf_common.VectorIndex( ...
'description', 'index to multi_ragged_col', ...
'target', types.untyped.ObjectView(multi_ragged_col),'data', [2; 3; 5] ...
);
 
multi_ragged_table = types.hdmf_common.DynamicTable( ...
'description','test table', ...
'colnames', {'multi_ragged'}, ...
'multi_ragged', multi_ragged_col, ...
'multi_ragged_index', multi_ragged_index, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0; 1; 2]) ... % 0-indexed, for compatibility with Python
);

Adding rows to multidimensional array columns

DynamicTable objects with multidimensional array columns can also be constructed by adding a single row at a time. This method makes use of DataPipe objects because MATLAB doesn't support singleton dimensions for arrays with more than 2 dimensions. The code block below demonstrates how to build a DynamicTable object with a multidimensional ragged array column in this manner.
% Create file
file = NwbFile( ...
'session_start_time', datetime('2021-01-01 00:00:00', 'TimeZone', 'local'), ...
'identifier', 'ident1', ...
'session_description', 'test_file' ...
);
 
% Define Vector Data Objects with first row of table
start_time_exp = types.hdmf_common.VectorData( ...
'description', 'start times column', ...
'data', types.untyped.DataPipe( ...
'data', 1, ...
'maxSize', Inf ...
) ...
);
stop_time_exp = types.hdmf_common.VectorData( ...
'description', 'stop times column', ...
'data', types.untyped.DataPipe( ...
'data', 10, ...
'maxSize', Inf ...
) ...
);
random_exp = types.hdmf_common.VectorData( ...
'description', 'random data column', ...
'data', types.untyped.DataPipe( ...
'data', rand(3,2,5), ... #random data
'maxSize', [3, 2, Inf], ...
'axis', 3 ...
) ...
);
random_exp_index = types.hdmf_common.VectorIndex( ...
'description', 'index to random data column', ...
'target',types.untyped.ObjectView(random_exp), ...
'data', types.untyped.DataPipe( ...
'data', uint64(5), ...
'maxSize', Inf ...
) ...
);
ids_exp = types.hdmf_common.ElementIdentifiers( ...
'data', types.untyped.DataPipe( ...
'data', int64(0), ... # data must be numerical
'maxSize', Inf ...
) ...
);
% Create expandable table
colnames = {'start_time', 'stop_time', 'randomvalues'};
file.intervals_trials = types.core.TimeIntervals( ...
'description', 'test expandable dynamic table', ...
'colnames', colnames, ...
'start_time', start_time_exp, ...
'stop_time', stop_time_exp, ...
'randomvalues', random_exp, ...
'randomvalues_index', random_exp_index, ...
'id', ids_exp ...
);
% Export file
nwbExport(file, 'multiRaggedExpandableTableTest.nwb');
% Read in file
read_file = nwbRead('multiRaggedExpandableTableTest.nwb', 'ignorecache');
% add individual rows
read_file.intervals_trials.addRow( ...
'start_time', 2, ...
'stop_time', 20, ...
'randomvalues', rand(3,2,6), ...
'id', 1 ...
);
read_file.intervals_trials.addRow( ...
'start_time', 3, ...
'stop_time', 30, ...
'randomvalues', rand(3,2,3), ...
'id', 2 ...
);
read_file.intervals_trials.addRow( ...
'start_time', 4, ...
'stop_time', 40, ...
'randomvalues', rand(3,2,8), ...
'id', 3 ...
);
 

Learn More!

Python Tutorial

diff --git a/tutorials/html/dynamically_loaded_filters.html b/docs/source/_static/html/tutorials/dynamically_loaded_filters.html
similarity index 99%
rename from tutorials/html/dynamically_loaded_filters.html
rename to docs/source/_static/html/tutorials/dynamically_loaded_filters.html
diff --git a/docs/source/_static/html/tutorials/ecephys.html b/docs/source/_static/html/tutorials/ecephys.html
new file mode 100644

Neurodata Without Borders Extracellular Electrophysiology Tutorial

About This Tutorial

This tutorial describes storage of hypothetical data from extracellular electrophysiology experiments in NWB for the following data categories:
  • Raw voltage recording
  • Local field potential (LFP) and filtered electrical signals
  • Spike times

Before You Begin

It is recommended to first work through the Introduction to MatNWB tutorial, which demonstrates installing MatNWB and creating an NWB file with subject information, animal position, and trials, as well as writing and reading NWB files in MATLAB.
Important: The dimensions of timeseries data in MatNWB should be defined in the opposite order of how they are defined in the NWB schema. In NWB, time is always stored in the first dimension of the data, whereas in MatNWB time should be stored in the last dimension of the data. This is explained in more detail here: MatNWB <-> HDF5 Dimension Mapping.

Setting up the NWB File

An NWB file represents a single session of an experiment. Each file must have a session_description, identifier, and session_start_time. Create a new NWBFile object with these required fields along with any additional metadata. In MatNWB, arguments are specified using MATLAB's keyword argument pair convention, where each argument name is followed by its value.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'timestamps_reference_time', datetime(2018, 4, 25, 3, 0, 45, 'TimeZone', 'local'), ...
'general_experimenter', 'Last Name, First Name', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', {'DOI:10.1016/j.neuron.2016.12.011'}); % optional
nwb
nwb =
NwbFile with properties:

    nwb_version: '2.8.0'
    file_create_date: []
    identifier: 'Mouse5_Day3'
    session_description: 'mouse in open exploration'
    session_start_time: {[2018-04-25T02:30:03.000000+02:00]}
    timestamps_reference_time: {[2018-04-25T03:00:45.000000+02:00]}
    acquisition: [0×1 types.untyped.Set]
    analysis: [0×1 types.untyped.Set]
    general: [0×1 types.untyped.Set]
    general_data_collection: ''
    general_devices: [0×1 types.untyped.Set]
    general_experiment_description: ''
    general_experimenter: 'Last Name, First Name'
    general_extracellular_ephys: [0×1 types.untyped.Set]
    general_extracellular_ephys_electrodes: []
    general_institution: 'University of My Institution'
    general_intracellular_ephys: [0×1 types.untyped.Set]
    general_intracellular_ephys_experimental_conditions: []
    general_intracellular_ephys_filtering: ''
    general_intracellular_ephys_intracellular_recordings: []
    general_intracellular_ephys_repetitions: []
    general_intracellular_ephys_sequential_recordings: []
    general_intracellular_ephys_simultaneous_recordings: []
    general_intracellular_ephys_sweep_table: []
    general_keywords: ''
    general_lab: ''
    general_notes: ''
    general_optogenetics: [0×1 types.untyped.Set]
    general_optophysiology: [0×1 types.untyped.Set]
    general_pharmacology: ''
    general_protocol: ''
    general_related_publications: {'DOI:10.1016/j.neuron.2016.12.011'}
    general_session_id: 'session_1234'
    general_slices: ''
    general_source_script: ''
    general_source_script_file_name: ''
    general_stimulus: ''
    general_subject: []
    general_surgery: ''
    general_virus: ''
    general_was_generated_by: ''
    intervals: [0×1 types.untyped.Set]
    intervals_epochs: []
    intervals_invalid_times: []
    intervals_trials: []
    processing: [0×1 types.untyped.Set]
    scratch: [0×1 types.untyped.Set]
    stimulus_presentation: [0×1 types.untyped.Set]
    stimulus_templates: [0×1 types.untyped.Set]
    units: []

Electrode Information

In order to store extracellular electrophysiology data, you first must create an electrodes table describing the electrodes that generated the data. Extracellular electrodes are stored in an electrodes table, which is also a DynamicTable. The electrodes table must identify the location and electrode group of each electrode, and can include additional metadata columns such as x, y, z, impedance, and filtering.
The electrodes table references a required ElectrodeGroup, which is used to represent a group of electrodes. Before creating an ElectrodeGroup, you must define a Device object. The fields description, manufacturer, model_number, model_name, and serial_number are optional, but recommended.
device = types.core.Device(...
'description', 'A 12-channel array with 4 shanks and 3 channels per shank', ...
'manufacturer', 'Array Technologies', ...
'model_number', 'PRB_1_4_0480_123', ...
'model_name', 'Neurovoxels 0.99', ...
'serial_number', '1234567890' ...
);
 
% Add device to nwb object
nwb.general_devices.set('array', device);

Electrodes Table

Since this is a DynamicTable, we can add additional metadata fields. We will be adding a "label" column to the table.
numShanks = 4;
numChannelsPerShank = 3;
numChannels = numShanks * numChannelsPerShank;
 
electrodesDynamicTable = types.hdmf_common.DynamicTable(...
'colnames', {'location', 'group', 'group_name', 'label'}, ...
'description', 'all electrodes');
 
for iShank = 1:numShanks
shankGroupName = sprintf('shank%d', iShank);
electrodeGroup = types.core.ElectrodeGroup( ...
'description', sprintf('electrode group for %s', shankGroupName), ...
'location', 'brain area', ...
'device', types.untyped.SoftLink(device) ...
);
nwb.general_extracellular_ephys.set(shankGroupName, electrodeGroup);
for iElectrode = 1:numChannelsPerShank
electrodesDynamicTable.addRow( ...
'location', 'unknown', ...
'group', types.untyped.ObjectView(electrodeGroup), ...
'group_name', shankGroupName, ...
'label', sprintf('%s-electrode%d', shankGroupName, iElectrode));
end
end
electrodesDynamicTable.toTable() % Display the table
ans = 12×5 table
          id    location     group             group_name    label
     1     0    'unknown'    1×1 ObjectView    'shank1'      'shank1-electrode1'
     2     1    'unknown'    1×1 ObjectView    'shank1'      'shank1-electrode2'
     3     2    'unknown'    1×1 ObjectView    'shank1'      'shank1-electrode3'
     4     3    'unknown'    1×1 ObjectView    'shank2'      'shank2-electrode1'
     5     4    'unknown'    1×1 ObjectView    'shank2'      'shank2-electrode2'
     6     5    'unknown'    1×1 ObjectView    'shank2'      'shank2-electrode3'
     7     6    'unknown'    1×1 ObjectView    'shank3'      'shank3-electrode1'
     8     7    'unknown'    1×1 ObjectView    'shank3'      'shank3-electrode2'
     9     8    'unknown'    1×1 ObjectView    'shank3'      'shank3-electrode3'
    10     9    'unknown'    1×1 ObjectView    'shank4'      'shank4-electrode1'
    11    10    'unknown'    1×1 ObjectView    'shank4'      'shank4-electrode2'
    12    11    'unknown'    1×1 ObjectView    'shank4'      'shank4-electrode3'
nwb.general_extracellular_ephys_electrodes = electrodesDynamicTable;
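Because the electrodes table is a DynamicTable, individual rows can be inspected with getRow (shown here purely for illustration):
electrodesDynamicTable.getRow(1, 'columns', {'label'})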

Links

In the above loop, we create ElectrodeGroup objects. The electrodes table then uses an ObjectView in each row to link to the corresponding ElectrodeGroup object. An ObjectView is a construct that enables linking one neurodata type to another, allowing a neurodata type to reference another within the NWB file.

Recorded Extracellular Signals

Voltage data are stored using the ElectricalSeries class, a subclass of the TimeSeries class specialized for voltage data.

Referencing Electrodes

In order to create our ElectricalSeries object, we first need to reference a set of rows in the electrodes table to indicate which electrode (channel) each entry in the electrical series was recorded from. We will do this by creating a DynamicTableRegion, which is a type of link that allows you to reference specific rows of a DynamicTable, such as the electrodes table, using row indices.
Create a DynamicTableRegion that references all rows of the electrodes table.
electrode_table_region = types.hdmf_common.DynamicTableRegion( ...
'table', types.untyped.ObjectView(electrodesDynamicTable), ...
'description', 'all electrodes', ...
'data', (0:length(electrodesDynamicTable.id.data)-1)');

Raw Voltage Data

Now create an ElectricalSeries object to hold acquisition data collected during the experiment.
raw_electrical_series = types.core.ElectricalSeries( ...
'starting_time', 0.0, ... % seconds
'starting_time_rate', 30000., ... % Hz
'data', randn(numChannels, 3000), ... % nChannels x nTime
'electrodes', electrode_table_region, ...
'data_unit', 'volts');
This is the voltage data recorded directly from our electrodes, so it goes in the acquisition group.
nwb.acquisition.set('ElectricalSeries', raw_electrical_series);
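The series can be retrieved from the in-memory file with the same Set accessor used to store it (illustration only):
stored_series = nwb.acquisition.get('ElectricalSeries');
disp(stored_series.data_unit) % 'volts'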

Processed Extracellular Electrical Signals

LFP

LFP refers to data that has been low-pass filtered, typically below 300 Hz. This data may also be downsampled. Because it is filtered and potentially resampled, it is categorized as processed data. LFP data would also be stored in an ElectricalSeries. To help data analysis and visualization tools know that this ElectricalSeries object represents LFP data, we store it inside an LFP object and then place the LFP object in a ProcessingModule named 'ecephys'. This is analogous to how we stored the SpatialSeries object inside of a Position object and stored the Position object in a ProcessingModule named 'behavior' in the behavior tutorial.
lfp_electrical_series = types.core.ElectricalSeries( ...
'starting_time', 0.0, ... % seconds
'starting_time_rate', 1000., ... % Hz
'data', randn(numChannels, 100), ... nChannels x nTime
'filtering', 'Low-pass filter at 300 Hz', ...
'electrodes', electrode_table_region, ...
'data_unit', 'volts');
 
lfp = types.core.LFP('ElectricalSeries', lfp_electrical_series);
 
ecephys_module = types.core.ProcessingModule(...
'description', 'extracellular electrophysiology');
 
ecephys_module.nwbdatainterface.set('LFP', lfp);
nwb.processing.set('ecephys', ecephys_module);

Other Types of Filtered Electrical Signals

If your acquired data is filtered for frequency ranges other than LFP—such as Gamma or Theta—you can store the result in an ElectricalSeries and encapsulate it within a FilteredEphys object instead of the LFP object.
% Generate filtered data
filtered_data = randn(50, 12); % 50 time points, 12 channels
filtered_data = permute(filtered_data, [2, 1]); % permute timeseries for matnwb
 
% Create an ElectricalSeries object
filtered_electrical_series = types.core.ElectricalSeries( ...
'description', 'Data filtered in the theta range', ...
'data', filtered_data, ...
'electrodes', electrode_table_region, ...
'filtering', 'Band-pass filtered between 4 and 8 Hz', ...
'starting_time', 0.0, ...
'starting_time_rate', 200.0 ...
);
 
% Create a FilteredEphys object and add the filtered electrical series
filtered_ephys = types.core.FilteredEphys();
filtered_ephys.electricalseries.set('FilteredElectricalSeries', filtered_electrical_series);
 
% Add the FilteredEphys object to the ecephys module
ecephys_module.nwbdatainterface.set('FilteredEphys', filtered_ephys);

Decomposition of LFP Data into Frequency Bands

In some cases, you may want to further process the LFP data and decompose the signal into different frequency bands for additional downstream analyses. You can then store the processed data from these spectral analyses using a DecompositionSeries object. This object allows you to include metadata about the frequency bands and metric used (e.g., power, phase, amplitude), as well as link the decomposed data to the original TimeSeries signal the data was derived from.
In this tutorial, the examples for FilteredEphys and DecompositionSeries may appear similar. However, the key difference is that DecompositionSeries is specialized for storing the results of spectral analyses of timeseries data in general, whereas FilteredEphys is defined specifically as a container for filtered electrical signals.
Note: When adding data to a DecompositionSeries, the data argument is assumed to be 3D where the first dimension is time, the second dimension is channels, and the third dimension is bands. As mentioned in the beginning of this tutorial, in MatNWB the data needs to be permuted because the dimensions are written to file in reverse order (See the dimensionMapNoDataPipes tutorial)
% Define the frequency bands of interest (in Hz):
band_names = {'theta'; 'beta'; 'gamma'};
band_mean = [8; 21; 55];
band_stdev = [2; 4.5; 12.5];
band_limits = [band_mean - 2*band_stdev, band_mean + 2*band_stdev];
 
% The bands should be added to the DecompositionSeries as a dynamic table
bands = table(band_names, band_mean, band_stdev, band_limits, ...
'VariableNames', {'band_name', 'band_mean', 'band_stdev', 'band_limits'})
bands = 3×4 table
         band_name    band_mean    band_stdev    band_limits
                                                   1      2
    1    'theta'           8            2          4     12
    2    'beta'           21            4.5000    12     30
    3    'gamma'          55           12.5000    30     80
 
bands = util.table2nwb( bands );
 
% Generate random phase data for the demonstration.
phase_data = randn(50, 12, numel(band_names)); % 50 samples, 12 channels, 3 frequency bands
phase_data = permute(phase_data, [3,2,1]); % See dimensionMapNoDataPipes tutorial
 
decomp_series = types.core.DecompositionSeries(...
'data', phase_data, ...
'bands', bands, ...
'metric', 'phase', ...
'starting_time', 0.0, ... % seconds
'starting_time_rate', 1000.0, ... % Hz
'source_channels', electrode_table_region, ...
'source_timeseries', lfp_electrical_series);
 
% Add decomposition series to ecephys module
ecephys_module.nwbdatainterface.set('theta', decomp_series);

Spike Times and Extracellular Events

Sorted Spike Times

Spike times are stored in a Units table, a specialization of the DynamicTable class. The default Units table is located at /units in the HDF5 file. You can add columns to the Units table just like you did for electrodes and trials (see convertTrials). Here, we generate some random spike data and populate the table.
num_cells = 10;
spikes = cell(1, num_cells);
for iCell = 1:num_cells
spikes{iCell} = rand(1, randi([16, 28]));
end
spikes
spikes = 1×10 cell array
    {1×28 double}  {1×23 double}  {1×22 double}  {1×18 double}  {1×28 double}  {1×27 double}  {1×16 double}  {1×20 double}  {1×16 double}  {1×25 double}

Ragged Arrays

Spike times are an example of a ragged array: it's like a matrix, but each row has a different number of elements. We can represent this type of data as an indexed column of the Units table. These indexed columns have two components: the VectorData object that holds the data and the VectorIndex object that holds the indices in the vector that indicate the row breaks. You can use the convenience function util.create_indexed_column to create these objects. For more information about ragged arrays, we refer you to the "Ragged Array Columns" section of the dynamic table tutorial.
[spike_times_vector, spike_times_index] = util.create_indexed_column(spikes);
 
nwb.units = types.core.Units( ...
'colnames', {'spike_times'}, ...
'description', 'units table', ...
'spike_times', spike_times_vector, ...
'spike_times_index', spike_times_index ...
);
 
nwb.units.toTable
ans = 10×2 table
          id    spike_times
     1     1    28×1 double
     2     2    23×1 double
     3     3    22×1 double
     4     4    18×1 double
     5     5    28×1 double
     6     6    27×1 double
     7     7    16×1 double
     8     8    20×1 double
     9     9    16×1 double
    10    10    25×1 double
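To retrieve the spike times of a single unit from the ragged spike_times column, getRow can be used on the Units table (an illustrative sketch):
nwb.units.getRow(1, 'columns', {'spike_times'}) % spike times for the first unit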

Unsorted Spike Times

While the Units table is used to store spike times and waveform data for spike-sorted, single-unit activity, you may also want to store spike times and waveform snippets of unsorted spiking activity. This is useful for recording multi-unit activity detected via threshold crossings during data acquisition. Such information can be stored using SpikeEventSeries objects.
% In the SpikeEventSeries the dimensions should be ordered as
% [num_events, num_channels, num_samples].
% Define spike snippets: 20 events, 3 channels, 40 samples per event.
spike_snippets = rand(20, 3, 40);
% Permute spike snippets (See dimensionMapNoDataPipes tutorial)
spike_snippets = permute(spike_snippets, [3,2,1])
spike_snippets =

    40×3×20 double array of random values (full command-window display omitted)
0.4311 0.7596 + 0.2481 0.8256 0.3279 + 0.5044 0.2673 0.9989 + 0.4781 0.7066 0.5382 + 0.8705 0.4223 0.7397 + 0.0720 0.8964 0.0290 + 0.9790 0.6577 0.6155 + 0.0503 0.0324 0.9895 + 0.7996 0.6219 0.5752 + 0.5210 0.1083 0.2937 + 0.6928 0.0501 0.1635 + 0.1726 0.5401 0.5522 + 0.0340 0.5107 0.3251 + 0.4965 0.3874 0.4123 + 0.7150 0.2174 0.2595 + 0.1694 0.6481 0.9802 + 0.0203 0.0168 0.2362 + 0.4543 0.2922 0.6410 + 0.3363 0.5027 0.3156 + 0.8393 0.9019 0.2755 + 0.1506 0.3617 0.0054 + 0.9740 0.8166 0.3839 + 0.4138 0.0406 0.3421 + 0.4741 0.2626 0.6461 + 0.7169 0.0307 0.5651 + 0.1958 0.4275 0.4985 + 0.2665 0.0909 0.0131 + 0.4424 0.3729 0.4346 + 0.3583 0.7029 0.2813 + 0.0020 0.4379 0.7995 + 0.7815 0.0914 0.9053 + 0.7322 0.3600 0.9608 + 0.5807 0.3987 0.8529 + + +spike_snippets(:,:,13) = + + 0.8740 0.9886 0.0533 + 0.6871 0.2768 0.5105 + 0.6843 0.1896 0.0709 + 0.7360 0.2580 0.1465 + 0.7184 0.3671 0.4363 + 0.6519 0.0559 0.1912 + 0.8053 0.7929 0.8521 + 0.8286 0.8181 0.7207 + 0.4643 0.5097 0.7201 + 0.0299 0.5776 0.1998 + 0.9819 0.2988 0.7911 + 0.1906 0.8276 0.3694 + 0.8579 0.2428 0.4391 + 0.5570 0.3813 0.5508 + 0.4073 0.8123 0.7140 + 0.0132 0.7021 0.0088 + 0.3698 0.2442 0.9094 + 0.1435 0.7508 0.5907 + 0.1524 0.2186 0.7794 + 0.5434 0.9029 0.6338 + 0.1203 0.5489 0.4813 + 0.9589 0.5399 0.5145 + 0.2931 0.2897 0.1991 + 0.3683 0.6924 0.8939 + 0.7312 0.4815 0.0791 + 0.2631 0.1194 0.7662 + 0.2835 0.4860 0.8588 + 0.7334 0.0244 0.5409 + 0.1534 0.2786 0.3102 + 0.0404 0.7797 0.2954 + 0.9352 0.4512 0.7227 + 0.8328 0.7579 0.9379 + 0.9333 0.3501 0.9839 + 0.1407 0.0319 0.6854 + 0.1210 0.5139 0.5698 + 0.9028 0.7196 0.3772 + 0.5955 0.5716 0.6764 + 0.9000 0.1238 0.6069 + 0.4134 0.9882 0.7379 + 0.5040 0.7762 0.6439 + + +spike_snippets(:,:,14) = + + 0.2341 0.2654 0.5427 + 0.4818 0.5293 0.5168 + 0.2294 0.6467 0.9958 + 0.7958 0.6934 0.2860 + 0.4975 0.5649 0.7959 + 0.7196 0.4737 0.8255 + 0.8057 0.4004 0.9509 + 0.6560 0.5369 0.5213 + 0.7477 0.1546 0.4835 + 0.4777 0.7304 0.4283 + 0.4262 0.0116 0.8948 + 0.8520 0.6529 0.6583 + 0.2270 0.6712 0.3022 + 0.8346 0.2083 0.1361 + 0.9550 0.7615 0.9587 + 0.8159 0.2257 0.5480 + 0.8204 0.9541 0.0969 + 0.9043 0.5086 0.8840 + 0.2408 0.5656 0.8873 + 0.3962 0.7000 0.5818 + 0.5095 0.5067 0.1167 + 0.0457 0.2312 0.9517 + 0.7403 0.2852 0.6256 + 0.3879 0.6242 0.4508 + 0.7053 0.7154 0.7075 + 0.6602 0.1139 0.9352 + 0.4393 0.1678 0.0105 + 0.6334 0.3512 0.0828 + 0.2127 0.4905 0.3339 + 0.0445 0.2485 0.4423 + 0.1670 0.9595 0.3990 + 0.2466 0.4493 0.6520 + 0.8565 0.9700 0.6262 + 0.0480 0.7151 0.8541 + 0.5696 0.9940 0.9387 + 0.2690 0.5500 0.1816 + 0.9310 0.2805 0.7570 + 0.2799 0.6424 0.1191 + 0.4140 0.4945 0.0446 + 0.6910 0.2914 0.2811 + + +spike_snippets(:,:,15) = + + 0.7050 0.6649 0.3797 + 0.2507 0.7904 0.1209 + 0.9459 0.3466 0.2877 + 0.2532 0.0977 0.8401 + 0.0798 0.2892 0.9548 + 0.3758 0.9385 0.3280 + 0.0019 0.1758 0.7814 + 0.3525 0.9813 0.4546 + 0.2155 0.5227 0.7190 + 0.7647 0.4441 0.8175 + 0.8143 0.9736 0.3749 + 0.5439 0.0450 0.4367 + 0.1543 0.1703 0.3108 + 0.1335 0.7879 0.7686 + 0.4827 0.2187 0.6511 + 0.9664 0.6140 0.7144 + 0.7171 0.5339 0.3209 + 0.9936 0.8141 0.8067 + 0.7622 0.8299 0.2185 + 0.5630 0.8397 0.4204 + 0.5107 0.8064 0.8680 + 0.7182 0.5786 0.6850 + 0.1174 0.0227 0.8446 + 0.1409 0.2166 0.0980 + 0.8518 0.6195 0.8271 + 0.6001 0.5309 0.1204 + 0.2990 0.4100 0.7585 + 0.0109 0.1546 0.2445 + 0.7000 0.2947 0.6451 + 0.0231 0.8864 0.6683 + 0.6646 0.3389 0.5603 + 0.8043 0.9280 0.9662 + 0.7408 0.2432 0.7456 + 0.7825 0.0207 0.2702 + 0.6970 0.8573 0.3516 + 0.8846 0.1799 0.5087 + 0.6843 0.8117 0.7298 + 0.3054 0.4165 
0.6281 + 0.7008 0.4947 0.9817 + 0.3228 0.0900 0.7148 + + +spike_snippets(:,:,16) = + + 0.8501 0.1697 0.4830 + 0.7550 0.2698 0.4817 + 0.4231 0.2167 0.8478 + 0.6610 0.6920 0.4223 + 0.9003 0.8286 0.3787 + 0.6489 0.0868 0.8331 + 0.3726 0.5120 0.4634 + 0.8046 0.0630 0.9980 + 0.5001 0.2483 0.6720 + 0.4919 0.7457 0.3644 + 0.2806 0.8592 0.2708 + 0.7041 0.4565 0.7041 + 0.7571 0.3145 0.4306 + 0.1433 0.0681 0.0796 + 0.3416 0.1466 0.8312 + 0.2881 0.2956 0.2968 + 0.6497 0.6777 0.5812 + 0.7057 0.9174 0.3476 + 0.9881 0.0394 0.8190 + 0.4447 0.8762 0.8596 + 0.5798 0.2775 0.2431 + 0.6957 0.7380 0.2042 + 0.0013 0.8127 0.2558 + 0.6582 0.9655 0.0891 + 0.5809 0.8436 0.2228 + 0.6003 0.6662 0.9844 + 0.2841 0.4710 0.7061 + 0.1530 0.7042 0.0519 + 0.2881 0.6416 0.1521 + 0.9495 0.1540 0.3405 + 0.8380 0.5757 0.1285 + 0.1827 0.8129 0.2894 + 0.0654 0.1049 0.5028 + 0.7366 0.5585 0.0512 + 0.1126 0.3166 0.0850 + 0.1825 0.6805 0.1989 + 0.2679 0.3978 0.1293 + 0.9158 0.7006 0.2685 + 0.4545 0.2932 0.3404 + 1.0000 0.0030 0.6564 + + +spike_snippets(:,:,17) = + + 0.3781 0.0891 0.7495 + 0.5811 0.6245 0.4117 + 0.1735 0.0526 0.5633 + 0.4201 0.5983 0.0860 + 0.2610 0.2303 0.7280 + 0.2623 0.1760 0.5405 + 0.0586 0.2317 0.6323 + 0.4182 0.6497 0.7193 + 0.6031 0.2744 0.1561 + 0.0022 0.6643 0.6347 + 0.4616 0.1107 0.8060 + 0.7255 0.0674 0.8882 + 0.7225 0.2843 0.0686 + 0.7713 0.5789 0.3788 + 0.0632 0.5334 0.4694 + 0.5042 0.7561 0.6075 + 0.4723 0.3115 0.9758 + 0.3407 0.8297 0.4657 + 0.6990 0.0129 0.6239 + 0.5529 0.1378 0.0185 + 0.7666 0.8722 0.6284 + 0.8398 0.0162 0.7303 + 0.7897 0.5385 0.1133 + 0.4075 0.4509 0.0731 + 0.5501 0.6582 0.5385 + 0.4924 0.8690 0.9472 + 0.2847 0.6788 0.5375 + 0.8678 0.9092 0.4420 + 0.9705 0.5647 0.1101 + 0.1067 0.2457 0.5798 + 0.6038 0.7344 0.8686 + 0.9354 0.2040 0.0322 + 0.1363 0.2150 0.1660 + 0.8959 0.6751 0.0340 + 0.7246 0.9388 0.2265 + 0.1198 0.6017 0.1741 + 0.7284 0.6173 0.0153 + 0.0748 0.7500 0.5675 + 0.3675 0.2857 0.4057 + 0.0648 0.6220 0.6498 + + +spike_snippets(:,:,18) = + + 0.9810 0.9405 0.0119 + 0.7337 0.8453 0.9263 + 0.2645 0.3840 0.5125 + 0.0452 0.7139 0.1787 + 0.2421 0.2881 0.9904 + 0.0013 0.3479 0.7737 + 0.0323 0.6977 0.4620 + 0.2364 0.3529 0.8011 + 0.4242 0.1228 0.1930 + 0.0482 0.9142 0.9933 + 0.8719 0.5799 0.0832 + 0.2371 0.9634 0.7850 + 0.3332 0.4510 0.8518 + 0.3140 0.7218 0.1550 + 0.2430 0.2083 0.8879 + 0.8895 0.2607 0.6277 + 0.8244 0.0320 0.0768 + 0.5073 0.8188 0.0817 + 0.6267 0.7696 0.8384 + 0.8671 0.8632 0.1109 + 0.6538 0.3771 0.9772 + 0.0837 0.4417 0.6070 + 0.1695 0.6778 0.5626 + 0.1685 0.3904 0.5842 + 0.0359 0.0570 0.7002 + 0.2400 0.6206 0.0046 + 0.3518 0.2178 0.2698 + 0.2080 0.5689 0.6852 + 0.5844 0.1788 0.6406 + 0.5629 0.2768 0.8897 + 0.7105 0.7671 0.7617 + 0.3463 0.5626 0.9751 + 0.5426 0.1850 0.3000 + 0.8981 0.9939 0.1073 + 0.3221 0.4474 0.9807 + 0.5579 0.6733 0.7314 + 0.9361 0.1010 0.1900 + 0.8001 0.5527 0.4779 + 0.9882 0.0613 0.9023 + 0.0154 0.2626 0.4647 + + +spike_snippets(:,:,19) = + + 0.9411 0.2141 0.3295 + 0.1142 0.7536 0.2215 + 0.8159 0.2883 0.3557 + 0.7468 0.0402 0.0546 + 0.9047 0.2719 0.0076 + 0.5188 0.3717 0.6596 + 0.3360 0.8668 0.9273 + 0.7289 0.5772 0.0905 + 0.7476 0.4973 0.1308 + 0.5768 0.6858 0.2103 + 0.1768 0.9881 0.8935 + 0.7582 0.9268 0.0932 + 0.3598 0.2827 0.3469 + 0.0330 0.1115 0.1471 + 0.7715 0.7440 0.9154 + 0.7907 0.4724 0.8064 + 0.4657 0.9669 0.0401 + 0.5913 0.1725 0.4714 + 0.3025 0.4115 0.6948 + 0.1618 0.5454 0.1382 + 0.3927 0.4498 0.1724 + 0.5647 0.9850 0.6612 + 0.9736 0.5928 0.1097 + 0.9527 0.8762 0.0309 + 0.2944 0.2214 0.3343 + 0.9212 0.8000 0.1992 + 0.3045 0.7142 
0.9465 + 0.7422 0.0515 0.9299 + 0.4502 0.5540 0.1971 + 0.8310 0.8865 0.0221 + 0.2633 0.6030 0.0202 + 0.6542 0.3310 0.5329 + 0.6198 0.3466 0.4807 + 0.0751 0.6014 0.6222 + 0.0974 0.6281 0.7800 + 0.0531 0.1339 0.7712 + 0.9470 0.1350 0.6427 + 0.6851 0.0855 0.5139 + 0.9419 0.2321 0.0898 + 0.6144 0.8249 0.7300 + + +spike_snippets(:,:,20) = + + 0.9347 0.0631 0.9858 + 0.3126 0.2723 0.9104 + 0.5088 0.3233 0.4097 + 0.5101 0.6301 0.9434 + 0.0866 0.4273 0.5294 + 0.9717 0.8892 0.2561 + 0.3369 0.7906 0.3802 + 0.5707 0.9811 0.4617 + 0.7202 0.2679 0.7243 + 0.1354 0.9824 0.7818 + 0.5404 0.5866 0.3406 + 0.6356 0.7751 0.6401 + 0.2628 0.9497 0.3567 + 0.3216 0.1848 0.7129 + 0.1225 0.0552 0.7156 + 0.5192 0.4793 0.4657 + 0.3591 0.8921 0.1434 + 0.9425 0.3574 0.1688 + 0.2434 0.0231 0.7779 + 0.2197 0.5138 0.7525 + 0.7575 0.6033 0.6561 + 0.7893 0.2490 0.5304 + 0.1295 0.4641 0.9965 + 0.4562 0.3820 0.9148 + 0.7810 0.6530 0.7915 + 0.3228 0.1347 0.1928 + 0.5534 0.2789 0.8759 + 0.2467 0.4068 0.8975 + 0.5308 0.9809 0.3812 + 0.5501 0.1708 0.4069 + 0.6986 0.0152 0.7410 + 0.3920 0.9160 0.9353 + 0.7672 0.9505 0.2352 + 0.6263 0.1511 0.9096 + 0.3258 0.4288 0.6539 + 0.5240 0.1281 0.6987 + 0.8426 0.6987 0.6547 + 0.4928 0.3055 0.6311 + 0.2620 0.4354 0.3397 + 0.4799 0.1731 0.1259 +
 
% Create electrode table region referencing electrodes 0, 1, and 2
shank0_table_region = types.hdmf_common.DynamicTableRegion( ...
'table', types.untyped.ObjectView(electrodesDynamicTable), ...
'description', 'shank0', ...
'data', (0:2)');
 
% Define spike event series for unsorted spike times
spike_events = types.core.SpikeEventSeries( ...
'data', spike_snippets, ...
'timestamps', (0:19)', ... % Timestamps for each event
'description', 'events detected with 100uV threshold', ...
'electrodes', shank0_table_region ...
);
 
% Add spike event series to NWB file acquisition
nwb.acquisition.set('SpikeEvents_Shank0', spike_events);

Detected Events

If you need to store the complete, continuous raw voltage traces, along with unsorted spike times, you should store the traces in ElectricalSeries objects in the acquisition group, and use the EventDetection class to identify the spike events in your raw traces.
% Create the EventDetection object
event_detection = types.core.EventDetection( ...
'detection_method', 'thresholding, 1.5 * std', ...
'source_electricalseries', types.untyped.SoftLink(raw_electrical_series), ...
'source_idx', [1000; 2000; 3000], ...
'times', [.033, .066, .099] ...
);
 
% Add the EventDetection object to the ecephys module
ecephys_module.nwbdatainterface.set('ThresholdEvents', event_detection);

Storing Spike Features (e.g., Principal Components)

NWB also provides a way to store features of spikes, such as principal components, using the FeatureExtraction class.
% Generate random feature data (time x channel x feature)
features = rand(3, 12, 4); % 3 time points, 12 channels, 4 features
features = permute(features, [3,2,1]); % reverse dimension order for matnwb
 
% Create the FeatureExtraction object
feature_extraction = types.core.FeatureExtraction( ...
'description', {'PC1', 'PC2', 'PC3', 'PC4'}, ... % Feature descriptions
'electrodes', electrode_table_region, ... % DynamicTableRegion referencing the electrodes table
'times', [.033; .066; .099], ... % Column vector for times
'features', features ...
);
 
% Add the FeatureExtraction object to the ecephys module (if required)
ecephys_module.nwbdatainterface.set('PCA_features', feature_extraction);

Choosing NWB-Types for Electrophysiology Data (A Summary)

As mentioned above, ElectricalSeries objects are meant for storing electrical time series data such as raw voltage signals or processed signals like LFP or other filtered signals. In addition to the ElectricalSeries class, NWB provides several more classes for storing event-based electrophysiological data. We briefly discuss them here and refer the reader to the API documentation and the section on Extracellular Physiology in the "NWB Format Specification" for more details on using these objects.
For storing unsorted spiking data, there are two options. Which one you choose depends on what data you have available. If you need to store complete and/or continuous raw voltage traces, you should store the traces with ElectricalSeries objects as acquisition data, and use the EventDetection class for identifying the spike events in your raw traces. If you do not want to store the entire raw voltage traces, only the waveform ‘snippets’ surrounding spike events, you should use SpikeEventSeries objects.
The results of spike sorting (or clustering) should be stored in the top-level Units table. The Units table can hold just the spike times of sorted units or, optionally, include additional waveform information. You can use the optional predefined columns waveform_mean, waveform_sd, and waveforms in the Units table to store individual and mean waveform data.
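For instance, a minimal sketch of such a Units table for two hypothetical units might look like the following. The ids, spike times, and waveform values below are invented for illustration only, and the waveform_mean shape and helper usage should be adapted to your own snippet length and dimension ordering:
% Hypothetical example: two sorted units with spike times and mean waveforms
[spike_times_vector, spike_times_index] = util.create_indexed_column( ...
    {[0.1, 0.21, 0.5], [0.12, 0.3, 0.7, 0.9]}, ... % made-up spike times per unit
    'the spike times for each unit');
nwb.units = types.core.Units( ...
    'colnames', {'spike_times', 'waveform_mean'}, ...
    'description', 'sorted units with mean waveforms', ...
    'id', types.hdmf_common.ElementIdentifiers('data', int64([0, 1])), ...
    'spike_times', spike_times_vector, ...
    'spike_times_index', spike_times_index, ...
    'waveform_mean', types.hdmf_common.VectorData( ...
        'description', 'mean spike waveform for each unit', ...
        'data', rand(30, 2) ... % made-up data; dimensions are reversed in MATLAB relative to the spec
    ) ...
);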

Writing the NWB File

nwbExport(nwb, 'ecephys_tutorial.nwb')

Reading NWB Data

Data arrays are read passively from the file. Calling TimeSeries.data does not read the data values; instead, it returns an HDF5 DataStub object that can be indexed to read data on demand. This allows you to conveniently work with datasets that are too large to fit in RAM all at once. Calling the load method with no input arguments reads the entire dataset:
nwb2 = nwbRead('ecephys_tutorial.nwb', 'ignorecache');
nwb2.processing.get('ecephys'). ...
nwbdatainterface.get('LFP'). ...
electricalseries.get('ElectricalSeries'). ...
data.load;

Accessing Data Regions

If all you need is a data region, you can index a DataStub object like you would any normal array in MATLAB, as shown below. When indexing the dataset this way, only the selected region is read from disk into RAM. This allows you to handle very large datasets that would not fit entirely into RAM.
% read section of LFP
nwb2.processing.get('ecephys'). ...
nwbdatainterface.get('LFP'). ...
electricalseries.get('ElectricalSeries'). ...
data(1:5, 1:10)
ans = 5×10
    1.2857    0.3967   -0.1756   -0.7123   -1.1857   -0.5928    2.0533   -1.9719   -0.7640   -0.6813
    1.2358   -1.8348    0.0753    0.9697   -0.2605   -0.3033    0.8593   -0.3006   -0.5140   -0.4031
    0.8860    0.6285    0.7116   -1.6342   -0.7995   -0.6525    0.9014    0.4567    0.8246   -0.7725
    0.9067    0.8559    1.5436   -1.2608   -0.9221   -0.3216   -0.7603    0.5512   -1.0313   -0.7309
    1.5242   -0.0141   -0.6159    0.0304   -0.8619    0.4074    0.9767    0.6611   -0.8672    1.4337
 
% You can use the getRow method of the table to load spike times of a specific unit.
% To get the values, unpack from the returned table.
nwb.units.getRow(1).spike_times{1}
ans = 28×1
    0.6780
    0.0572
    0.7376
    0.1984
    0.0617
    0.0767
    0.0697
    0.1639
    0.9400
    0.6451

Learn more!

See the API documentation to learn what data types are available.

MATLAB tutorials

Python tutorials

See our tutorials for more details about your data type:
Check out other tutorials that teach advanced NWB topics:
+ \ No newline at end of file diff --git a/tutorials/html/ecephys.png b/docs/source/_static/html/tutorials/ecephys.png similarity index 100% rename from tutorials/html/ecephys.png rename to docs/source/_static/html/tutorials/ecephys.png diff --git a/tutorials/html/ecephys_01.png b/docs/source/_static/html/tutorials/ecephys_01.png similarity index 100% rename from tutorials/html/ecephys_01.png rename to docs/source/_static/html/tutorials/ecephys_01.png diff --git a/tutorials/html/ecephys_data_deps.png b/docs/source/_static/html/tutorials/ecephys_data_deps.png similarity index 100% rename from tutorials/html/ecephys_data_deps.png rename to docs/source/_static/html/tutorials/ecephys_data_deps.png diff --git a/tutorials/html/icephys.html b/docs/source/_static/html/tutorials/icephys.html similarity index 61% rename from tutorials/html/icephys.html rename to docs/source/_static/html/tutorials/icephys.html index 440dfc36..ec504c72 100644 --- a/tutorials/html/icephys.html +++ b/docs/source/_static/html/tutorials/icephys.html @@ -52,9 +52,9 @@ Add repetitions table Add experimental condition table Write the NWB file -Read the NWB file
The following tutorial describes storage of intracellular electrophysiology data in NWB. NWB supports storage of the time series describing the stimulus and response, information about the electrode and device used, as well as metadata about the organization of the experiment.
Illustration of the hierarchy of metadata tables used to describe the organization of intracellular electrophysiology experiments.

Creating an NWBFile

When creating an NWB file, the first step is to create the NWBFile object, which you can do using the NwbFile command.
session_start_time = datetime(2018, 3, 1, 12, 0, 0, 'TimeZone', 'local');
 
 
nwbfile = NwbFile( ...
'session_description', 'my first synthetic recording', ...
'identifier', 'EXAMPLE_ID', ...
'session_start_time', session_start_time, ...
'general_experimenter', 'Dr. Bilbo Baggins', ...
'general_lab', 'Bag End Laboratory', ...
'general_institution', 'University of Middle Earth at the Shire', ...
'general_experiment_description', 'I went on an adventure with thirteen dwarves to reclaim vast treasures.', ...
'general_session_id', 'LONELYMTN' ...
);
 

Device metadata

Device metadata is represented by Device objects.
 
device = types.core.Device();
nwbfile.general_devices.set('Heka ITC-1600', device);

Electrode metadata

Intracellular electrode metadata is represented by IntracellularElectrode objects. Create an electrode object, which requires a link to the device of the previous step. Then add it to the NWB file.
electrode = types.core.IntracellularElectrode( ...
'description', 'a mock intracellular electrode', ...
'device', types.untyped.SoftLink(device), ...
'cell_id', 'a very interesting cell' ...
);
nwbfile.general_intracellular_ephys.set('elec0', electrode);

Stimulus and response data

Intracellular stimulus and response data are represented with subclasses of PatchClampSeries. A stimulus is described by a time series representing voltage or current stimulation with a particular set of parameters. There are two classes for representing stimulus data: VoltageClampStimulusSeries and CurrentClampStimulusSeries.
The response is then described by a time series representing voltage or current recorded from a single cell using a single intracellular electrode via one of the following classes: VoltageClampSeries, CurrentClampSeries, or IZeroClampSeries.
Below we create a simple example stimulus/response recording data pair for a voltage clamp recording.
ccss = types.core.VoltageClampStimulusSeries( ...
'data', [1, 2, 3, 4, 5], ...
'starting_time', 123.6, ...
'starting_time_rate', 10e3, ...
'electrode', types.untyped.SoftLink(electrode), ...
'gain', 0.02, ...
'sweep_number', uint64(15), ...
'stimulus_description', 'N/A' ...
);
nwbfile.stimulus_presentation.set('ccss', ccss);
 
vcs = types.core.VoltageClampSeries( ...
'data', [0.1, 0.2, 0.3, 0.4, 0.5], ...
'data_conversion', 1e-12, ...
'data_resolution', NaN, ...
'starting_time', 123.6, ...
'starting_time_rate', 20e3, ...
'electrode', types.untyped.SoftLink(electrode), ...
'gain', 0.02, ...
'capacitance_slow', 100e-12, ...
'resistance_comp_correction', 70.0, ...
'stimulus_description', 'N/A', ...
'sweep_number', uint64(15) ...
);
nwbfile.acquisition.set('vcs', vcs);
You can add a stimulus/response recording data pair from a current clamp recording in the same way:
% Create a CurrentClampStimulusSeries object
ccss = types.core.CurrentClampStimulusSeries(...
'data', [1, 2, 3, 4, 5], ...
'starting_time', 123.6, ...
'starting_time_rate', 10e3, ...
'electrode', types.untyped.SoftLink(electrode), ...
'gain', 0.02, ...
'sweep_number', uint16(16), ...
'stimulus_description', 'N/A' ...
);
nwbfile.stimulus_presentation.set('ccss', ccss);
 
% Create a CurrentClampSeries object
ccs = types.core.CurrentClampSeries(...
'data', [0.1, 0.2, 0.3, 0.4, 0.5], ...
'data_conversion', 1e-12, ...
'data_resolution', NaN, ...
'starting_time', 123.6, ...
'starting_time_rate', 20e3, ...
'electrode', types.untyped.SoftLink(electrode), ...
'gain', 0.02, ...
'bias_current', 1e-12, ...
'bridge_balance', 70e6, ...
'capacitance_compensation', 1e-12, ...
'stimulus_description', 'N/A', ...
'sweep_number', uint16(16) ...
);
nwbfile.acquisition.set('ccs', ccs);
 
IZeroClampSeries is used when the current is clamped to 0.
% Create an IZeroClampSeries object
izcs = types.core.IZeroClampSeries(...
'data', [0.1, 0.2, 0.3, 0.4, 0.5], ...
'electrode', types.untyped.SoftLink(electrode), ...
'gain', 0.02, ...
'data_conversion', 1e-12, ...
'data_resolution', NaN, ...
'starting_time', 345.6, ...
'starting_time_rate', 20e3, ...
'sweep_number', uint16(17) ...
);
nwbfile.acquisition.set('izcs', izcs);

Adding an intracellular recording

The IntracellularRecordingsTable relates electrode, stimulus and response pairs and describes metadata specific to individual recordings.
Illustration of the structure of the IntracellularRecordingsTable
ic_rec_table = types.core.IntracellularRecordingsTable( ...
'categories', {'electrodes', 'stimuli', 'responses'}, ...
'colnames', {'recordings_tag'}, ...
'description', [ ...
'A table to group together a stimulus and response from a single ', ...
'electrode and a single simultaneous recording and for storing ', ...
'metadata about the intracellular recording.'], ...
'id', types.hdmf_common.ElementIdentifiers('data', int64([0, 1, 2])), ...
'recordings_tag', types.hdmf_common.VectorData( ...
'data', repmat({'Tag'}, 3, 1), ...
'description', 'Column for storing a custom recordings tag' ...
) ...
);
 
ic_rec_table.electrodes = types.core.IntracellularElectrodesTable( ...
'description', 'Table for storing intracellular electrode related metadata.', ...
'colnames', {'electrode'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64([0, 1, 2]) ...
), ...
'electrode', types.hdmf_common.VectorData( ...
'data', repmat(types.untyped.ObjectView(electrode), 3, 1), ...
'description', 'Column for storing the reference to the intracellular electrode' ...
) ...
);
 
ic_rec_table.stimuli = types.core.IntracellularStimuliTable( ...
'description', 'Table for storing intracellular stimulus related metadata.', ...
'colnames', {'stimulus'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64([0, 1, 2]) ...
), ...
'stimulus', types.core.TimeSeriesReferenceVectorData( ...
'description', 'Column storing the reference to the recorded stimulus for the recording (rows)', ...
'data', struct( ...
'idx_start', [0, 1, -1], ... % -1 indicates that no stimulus is associated with this recording
'count', [5, 3, -1], ...
'timeseries', [ ...
types.untyped.ObjectView(ccss), ...
types.untyped.ObjectView(ccss), ...
types.untyped.ObjectView(vcs) ...
] ...
)...
)...
);
 
ic_rec_table.responses = types.core.IntracellularResponsesTable( ...
'description', 'Table for storing intracellular response related metadata.', ...
'colnames', {'response'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64([0, 1, 2]) ...
), ...
'response', types.core.TimeSeriesReferenceVectorData( ...
'description', 'Column storing the reference to the recorded response for the recording (rows)', ...
'data', struct( ...
'idx_start', [0, 2, 0], ...
'count', [5, 3, 5], ...
'timeseries', [ ...
types.untyped.ObjectView(vcs), ...
types.untyped.ObjectView(vcs), ...
types.untyped.ObjectView(vcs) ...
] ...
)...
)...
);
 
The IntracellularRecordingsTable is not just a DynamicTable but an AlignedDynamicTable. The AlignedDynamicTable type is itself a DynamicTable that may contain an arbitrary number of additional DynamicTables, each of which defines a "category." This is similar to a table with “sub-headings”. In the case of the IntracellularRecordingsTable, we have three predefined categories, i.e., electrodes, stimuli, and responses. We can also dynamically add new categories to the table. As each category corresponds to a DynamicTable, this means we have to create a new DynamicTable and add it to our table.
% add category
ic_rec_table.categories = [ic_rec_table.categories, {'recording_lab_data'}];
ic_rec_table.dynamictable.set( ...
'recording_lab_data', types.hdmf_common.DynamicTable( ...
'description', 'category table for lab-specific recording metadata', ...
'colnames', {'location'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64([0, 1, 2]) ...
), ...
'location', types.hdmf_common.VectorData( ...
'data', {'Mordor', 'Gondor', 'Rohan'}, ...
'description', 'Recording location in Middle Earth' ...
) ...
) ...
);
In an AlignedDynamicTable all category tables must align with the main table, i.e., all tables must have the same number of rows and rows are expected to correspond to each other by index.
We can also add custom columns to any of the subcategory tables, i.e., the electrodes, stimuli, and responses tables, and any custom subcategory tables. All we need to do is indicate the name of the category we want to add the column to.
% Add voltage threshold as column of electrodes table
ic_rec_table.electrodes.colnames = [ic_rec_table.electrodes.colnames {'voltage_threshold'}];
ic_rec_table.electrodes.vectordata.set('voltage_threshold', types.hdmf_common.VectorData( ...
'data', [0.1, 0.12, 0.13], ...
'description', 'Just an example column on the electrodes category table' ...
) ...
);
 
nwbfile.general_intracellular_ephys_intracellular_recordings = ic_rec_table;

Hierarchical organization of recordings

To describe the organization of intracellular experiments, the metadata is organized hierarchically in a sequence of tables. All of the tables are so-called DynamicTables, enabling users to add columns for custom metadata. Storing data in hierarchical tables has the advantage that it allows us to avoid duplication of metadata. For example, for a single experiment we only need to describe the metadata that is constant across an experimental condition as a single row in the SimultaneousRecordingsTable, without having to replicate the same information across all repetitions and sequential-, simultaneous-, and individual intracellular recordings. For analysis, this means that we can easily focus on individual aspects of an experiment while still being able to access related information from the other tables. All of these tables are optional, but to use one you must use all of the lower-level tables, even if you only need a single row.

Add a simultaneous recording

The SimultaneousRecordingsTable groups intracellular recordings from the IntracellularRecordingsTable together that were recorded simultaneously from different electrodes and/or cells and describes metadata that is constant across the simultaneous recordings. In practice a simultaneous recording is often also referred to as a sweep. This example adds a custom column, "simultaneous_recording_tag."
% create simultaneous recordings table with custom column
% 'simultaneous_recording_tag'
 
[recordings_vector_data, recordings_vector_index] = util.create_indexed_column( ...
{[0, 1, 2],}, ...
'Column with references to one or more rows in the IntracellularRecordingsTable table', ...
ic_rec_table);
 
ic_sim_recs_table = types.core.SimultaneousRecordingsTable( ...
'description', [ ...
'A table for grouping different intracellular recordings from ', ...
'the IntracellularRecordingsTable table together that were recorded ', ...
'simultaneously from different electrodes.'...
], ...
'colnames', {'recordings', 'simultaneous_recording_tag'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64(12) ...
), ...
'recordings', recordings_vector_data, ...
'recordings_index', recordings_vector_index, ...
'simultaneous_recording_tag', types.hdmf_common.VectorData( ...
'description', 'A custom tag for simultaneous_recordings', ...
'data', {'LabTag1'} ...
) ...
);
 
Depending on the lab workflow, it may be useful to add complete columns to a table after we have already populated the table with rows. That would be done like so:
ic_sim_recs_table.colnames = [ic_sim_recs_table.colnames, {'simultaneous_recording_type'}];
ic_sim_recs_table.vectordata.set( ...
'simultaneous_recording_type', types.hdmf_common.VectorData(...
'description', 'Description of the type of simultaneous_recording', ...
'data', {'SimultaneousRecordingType1'} ...
) ...
);
 
nwbfile.general_intracellular_ephys_simultaneous_recordings = ic_sim_recs_table;

Add a sequential recording

The SequentialRecordingsTable groups simultaneously recorded intracellular recordings from the SimultaneousRecordingsTable together and describes metadata that is constant across the simultaneous recordings. In practice a sequential recording is often also referred to as a sweep sequence. A common use of sequential recordings is to group together simultaneous recordings where a sequence of stimuli of the same type with varying parameters have been presented in a sequence (e.g., a sequence of square waveforms with varying amplitude).
[simultaneous_recordings_vector_data, simultaneous_recordings_vector_index] = util.create_indexed_column( ...
{0,}, ...
'Column with references to one or more rows in the SimultaneousRecordingsTable table', ...
ic_sim_recs_table);
 
sequential_recordings = types.core.SequentialRecordingsTable( ...
'description', [ ...
'A table for grouping different intracellular recording ', ...
'simultaneous_recordings from the SimultaneousRecordingsTable ', ...
'table together. This is typically used to group together ', ...
'simultaneous_recordings where a sequence of stimuli of ', ...
'the same type with varying parameters have been presented in ', ...
'a sequence.' ...
], ...
'colnames', {'simultaneous_recordings', 'stimulus_type'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64(15) ...
), ...
'simultaneous_recordings', simultaneous_recordings_vector_data, ...
'simultaneous_recordings_index', simultaneous_recordings_vector_index, ...
'stimulus_type', types.hdmf_common.VectorData( ...
'description', 'Column storing the type of stimulus used for the sequential recording', ...
'data', {'square'} ...
) ...
);
 
nwbfile.general_intracellular_ephys_sequential_recordings = sequential_recordings;

Add repetitions table

The RepetitionsTable groups sequential recordings from the SequentialRecordingsTable. In practice, a repetition is often also referred to as a run. A typical use of the RepetitionsTable is to group sets of different stimuli that are applied in sequence and that may be repeated.
[sequential_recordings_vector_data, sequential_recordings_vector_index] = util.create_indexed_column( ...
{0,}, ...
'Column with references to one or more rows in the SequentialRecordingsTable table', ...
sequential_recordings);
 
 
nwbfile.general_intracellular_ephys_repetitions = types.core.RepetitionsTable( ...
'description', [ ...
'A table for grouping different intracellular recording sequential ', ...
'recordings together. With each SimultaneousRecording typically ', ...
'representing a particular type of stimulus, the RepetitionsTable ', ...
'table is typically used to group sets of stimuli applied in sequence.' ...
], ...
'colnames', {'sequential_recordings'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64(17) ...
), ...
'sequential_recordings', sequential_recordings_vector_data, ...
'sequential_recordings_index', sequential_recordings_vector_index ...
);

Add experimental condition table

The ExperimentalConditionsTable groups repetitions of intracellular recording from the RepetitionsTable together that belong to the same experimental conditions.
[repetitions_vector_data, repetitions_vector_index] = util.create_indexed_column( ...
{0, 0}, ...
'Column with references to one or more rows in the RepetitionsTable table', ...
nwbfile.general_intracellular_ephys_repetitions);
 
nwbfile.general_intracellular_ephys_experimental_conditions = types.core.ExperimentalConditionsTable( ...
'description', [ ...
'A table for grouping different intracellular recording ', ...
'repetitions together that belong to the same experimental ', ...
'conditions.' ...
], ...
'colnames', {'repetitions', 'tag'}, ...
'id', types.hdmf_common.ElementIdentifiers( ...
'data', int64([19, 21]) ...
), ...
'repetitions', repetitions_vector_data, ...
'repetitions_index', repetitions_vector_index, ...
'tag', types.hdmf_common.VectorData( ...
'description', 'integer tag for an experimental condition', ...
'data', [1,3] ...
) ...
);

Write the NWB file

nwbExport(nwbfile, 'test_new_icephys.nwb');

Read the NWB file

nwbfile2 = nwbRead('test_new_icephys.nwb', 'ignorecache')
nwbfile2 =
NwbFile with properties:
              nwb_version: '2.8.0'
         file_create_date: [1×1 types.untyped.DataStub]
               identifier: 'EXAMPLE_ID'
      session_description: 'my first synthetic recording'
    (remaining property listing omitted)
+ \ No newline at end of file diff --git a/docs/source/_static/html/tutorials/images.html b/docs/source/_static/html/tutorials/images.html new file mode 100644 index 00000000..4e25dc83 --- /dev/null +++ b/docs/source/_static/html/tutorials/images.html @@ -0,0 +1,381 @@ + +Storing Image Data in NWB

Storing Image Data in NWB

Image data can be a collection of individual images or movie segments (a movie is simply a series of images) about the subject, the environment, the presented stimuli, or other parts of the experiment. This tutorial focuses in particular on the usage of OpticalSeries, AbstractFeatureSeries, ImageSeries, the static image types (RGBAImage, RGBImage, GrayscaleImage), the Images container, and IndexSeries.

Create an NWB File

nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'timestamps_reference_time', datetime(2018, 4, 25, 3, 0, 45, 'TimeZone', 'local'), ...
'general_experimenter', 'LastName, FirstName', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', 'DOI:10.1016/j.neuron.2016.12.011' ... % optional
);
nwb
nwb =
NwbFile with properties: + + nwb_version: '2.8.0' + file_create_date: [] + identifier: 'Mouse5_Day3' + session_description: 'mouse in open exploration' + session_start_time: {[2018-04-25T02:30:03.000000+02:00]} + timestamps_reference_time: {[2018-04-25T03:00:45.000000+02:00]} + acquisition: [0×1 types.untyped.Set] + analysis: [0×1 types.untyped.Set] + general: [0×1 types.untyped.Set] + general_data_collection: '' + general_devices: [0×1 types.untyped.Set] + general_experiment_description: '' + general_experimenter: 'LastName, FirstName' + general_extracellular_ephys: [0×1 types.untyped.Set] + general_extracellular_ephys_electrodes: [] + general_institution: 'University of My Institution' + general_intracellular_ephys: [0×1 types.untyped.Set] + general_intracellular_ephys_experimental_conditions: [] + general_intracellular_ephys_filtering: '' + general_intracellular_ephys_intracellular_recordings: [] + general_intracellular_ephys_repetitions: [] + general_intracellular_ephys_sequential_recordings: [] + general_intracellular_ephys_simultaneous_recordings: [] + general_intracellular_ephys_sweep_table: [] + general_keywords: '' + general_lab: '' + general_notes: '' + general_optogenetics: [0×1 types.untyped.Set] + general_optophysiology: [0×1 types.untyped.Set] + general_pharmacology: '' + general_protocol: '' + general_related_publications: 'DOI:10.1016/j.neuron.2016.12.011' + general_session_id: 'session_1234' + general_slices: '' + general_source_script: '' + general_source_script_file_name: '' + general_stimulus: '' + general_subject: [] + general_surgery: '' + general_virus: '' + general_was_generated_by: '' + intervals: [0×1 types.untyped.Set] + intervals_epochs: [] + intervals_invalid_times: [] + intervals_trials: [] + processing: [0×1 types.untyped.Set] + scratch: [0×1 types.untyped.Set] + stimulus_presentation: [0×1 types.untyped.Set] + stimulus_templates: [0×1 types.untyped.Set] + units: [] +

OpticalSeries: Storing series of images as stimuli

OpticalSeries is for time series of images that were presented to the subject as stimuli. We will create an OpticalSeries object with the name "StimulusPresentation" representing what images were shown to the subject and at what times.
Image data can be stored either in the HDF5 file or as an external image file. For this tutorial, we will use fake image data with shape of ('time', 'x', 'y', 'RGB') = (200, 50, 50, 3). As in all TimeSeries, the first dimension is time. The second and third dimensions represent x and y. The fourth dimension represents the RGB value (length of 3) for color images. Please note: As described in the dimensionMapNoDataPipes tutorial, when a MATLAB array is exported to HDF5, the array is transposed. Therefore, in order to correctly export the data, we will need to create a transposed array, where the dimensions are in reverse order compared to the type specification.
NWB differentiates between acquired data and data that was presented as stimulus. We can add it to the NWBFile object as stimulus data.
If the sampling rate is constant, use rate and starting_time to specify time. For irregularly sampled recordings, use timestamps to specify time for each sample image.
image_data = randi(255, [3, 50, 50, 200]); % NB: Array is transposed
optical_series = types.core.OpticalSeries( ...
'distance', 0.7, ... % required
'field_of_view', [0.2, 0.3, 0.7], ... % required
'orientation', 'lower left', ... % required
'data', image_data, ...
'data_unit', 'n.a.', ...
'starting_time_rate', 1.0, ...
'starting_time', 0.0, ...
'description', 'The images presented to the subject as stimuli' ...
);
 
nwb.stimulus_presentation.set('StimulusPresentation', optical_series);
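If the stimulus frames were instead presented at irregular times, you could supply explicit timestamps rather than starting_time and starting_time_rate. A minimal sketch with made-up timing values (the variable names are illustrative):
% One presentation time per frame; the timing values here are arbitrary
irregular_timestamps = cumsum(0.03 + 0.02 * rand(1, 200)); % 200 monotonically increasing times (s)
optical_series_irregular = types.core.OpticalSeries( ...
    'distance', 0.7, ... % required
    'field_of_view', [0.2, 0.3, 0.7], ... % required
    'orientation', 'lower left', ... % required
    'data', image_data, ...
    'data_unit', 'n.a.', ...
    'timestamps', irregular_timestamps, ...
    'description', 'Stimulus frames presented at irregular times' ...
);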

AbstractFeatureSeries: Storing features of visual stimuli

While it is usually recommended to store the entire image data as an OpticalSeries, sometimes it is useful to store features of the visual stimuli instead of or in addition to the raw image data. For example, you may want to store the mean luminance of the image, the contrast, or the spatial frequency. This can be done using an instance of AbstractFeatureSeries. This class is a general container for storing time series of features that are derived from the raw image data.
% Create some fake feature data
feature_data = rand(3, 200); % 200 time points, 3 features
 
% Create an AbstractFeatureSeries object
abstract_feature_series = types.core.AbstractFeatureSeries( ...
'data', feature_data, ...
'timestamps', linspace(0, 1, 200), ...
'description', 'Features of the visual stimuli', ...
'features', {'luminance', 'contrast', 'spatial frequency'}, ...
'feature_units', {'n.a.', 'n.a.', 'cycles/degree'} ...
);
% Add the AbstractFeatureSeries to the NWBFile
nwb.stimulus_presentation.set('StimulusFeatures', abstract_feature_series);

ImageSeries: Storing series of images as acquisition

ImageSeries is a general container for time series of images acquired during the experiment. Image data can be stored either in the HDF5 file or as an external image file. When color images are stored in the HDF5 file the color channel order is expected to be RGB.
image_data = randi(255, [3, 50, 50, 200]);
behavior_images = types.core.ImageSeries( ...
'data', image_data, ...
'description', 'Image data of an animal in environment', ...
'data_unit', 'n.a.', ...
'starting_time_rate', 1.0, ...
'starting_time', 0.0 ...
);
 
nwb.acquisition.set('ImageSeries', behavior_images);

External Files

External files (e.g. video files of the behaving animal) can be added to the NWBFile by creating an ImageSeries object using the external_file attribute that specifies the path to the external file(s) on disk. The file(s) path must be relative to the path of the NWB file. Either external_file or data must be specified, but not both. external_file can be a cell array of multiple video files.
The starting_frame attribute serves as an index to indicate the starting frame of each external file, allowing you to skip the beginning of videos.
external_files = {'video1.mp4', 'video2.mp4'};
 
timestamps = [0.0, 0.04, 0.07, 0.1, 0.14, 0.16, 0.21];
behavior_external_file = types.core.ImageSeries( ...
'description', 'Behavior video of animal moving in environment', ...
'data_unit', 'n.a.', ...
'external_file', external_files, ...
'format', 'external', ...
'external_file_starting_frame', [0, 2], ... % one starting frame index per external file
'timestamps', timestamps ...
);
 
nwb.acquisition.set('ExternalVideos', behavior_external_file);

Static Images

Static images can be stored in an NWBFile object by creating an RGBAImage, RGBImage, or GrayscaleImage object with the image data. All of these image types provide an optional description parameter for a text description of the image and an optional resolution parameter to specify the resolution of the image in pixels/cm.

RGBAImage: for color images with transparency

RGBAImage is for storing data of a color image with transparency. data must be 3D, where the first and second dimensions represent x and y. The third dimension has length 4 and represents the RGBA values.
image_data = randi(255, [4, 200, 200]);
 
rgba_image = types.core.RGBAImage( ...
'data', image_data, ... % required
'resolution', 70.0, ...
'description', 'RGBA image' ...
);

RGBImage: for color images

RGBImage is for storing data of an RGB color image. data must be 3D, where the first and second dimensions represent x and y. The third dimension has length 3 and represents the RGB values.
image_data = randi(255, [3, 200, 200]);
 
rgb_image = types.core.RGBImage( ...
'data', image_data, ... % required
'resolution', 70.0, ...
'description', 'RGB image' ...
);

GrayscaleImage: for grayscale images

GrayscaleImage is for storing grayscale image data. data must be 2D where the first and second dimensions represent x and y.
image_data = randi(255, [200, 200]);
 
grayscale_image = types.core.GrayscaleImage( ...
'data', image_data, ... % required
'resolution', 70.0, ...
'description', 'Grayscale image' ...
);

Images: a container for images

Add the images to an Images container that accepts any of these image types.
image_collection = types.core.Images( ...
'description', 'A collection of logo images presented to the subject.'...
);
 
image_collection.image.set('rgba_image', rgba_image);
image_collection.image.set('rgb_image', rgb_image);
image_collection.image.set('grayscale_image', grayscale_image);
 
nwb.acquisition.set('image_collection', image_collection);

Index Series for Repeated Images

You may want to set up a time series of images where some images are repeated many times. You could create an ImageSeries that repeats the data each time the image is shown, but that would be inefficient, because it would store the same data multiple times. A better solution would be to store the unique images once and reference those images. This is how IndexSeries works. First, create an Images container with the order of images defined using an ImageReferences. Then create an IndexSeries that indexes into the Images.
rgbImage = imread('street2.jpg');
grayImage = uint8(sum(double(rgbImage), 3) ./ double(max(max(max(rgbImage)))));
GsStreet = types.core.GrayscaleImage(...
'data', grayImage, ...
'description', 'grayscale image of a street.', ...
'resolution', 28 ...
);
 
RgbStreet = types.core.RGBImage( ...
'data', rgbImage, ...
'resolution', 28, ...
'description', 'RGB Street' ...
);
 
ImageOrder = types.core.ImageReferences(...
'data', [types.untyped.ObjectView(RgbStreet), types.untyped.ObjectView(GsStreet)] ...
);
Images = types.core.Images( ...
'gs_street', GsStreet, ...
'rgb_street', RgbStreet, ...
'description', 'A collection of streets.', ...
'order_of_images', ImageOrder ...
);
 
types.core.IndexSeries(...
'data', [0, 1, 0, 1], ... % NOTE: 0-indexed
'indexed_images', Images, ...
'timestamps', [0.1, 0.2, 0.3, 0.4] ...
)
ans =
IndexSeries with properties: + + indexed_images: [1×1 types.core.Images] + indexed_timeseries: [] + starting_time_unit: 'seconds' + timestamps_interval: 1 + timestamps_unit: 'seconds' + data: [0 1 0 1] + comments: 'no comments' + control: [] + control_description: '' + data_continuity: '' + data_conversion: [] + data_offset: [] + data_resolution: [] + data_unit: 'N/A' + description: 'no description' + starting_time: [] + starting_time_rate: [] + timestamps: [0.1000 0.2000 0.3000 0.4000] +
Here, data contains the (0-indexed) indices of the displayed images, in the order defined by the ImageReferences object.
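To include these objects in the file, one option (the variable assignment and the names 'StreetImages' and 'StimulusIndexSeries' are illustrative, not part of the original example) is to store the Images container as a stimulus template and the IndexSeries under stimulus presentation:
stimulus_index = types.core.IndexSeries( ...
    'data', [0, 1, 0, 1], ... % 0-indexed references into the Images container
    'indexed_images', Images, ...
    'timestamps', [0.1, 0.2, 0.3, 0.4] ...
);
nwb.stimulus_templates.set('StreetImages', Images);
nwb.stimulus_presentation.set('StimulusIndexSeries', stimulus_index);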

Writing the images to an NWB File

Now use nwbExport to write the file.
nwbExport(nwb, "images_test.nwb");
+
+ +
+ \ No newline at end of file diff --git a/docs/source/_static/html/tutorials/intro.html b/docs/source/_static/html/tutorials/intro.html new file mode 100644 index 00000000..f468010b --- /dev/null +++ b/docs/source/_static/html/tutorials/intro.html @@ -0,0 +1,519 @@ + +Introduction to MatNWB

Introduction to MatNWB

Installing MatNWB

To install MatNWB from source, uncomment and run the code below (it is wrapped in a block comment). MatNWB works by automatically creating API classes based on the schema.
%{
!git clone https://github.com/NeurodataWithoutBorders/matnwb.git
addpath(genpath(pwd));
%}
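MatNWB generates these classes for the bundled schema automatically on demand. If your setup requires regenerating them manually (an assumption about your environment, not a step from the original tutorial), the generateCore function can be run once after installation:
% Regenerate the MatNWB API classes from the NWB schema (usually optional)
generateCore();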

Set up the NWB File

An NWB file represents a single session of an experiment. Each file must have a session_description, identifier, and session start time. Create a new NWBFile object with those and additional metadata using the NwbFile command. For all MatNWB classes and functions, arguments are passed as MATLAB name-value pairs, where each argument name is followed by its value. Ellipses (...) are line continuations, used for readability.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'general_experimenter', 'Last, First', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', {'DOI:10.1016/j.neuron.2016.12.011'}); % optional
nwb
nwb =
NwbFile with properties: + + nwb_version: '2.8.0' + file_create_date: [] + identifier: 'Mouse5_Day3' + session_description: 'mouse in open exploration' + session_start_time: {[2018-04-25T02:30:03.000000+02:00]} + timestamps_reference_time: [] + acquisition: [0×1 types.untyped.Set] + analysis: [0×1 types.untyped.Set] + general: [0×1 types.untyped.Set] + general_data_collection: '' + general_devices: [0×1 types.untyped.Set] + general_experiment_description: '' + general_experimenter: 'Last, First' + general_extracellular_ephys: [0×1 types.untyped.Set] + general_extracellular_ephys_electrodes: [] + general_institution: 'University of My Institution' + general_intracellular_ephys: [0×1 types.untyped.Set] + general_intracellular_ephys_experimental_conditions: [] + general_intracellular_ephys_filtering: '' + general_intracellular_ephys_intracellular_recordings: [] + general_intracellular_ephys_repetitions: [] + general_intracellular_ephys_sequential_recordings: [] + general_intracellular_ephys_simultaneous_recordings: [] + general_intracellular_ephys_sweep_table: [] + general_keywords: '' + general_lab: '' + general_notes: '' + general_optogenetics: [0×1 types.untyped.Set] + general_optophysiology: [0×1 types.untyped.Set] + general_pharmacology: '' + general_protocol: '' + general_related_publications: {'DOI:10.1016/j.neuron.2016.12.011'} + general_session_id: 'session_1234' + general_slices: '' + general_source_script: '' + general_source_script_file_name: '' + general_stimulus: '' + general_subject: [] + general_surgery: '' + general_virus: '' + general_was_generated_by: '' + intervals: [0×1 types.untyped.Set] + intervals_epochs: [] + intervals_invalid_times: [] + intervals_trials: [] + processing: [0×1 types.untyped.Set] + scratch: [0×1 types.untyped.Set] + stimulus_presentation: [0×1 types.untyped.Set] + stimulus_templates: [0×1 types.untyped.Set] + units: [] + +Warning: The following required properties are missing for instance for type "NwbFile": + timestamps_reference_time

Subject Information

You can also provide information about your subject in the NWB file. Create a Subject object to store information such as age, species, genotype, sex, and a freeform description. Then set nwb.general_subject to the Subject object.
Each of these fields is free-form, so any values will be valid, but here are our recommendations:
  • For age, we recommend using the ISO 8601 Duration format
  • For species, we recommend using the formal Latin binomial name (e.g. mouse -> Mus musculus, human -> Homo sapiens)
  • For sex, we recommend using F (female), M (male), U (unknown), and O (other)
subject = types.core.Subject( ...
'subject_id', '001', ...
'age', 'P90D', ...
'description', 'mouse 5', ...
'species', 'Mus musculus', ...
'sex', 'M' ...
);
nwb.general_subject = subject;
 
subject
subject =
Subject with properties: + + age: 'P90D' + age_reference: 'birth' + date_of_birth: [] + description: 'mouse 5' + genotype: '' + sex: 'M' + species: 'Mus musculus' + strain: '' + subject_id: '001' + weight: '' +
Note: the DANDI archive requires all NWB files to have a subject object with subject_id specified, and strongly encourages specifying the other fields.

Time Series Data

TimeSeries is a common base class for measurements sampled over time, and provides fields for data and timestamps (regularly or irregularly sampled). You will also need to supply the name and unit of measurement (SI unit).
For instance, we can store TimeSeries data from a recording that started 0.0 seconds after start_time and was sampled once per second (1 Hz):
time_series_with_rate = types.core.TimeSeries( ...
'description', 'an example time series', ...
'data', linspace(0, 100, 10), ...
'data_unit', 'm', ...
'starting_time', 0.0, ...
'starting_time_rate', 1.0);
For irregularly sampled recordings, we need to provide the timestamps for the data:
time_series_with_timestamps = types.core.TimeSeries( ...
'description', 'an example time series', ...
'data', linspace(0, 100, 10), ...
'data_unit', 'm', ...
'timestamps', linspace(0, 1, 10));
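Either of these series can then be placed in the file; raw acquired measurements typically go in the acquisition group (the name 'example_timeseries' below is just illustrative):
nwb.acquisition.set('example_timeseries', time_series_with_rate);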
The TimeSeries class serves as the foundation for all other time series types in the NWB format. Several specialized subclasses extend the functionality of TimeSeries, each tailored to handle specific kinds of data. In the next section, we’ll explore one of these specialized types. For a full overview, please check out the type hierarchy in the NWB schema documentation.

Other Types of Time Series

As mentioned previously, there are many subtypes of TimeSeries in MatNWB that are used to store different kinds of data. One example is AnnotationSeries, a subclass of TimeSeries that stores text-based records about the experiment. Below, similar to the TimeSeries example above, we create an AnnotationSeries object with annotations for airpuff stimuli and add it to the stimulus_presentation group of the NWBFile.
% Create an AnnotationSeries object with annotations for airpuff stimuli
annotations = types.core.AnnotationSeries( ...
'description', 'Airpuff events delivered to the animal', ...
'data', {'Left Airpuff', 'Right Airpuff', 'Right Airpuff'}, ...
'timestamps', [1.0, 3.0, 8.0] ...
);
 
% Add the AnnotationSeries to the NWBFile's stimulus group
nwb.stimulus_presentation.set('Airpuffs', annotations)
ans =
Set with properties: + + Airpuffs: [types.core.AnnotationSeries] +

Behavior

SpatialSeries and Position

Many types of data have special data types in NWB. To store the spatial position of a subject, we will use the SpatialSeries and Position classes.
Note: These diagrams follow a standard convention called "UML class diagram" to express the object-oriented relationships between NWB classes. For our purposes, all you need to know is that an open triangle means "extends" (i.e., is a specialized subtype of), and an open diamond means "is contained within." Learn more about class diagrams on the wikipedia page.
SpatialSeries is a subclass of TimeSeries, a common base class for measurements sampled over time, and provides fields for data and time (regularly or irregularly sampled). Here, we put a SpatialSeries object called 'SpatialSeries' in a Position object. If the data is sampled at a regular interval, it is recommended to specify the starting_time and the sampling rate (starting_time_rate), although it is still possible to specify timestamps as in the time_series_with_timestamps example above.
% create SpatialSeries object
spatial_series_ts = types.core.SpatialSeries( ...
'data', [linspace(0,10,100); linspace(0,8,100)], ...
'reference_frame', '(0,0) is bottom left corner', ...
'starting_time', 0, ...
'starting_time_rate', 200 ...
);
 
% create Position object and add SpatialSeries
position = types.core.Position('SpatialSeries', spatial_series_ts);
NWB differentiates between raw, acquired data, which should never change, and processed data, which are the results of preprocessing algorithms and could change. Let's assume that the animal's position was computed from a video tracking algorithm, so it would be classified as processed data. Since processed data can be very diverse, NWB allows us to create processing modules, which are like folders, to store related processed data or data that comes from a single algorithm.
Create a processing module called "behavior" for storing behavioral data in the NWBFile and add the Position object to the module.
% create processing module
behavior_module = types.core.ProcessingModule('description', 'contains behavioral data');
 
% add the Position object (that holds the SpatialSeries object) to the module
% and name the Position object "Position"
behavior_module.nwbdatainterface.set('Position', position);
 
% add the processing module to the NWBFile object, and name the processing module "behavior"
nwb.processing.set('behavior', behavior_module);

Trials

Trials are stored in a TimeIntervals object which is a subclass of DynamicTable. DynamicTable objects are used to store tabular metadata throughout NWB, including for trials, electrodes, and sorted units. They offer flexibility for tabular data by allowing required columns, optional columns, and custom columns.
The trials DynamicTable can be thought of as a table with an id column, required start_time and stop_time columns, and any custom columns you add.
Here, we add a custom column named 'correct', which will be a logical array.
trials = types.core.TimeIntervals( ...
'colnames', {'start_time', 'stop_time', 'correct'}, ...
'description', 'trial data and properties');
 
trials.addRow('start_time', 0.1, 'stop_time', 1.0, 'correct', false)
trials.addRow('start_time', 1.5, 'stop_time', 2.0, 'correct', true)
trials.addRow('start_time', 2.5, 'stop_time', 3.0, 'correct', false)
 
trials.toTable() % visualize the table
ans = 3×4 table
    id    start_time    stop_time    correct
    1     0.1000        1            0
    2     1.5000        2            1
    3     2.5000        3            0
nwb.intervals_trials = trials;
 
% If you have multiple trials tables, you will need to use custom names for
% each one:
nwb.intervals.set('custom_intervals_table_name', trials);

Write

Now, to write the NWB file that we have built so far:
nwbExport(nwb, 'intro_tutorial.nwb')
We can use the HDFView application to inspect the resulting NWB file.

Read

We can then read the file back in using MatNWB and inspect its contents.
read_nwbfile = nwbRead('intro_tutorial.nwb', 'ignorecache')
read_nwbfile =
NwbFile with properties: + + nwb_version: '2.8.0' + file_create_date: [1×1 types.untyped.DataStub] + identifier: 'Mouse5_Day3' + session_description: 'mouse in open exploration' + session_start_time: [1×1 types.untyped.DataStub] + timestamps_reference_time: [1×1 types.untyped.DataStub] + acquisition: [0×1 types.untyped.Set] + analysis: [0×1 types.untyped.Set] + general: [0×1 types.untyped.Set] + general_data_collection: '' + general_devices: [0×1 types.untyped.Set] + general_experiment_description: '' + general_experimenter: [1×1 types.untyped.DataStub] + general_extracellular_ephys: [0×1 types.untyped.Set] + general_extracellular_ephys_electrodes: [] + general_institution: 'University of My Institution' + general_intracellular_ephys: [0×1 types.untyped.Set] + general_intracellular_ephys_experimental_conditions: [] + general_intracellular_ephys_filtering: '' + general_intracellular_ephys_intracellular_recordings: [] + general_intracellular_ephys_repetitions: [] + general_intracellular_ephys_sequential_recordings: [] + general_intracellular_ephys_simultaneous_recordings: [] + general_intracellular_ephys_sweep_table: [] + general_keywords: '' + general_lab: '' + general_notes: '' + general_optogenetics: [0×1 types.untyped.Set] + general_optophysiology: [0×1 types.untyped.Set] + general_pharmacology: '' + general_protocol: '' + general_related_publications: [1×1 types.untyped.DataStub] + general_session_id: 'session_1234' + general_slices: '' + general_source_script: '' + general_source_script_file_name: '' + general_stimulus: '' + general_subject: [1×1 types.core.Subject] + general_surgery: '' + general_virus: '' + general_was_generated_by: '' + intervals: [1×1 types.untyped.Set] + intervals_epochs: [] + intervals_invalid_times: [] + intervals_trials: [1×1 types.core.TimeIntervals] + processing: [1×1 types.untyped.Set] + scratch: [0×1 types.untyped.Set] + stimulus_presentation: [1×1 types.untyped.Set] + stimulus_templates: [0×1 types.untyped.Set] + units: [] +
We can print the SpatialSeries data traversing the hierarchy of objects. The processing module called 'behavior' contains our Position object named 'Position'. The Position object contains our SpatialSeries object named 'SpatialSeries'.
read_spatial_series = read_nwbfile.processing.get('behavior'). ...
nwbdatainterface.get('Position').spatialseries.get('SpatialSeries')
read_spatial_series =
SpatialSeries with properties: + + reference_frame: '(0,0) is bottom left corner' + starting_time_unit: 'seconds' + timestamps_interval: 1 + timestamps_unit: 'seconds' + data: [1×1 types.untyped.DataStub] + comments: 'no comments' + control: [] + control_description: '' + data_continuity: '' + data_conversion: 1 + data_offset: 0 + data_resolution: -1 + data_unit: 'meters' + description: 'no description' + starting_time: 0 + starting_time_rate: 200 + timestamps: [] +

Reading Data

In contrast to the typical MATLAB workflow, data arrays are read lazily from the file. Calling read_spatial_series.data does not read the data values; instead, it returns a DataStub object that can be indexed to read data on demand.
read_spatial_series.data
ans =
DataStub with properties: + + filename: 'intro_tutorial.nwb' + path: '/processing/behavior/Position/SpatialSeries/data' + dims: [2 100] + ndims: 2 + dataType: 'double' +
This allows you to conveniently work with datasets that are too large to fit in RAM all at once. Access all the data in the matrix using the load method with no arguments.
read_spatial_series.data.load
ans = 2×100
0 0.1010 0.2020 0.3030 0.4040 0.5051 0.6061 0.7071 0.8081 0.9091 1.0101 1.1111 1.2121 1.3131 1.4141 1.5152 1.6162 1.7172 1.8182 1.9192 2.0202 2.1212 2.2222 2.3232 2.4242 2.5253 2.6263 2.7273 2.8283 2.9293 3.0303 3.1313 3.2323 3.3333 3.4343 3.5354 3.6364 3.7374 3.8384 3.9394 4.0404 4.1414 4.2424 4.3434 4.4444 4.5455 4.6465 4.7475 4.8485 4.9495 + 0 0.0808 0.1616 0.2424 0.3232 0.4040 0.4848 0.5657 0.6465 0.7273 0.8081 0.8889 0.9697 1.0505 1.1313 1.2121 1.2929 1.3737 1.4545 1.5354 1.6162 1.6970 1.7778 1.8586 1.9394 2.0202 2.1010 2.1818 2.2626 2.3434 2.4242 2.5051 2.5859 2.6667 2.7475 2.8283 2.9091 2.9899 3.0707 3.1515 3.2323 3.3131 3.3939 3.4747 3.5556 3.6364 3.7172 3.7980 3.8788 3.9596 +
If you only need a section of the data, you can read only that section by indexing the DataStub object like a normal array in MATLAB. This will just read the selected region from disk into RAM. This technique is particularly useful if you are dealing with a large dataset that is too big to fit entirely into your available RAM.
read_spatial_series.data(:, 1:10)
ans = 2×10
0 0.1010 0.2020 0.3030 0.4040 0.5051 0.6061 0.7071 0.8081 0.9091 + 0 0.0808 0.1616 0.2424 0.3232 0.4040 0.4848 0.5657 0.6465 0.7273 +

Next Steps

This concludes the introductory tutorial. Please proceed to one of the specialized tutorials, which are designed to follow this one.
See the API documentation to learn what data types are available.
+
+ +
+ \ No newline at end of file diff --git a/tutorials/html/ogen.html b/docs/source/_static/html/tutorials/ogen.html similarity index 56% rename from tutorials/html/ogen.html rename to docs/source/_static/html/tutorials/ogen.html index 4e19478b..11ca64d0 100644 --- a/tutorials/html/ogen.html +++ b/docs/source/_static/html/tutorials/ogen.html @@ -33,11 +33,11 @@ .rightPaneElement .textElement { padding-top: 2px; padding-left: 9px;} .S7 { border-left: 1px solid rgb(217, 217, 217); border-right: 1px solid rgb(217, 217, 217); border-top: 0px none rgb(33, 33, 33); border-bottom: 1px solid rgb(217, 217, 217); border-radius: 0px 0px 4px 4px; padding: 0px 45px 4px 13px; line-height: 18.004px; min-height: 0px; white-space: nowrap; color: rgb(33, 33, 33); font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; } .S8 { margin: 10px 10px 9px 4px; padding: 0px; line-height: 21px; min-height: 0px; white-space: pre-wrap; color: rgb(33, 33, 33); font-family: Helvetica, Arial, sans-serif; font-style: normal; font-size: 14px; font-weight: 400; text-align: left; } -.S9 { border-left: 1px solid rgb(217, 217, 217); border-right: 1px solid rgb(217, 217, 217); border-top: 1px solid rgb(217, 217, 217); border-bottom: 1px solid rgb(217, 217, 217); border-radius: 4px; padding: 6px 45px 4px 13px; line-height: 18.004px; min-height: 0px; white-space: nowrap; color: rgb(33, 33, 33); font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; }

Optogenetics

This tutorial will demonstrate how to write optogenetics data.

Creating an NWBFile object

When creating an NWB file, the first step is to create the NWBFile object using NwbFile.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', char(java.util.UUID.randomUUID), ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'general_experimenter', 'Last, First M.', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', 'DOI:10.1016/j.neuron.2016.12.011'); % optional
nwb
nwb =
NwbFile with properties: +.S9 { border-left: 1px solid rgb(217, 217, 217); border-right: 1px solid rgb(217, 217, 217); border-top: 1px solid rgb(217, 217, 217); border-bottom: 1px solid rgb(217, 217, 217); border-radius: 4px; padding: 6px 45px 4px 13px; line-height: 18.004px; min-height: 0px; white-space: nowrap; color: rgb(33, 33, 33); font-family: Menlo, Monaco, Consolas, "Courier New", monospace; font-size: 14px; }

Optogenetics

This tutorial will demonstrate how to write optogenetics data.

Creating an NWBFile object

When creating an NWB file, the first step is to create the NWBFile object using NwbFile.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', char(java.util.UUID.randomUUID), ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'general_experimenter', 'Last, First M.', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', 'DOI:10.1016/j.neuron.2016.12.011'); % optional
nwb
nwb =
NwbFile with properties: - nwb_version: '2.6.0' + nwb_version: '2.8.0' file_create_date: [] - identifier: 'b843652e-3404-48c7-8686-4904e786ea4c' + identifier: 'c90fcea4-7fcd-4fdf-aa84-16470b818579' session_description: 'mouse in open exploration' session_start_time: {[2018-04-25T02:30:03.000000+02:00]} timestamps_reference_time: [] @@ -75,6 +75,7 @@ general_subject: [] general_surgery: '' general_virus: '' + general_was_generated_by: '' intervals: [0×1 types.untyped.Set] intervals_epochs: [] intervals_invalid_times: [] @@ -84,11 +85,13 @@ stimulus_presentation: [0×1 types.untyped.Set] stimulus_templates: [0×1 types.untyped.Set] units: [] -

Adding optogenetic data

The ogen module contains two data types that you will need to write optogenetics data: OptogeneticStimulusSite, which contains metadata about the stimulus site, and OptogeneticSeries, which contains the values of the time series.
First, you need to create a Device object linked to the NWBFile:
device = types.core.Device();
nwb.general_devices.set('Device', device);
Now, you can create and add an OptogeneticStimulusSite.
ogen_stim_site = types.core.OptogeneticStimulusSite( ...
'device', types.untyped.SoftLink(device), ...
'description', 'This is an example optogenetic site.', ...
'excitation_lambda', 600.0, ...
'location', 'VISrl');
 
nwb.general_optogenetics.set('OptogeneticStimulusSite', ogen_stim_site);
With the OptogeneticStimulusSite added, you can now create and add an OptogeneticSeries. Here, we will generate some random data and specify the timing using starting_time and starting_time_rate. If you have samples at irregular intervals, you should use timestamps instead.
ogen_series = types.core.OptogeneticSeries( ...
'data', randn(20, 1), ...
'site', types.untyped.SoftLink(ogen_stim_site), ...
'starting_time', 0.0, ...
'starting_time_rate', 30.0); % Hz
nwb.stimulus_presentation.set('OptogeneticSeries', ogen_series);
 
nwb
nwb =
NwbFile with properties: - nwb_version: '2.6.0' +Warning: The following required properties are missing for instance for type "NwbFile": + timestamps_reference_time

Adding optogenetic data

The ogen module contains two data types that you will need to write optogenetics data: OptogeneticStimulusSite, which contains metadata about the stimulus site, and OptogeneticSeries, which contains the values of the time series.
First, you need to create a Device object linked to the NWBFile:
device = types.core.Device();
nwb.general_devices.set('Device', device);
Now, you can create and add an OptogeneticStimulusSite.
ogen_stim_site = types.core.OptogeneticStimulusSite( ...
'device', types.untyped.SoftLink(device), ...
'description', 'This is an example optogenetic site.', ...
'excitation_lambda', 600.0, ...
'location', 'VISrl');
 
nwb.general_optogenetics.set('OptogeneticStimulusSite', ogen_stim_site);
With the OptogeneticStimulusSite added, you can now create and add an OptogeneticSeries. Here, we will generate some random data and specify the timing using starting_time and starting_time_rate. If you have samples at irregular intervals, you should use timestamps instead.
ogen_series = types.core.OptogeneticSeries( ...
'data', randn(20, 1), ...
'site', types.untyped.SoftLink(ogen_stim_site), ...
'starting_time', 0.0, ...
'starting_time_rate', 30.0); % Hz
nwb.stimulus_presentation.set('OptogeneticSeries', ogen_series);
 
nwb
nwb =
NwbFile with properties: + + nwb_version: '2.8.0' file_create_date: [] - identifier: 'b843652e-3404-48c7-8686-4904e786ea4c' + identifier: 'c90fcea4-7fcd-4fdf-aa84-16470b818579' session_description: 'mouse in open exploration' session_start_time: {[2018-04-25T02:30:03.000000+02:00]} timestamps_reference_time: [] @@ -126,6 +129,7 @@ general_subject: [] general_surgery: '' general_virus: '' + general_was_generated_by: '' intervals: [0×1 types.untyped.Set] intervals_epochs: [] intervals_invalid_times: [] @@ -135,7 +139,9 @@ stimulus_presentation: [1×1 types.untyped.Set] stimulus_templates: [0×1 types.untyped.Set] units: [] -
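For reference, if the light stimulation had been delivered at irregular times, a timestamps-based OptogeneticSeries might look like the following sketch (all values are made up):
ogen_series_irregular = types.core.OptogeneticSeries( ...
    'data', randn(5, 1), ...
    'site', types.untyped.SoftLink(ogen_stim_site), ...
    'timestamps', [0.0, 0.5, 1.2, 2.0, 3.3]); % seconds
nwb.stimulus_presentation.set('OptogeneticSeriesIrregular', ogen_series_irregular);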
Now you can write the NWB file.
nwbExport(nwb, 'ogen_tutorial.nwb');
+ +Warning: The following required properties are missing for instance for type "NwbFile": + timestamps_reference_time
Now you can write the NWB file.
nwbExport(nwb, 'ogen_tutorial.nwb');

-
\ No newline at end of file +
+ \ No newline at end of file diff --git a/docs/source/_static/html/tutorials/ophys.html b/docs/source/_static/html/tutorials/ophys.html new file mode 100644 index 00000000..af436bd4 --- /dev/null +++ b/docs/source/_static/html/tutorials/ophys.html @@ -0,0 +1,573 @@ + +MatNWB Optical Physiology Tutorial

MatNWB Optical Physiology Tutorial

Introduction

In this tutorial, we will create fake data for a hypothetical optical physiology experiment with a freely moving animal. The types of data we will convert are:
  • Acquired two-photon images
  • Image segmentation (ROIs)
  • Fluorescence and dF/F response
It is recommended to first work through the Introduction to MatNWB tutorial, which demonstrates installing MatNWB and creating an NWB file with subject information, animal position, and trials, as well as writing and reading NWB files in MATLAB.
Please note: The dimensions of timeseries data in MatNWB should be defined in the opposite order of how it is defined in the nwb-schemas. In NWB, time is always stored in the first dimension of the data, whereas in MatNWB data should be specified with time along the last dimension. This is explained in more detail here: MatNWB <-> HDF5 Dimension Mapping.

Set up the NWB file

An NWB file represents a single session of an experiment. Each file must have a session_description, identifier, and session start time. Create a new NWBFile object with those and additional metadata. For all MatNWB functions, arguments are passed as MATLAB name-value pairs, where each argument name is followed by its value.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'timestamps_reference_time', datetime(2018, 4, 25, 3, 0, 45, 'TimeZone', 'local'), ...
'general_experimenter', 'LastName, FirstName', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', {'DOI:10.1016/j.neuron.2016.12.011'}); % optional
nwb
nwb =
NwbFile with properties: + + nwb_version: '2.8.0' + file_create_date: [] + identifier: 'Mouse5_Day3' + session_description: 'mouse in open exploration' + session_start_time: {[2018-04-25T02:30:03.000000+02:00]} + timestamps_reference_time: {[2018-04-25T03:00:45.000000+02:00]} + acquisition: [0×1 types.untyped.Set] + analysis: [0×1 types.untyped.Set] + general: [0×1 types.untyped.Set] + general_data_collection: '' + general_devices: [0×1 types.untyped.Set] + general_experiment_description: '' + general_experimenter: 'LastName, FirstName' + general_extracellular_ephys: [0×1 types.untyped.Set] + general_extracellular_ephys_electrodes: [] + general_institution: 'University of My Institution' + general_intracellular_ephys: [0×1 types.untyped.Set] + general_intracellular_ephys_experimental_conditions: [] + general_intracellular_ephys_filtering: '' + general_intracellular_ephys_intracellular_recordings: [] + general_intracellular_ephys_repetitions: [] + general_intracellular_ephys_sequential_recordings: [] + general_intracellular_ephys_simultaneous_recordings: [] + general_intracellular_ephys_sweep_table: [] + general_keywords: '' + general_lab: '' + general_notes: '' + general_optogenetics: [0×1 types.untyped.Set] + general_optophysiology: [0×1 types.untyped.Set] + general_pharmacology: '' + general_protocol: '' + general_related_publications: {'DOI:10.1016/j.neuron.2016.12.011'} + general_session_id: 'session_1234' + general_slices: '' + general_source_script: '' + general_source_script_file_name: '' + general_stimulus: '' + general_subject: [] + general_surgery: '' + general_virus: '' + general_was_generated_by: '' + intervals: [0×1 types.untyped.Set] + intervals_epochs: [] + intervals_invalid_times: [] + intervals_trials: [] + processing: [0×1 types.untyped.Set] + scratch: [0×1 types.untyped.Set] + stimulus_presentation: [0×1 types.untyped.Set] + stimulus_templates: [0×1 types.untyped.Set] + units: [] +

Optical Physiology

Optical physiology results are written in four steps:
  1. Create the imaging plane
  2. Store the acquired two-photon images
  3. Store the image segmentation (ROIs)
  4. Store the fluorescence and dF/F responses

Imaging Plane

First, you must create an ImagingPlane object, which will hold information about the area and method used to collect the optical imaging data. This requires creation of a Device object for the microscope and an OpticalChannel object. Then you can create an ImagingPlane.
Create a Device representing a two-photon microscope. The fields description, manufacturer, model_number, model_name, and serial_number are optional, but recommended. Then create an OpticalChannel and add both of these to the ImagingPlane.
device = types.core.Device( ...
'description', 'My two-photon microscope', ...
'manufacturer', 'Loki Labs', ...
'model_number', 'ABC-123', ...
'model_name', 'Loki 1.0', ...
'serial_number', '1234567890');
 
% Add device to nwb object
nwb.general_devices.set('Device', device);
 
optical_channel = types.core.OpticalChannel( ...
'description', 'description', ...
'emission_lambda', 500.);
 
imaging_plane_name = 'imaging_plane';
imaging_plane = types.core.ImagingPlane( ...
'optical_channel', optical_channel, ...
'description', 'a very interesting part of the brain', ...
'device', types.untyped.SoftLink(device), ...
'excitation_lambda', 600., ...
'imaging_rate', 5., ...
'indicator', 'GFP', ...
'location', 'my favorite brain location');
 
nwb.general_optophysiology.set(imaging_plane_name, imaging_plane);

Storing Two-Photon Data

You can create a TwoPhotonSeries object to represent two-photon imaging data. TwoPhotonSeries, like SpatialSeries, inherits from TimeSeries and is similar in behavior to OnePhotonSeries.
InternalTwoPhoton = types.core.TwoPhotonSeries( ...
'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
'starting_time', 0.0, ...
'starting_time_rate', 3.0, ...
'data', ones(200, 100, 1000), ...
'data_unit', 'lumens');
 
nwb.acquisition.set('2pInternal', InternalTwoPhoton);
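Because TwoPhotonSeries inherits from ImageSeries, the frames can also be left in an external file instead of being copied into the NWB file. A hedged sketch (the file name and starting frame are placeholders):
ExternalTwoPhoton = types.core.TwoPhotonSeries( ...
    'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
    'starting_time', 0.0, ...
    'starting_time_rate', 3.0, ...
    'external_file', 'images.tiff', ... % frames stay on disk in this file
    'external_file_starting_frame', 0, ...
    'format', 'external', ...
    'data_unit', 'lumens');
nwb.acquisition.set('2pExternal', ExternalTwoPhoton);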

Storing One-Photon Data

Now that we have our ImagingPlane, we can create a OnePhotonSeries object to store raw one-photon imaging data.
% using internal data. this data will be stored inside the NWB file
InternalOnePhoton = types.core.OnePhotonSeries( ...
'data', ones(100, 100, 1000), ...
'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
'starting_time', 0., ...
'starting_time_rate', 1.0, ...
'data_unit', 'normalized amplitude' ...
);
nwb.acquisition.set('1pInternal', InternalOnePhoton);

Motion Correction (optional)

You can also store the result of motion correction using a MotionCorrection object, a container type that can hold one or more CorrectedImageStack objects.
% Create the corrected ImageSeries
corrected = types.core.ImageSeries( ...
'description', 'A motion corrected image stack', ...
'data', ones(100, 100, 1000), ... % 3D data array
'data_unit', 'n/a', ...
'format', 'raw', ...
'starting_time', 0.0, ...
'starting_time_rate', 1.0 ...
);
 
% Create the xy_translation TimeSeries
xy_translation = types.core.TimeSeries( ...
'description', 'x,y translation in pixels', ...
'data', ones(2, 1000), ... % 2D data array
'data_unit', 'pixels', ...
'starting_time', 0.0, ...
'starting_time_rate', 1.0 ...
);
 
% Create the CorrectedImageStack
corrected_image_stack = types.core.CorrectedImageStack( ...
'corrected', corrected, ...
'original', types.untyped.SoftLink(InternalOnePhoton), ... % Ensure `InternalOnePhoton` exists
'xy_translation', xy_translation ...
);
 
% Create the MotionCorrection object
motion_correction = types.core.MotionCorrection();
motion_correction.correctedimagestack.set('CorrectedImageStack', corrected_image_stack);
The motion corrected data is considered processed data and will be added to the processing field of the nwb object using a ProcessingModule called "ophys". First, create the ProcessingModule object and then add the motion_correction object to it, naming it "MotionCorrection".
ophys_module = types.core.ProcessingModule( ...
'description', 'Contains optical physiology data');
ophys_module.nwbdatainterface.set('MotionCorrection', motion_correction);
Finally, add the "ophys" ProcessingModule to the nwb (Note that we can continue adding objects to the "ophys" ProcessingModule without needing to explicitly update the nwb):
nwb.processing.set('ophys', ophys_module);

Plane Segmentation

Image segmentation stores the detected regions of interest in the TwoPhotonSeries data. ImageSegmentation allows you to have more than one segmentation by creating more PlaneSegmentation objects.

Regions of interest (ROIs)

ROIs can be added to a PlaneSegmentation either as an image_mask or as a pixel_mask. An image mask is an array with the same size as a single frame of the TwoPhotonSeries that indicates where a single region of interest is. The image mask may be boolean or continuous between 0 and 1. A pixel_mask, on the other hand, is a list of indices (i.e., coordinates) and weights for the ROI. The pixel_mask is represented as a compound data type using a ragged array. Below is an example demonstrating how to create either an image_mask or a pixel_mask; changing the value of the selection variable updates the PlaneSegmentation object accordingly.
selection = "Create Image Mask"; % "Create Image Mask" or "Create Pixel Mask"
 
% generate fake image_mask data
imaging_shape = [100, 100];
x = imaging_shape(1);
y = imaging_shape(2);
 
n_rois = 20;
image_mask = zeros(y, x, n_rois);
center = randi(90,2,n_rois);
for i = 1:n_rois
image_mask(center(1,i):center(1,i)+10, center(2,i):center(2,i)+10, i) = 1;
end
 
if selection == "Create Pixel Mask"
ind = find(image_mask);
[y_ind, x_ind, roi_ind] = ind2sub(size(image_mask), ind);
 
pixel_mask_struct = struct();
pixel_mask_struct.x = uint32(x_ind); % Add x coordinates to struct field x
pixel_mask_struct.y = uint32(y_ind); % Add y coordinates to struct field y
pixel_mask_struct.weight = single(ones(size(x_ind)));
% Create pixel mask vector data
pixel_mask = types.hdmf_common.VectorData(...
'data', struct2table(pixel_mask_struct), ...
'description', 'pixel masks');
 
% When creating a pixel mask, it is also necessary to specify a
% pixel_mask_index vector. See the documentation for ragged arrays linked
% above to learn more.
num_pixels_per_roi = zeros(n_rois, 1); % Column vector
for i_roi = 1:n_rois
num_pixels_per_roi(i_roi) = sum(roi_ind == i_roi);
end
 
pixel_mask_index = uint16(cumsum(num_pixels_per_roi)); % Note: Use an integer
% type that can accommodate the maximum value of the cumulative sum
 
% Create pixel_mask_index vector
pixel_mask_index = types.hdmf_common.VectorIndex(...
'description', 'Index into pixel_mask VectorData', ...
'data', pixel_mask_index, ...
'target', types.untyped.ObjectView(pixel_mask) );
 
plane_segmentation = types.core.PlaneSegmentation( ...
'colnames', {'pixel_mask'}, ...
'description', 'roi pixel position (x,y) and pixel weight', ...
'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
'pixel_mask_index', pixel_mask_index, ...
'pixel_mask', pixel_mask ...
);
 
else % selection == "Create Image Mask"
plane_segmentation = types.core.PlaneSegmentation( ...
'colnames', {'image_mask'}, ...
'description', 'output from segmenting my favorite imaging plane', ...
'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
'image_mask', types.hdmf_common.VectorData(...
'data', image_mask, ...
'description', 'image masks') ...
);
end

Adding ROIs to NWB file

Now create an ImageSegmentation object and put the plane_segmentation object inside of it, naming it "PlaneSegmentation".
img_seg = types.core.ImageSegmentation();
img_seg.planesegmentation.set('PlaneSegmentation', plane_segmentation);
Add the img_seg object to the "ophys" ProcessingModule we created before, naming it "ImageSegmentation".
ophys_module.nwbdatainterface.set('ImageSegmentation', img_seg);

Storing fluorescence of ROIs over time

Now that ROIs are stored, you can store fluorescence data for these regions of interest. This type of data is stored using the RoiResponseSeries class.
To create a RoiResponseSeries object, we need to reference a set of rows from the PlaneSegmentation table to indicate which ROIs correspond to which rows of the recorded data matrix. This is done using a DynamicTableRegion, a type of link that references specific rows of a DynamicTable, such as a PlaneSegmentation table, by row index.
First, we create a DynamicTableRegion that references the ROIs of the PlaneSegmentation table.
roi_table_region = types.hdmf_common.DynamicTableRegion( ...
'table', types.untyped.ObjectView(plane_segmentation), ...
'description', 'all_rois', ...
'data', (0:n_rois-1)');
Then we create a RoiResponseSeries object to store fluorescence data for those ROIs.
roi_response_series = types.core.RoiResponseSeries( ...
'rois', roi_table_region, ...
'data', NaN(n_rois, 100), ... % [nRoi, nT]
'data_unit', 'lumens', ...
'starting_time_rate', 3.0, ...
'starting_time', 0.0);
To help data analysis and visualization tools know that this RoiResponseSeries object represents fluorescence data, we will store the RoiResponseSeries object inside of a Fluorescence object. Then we add the Fluorescence object into the same ProcessingModule named "ophys" that we created earlier.
fluorescence = types.core.Fluorescence();
fluorescence.roiresponseseries.set('RoiResponseSeries', roi_response_series);
 
ophys_module.nwbdatainterface.set('Fluorescence', fluorescence);
Tip: If you want to store dF/F data instead of fluorescence data, then store the RoiResponseSeries object in a DfOverF object, which works the same way as the Fluorescence class.
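For example, a dF/F version of the same traces could be stored as follows; this is a sketch that reuses the ROI table region from above, with placeholder data values:
dff_response_series = types.core.RoiResponseSeries( ...
    'rois', roi_table_region, ...
    'data', NaN(n_rois, 100), ... % [nRoi, nT] dF/F values
    'data_unit', 'n.a.', ...
    'starting_time_rate', 3.0, ...
    'starting_time', 0.0);

df_over_f = types.core.DfOverF();
df_over_f.roiresponseseries.set('RoiResponseSeries', dff_response_series);
ophys_module.nwbdatainterface.set('DfOverF', df_over_f);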

Writing the NWB file

nwb_file_name = 'ophys_tutorial.nwb';
if isfile(nwb_file_name); delete(nwb_file_name); end
nwbExport(nwb, nwb_file_name);
Warning: The property "grid_spacing_unit" of type "types.core.ImagingPlane" was not exported to file location "/general/optophysiology/imaging_plane" because it depends on the property "grid_spacing" which is unset.
Warning: The property "origin_coords_unit" of type "types.core.ImagingPlane" was not exported to file location "/general/optophysiology/imaging_plane" because it depends on the property "origin_coords" which is unset.

Reading the NWB file

read_nwb = nwbRead(nwb_file_name, 'ignorecache');
Data arrays are read lazily from the file. Calling TimeSeries.data does not read the data values; instead, it returns a DataStub object that can be indexed to read data on demand.
read_nwb.processing.get('ophys').nwbdatainterface.get('Fluorescence')...
.roiresponseseries.get('RoiResponseSeries').data
ans =
DataStub with properties: + + filename: 'ophys_tutorial.nwb' + path: '/processing/ophys/Fluorescence/RoiResponseSeries/data' + dims: [20 100] + ndims: 2 + dataType: 'double' +
This allows you to conveniently work with datasets that are too large to fit in RAM all at once. Access the data in the matrix using the load method.
load with no input arguments reads the entire dataset:
read_nwb.processing.get('ophys').nwbdatainterface.get('Fluorescence'). ...
roiresponseseries.get('RoiResponseSeries').data.load
ans = 20×100
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN +
If all you need is a section of the data, you can read only that section by indexing the DataStub object like a normal array in MATLAB. This will just read the selected region from disk into RAM. This technique is particularly useful if you are dealing with a large dataset that is too big to fit entirely into your available RAM.
read_nwb.processing.get('ophys'). ...
nwbdatainterface.get('Fluorescence'). ...
roiresponseseries.get('RoiResponseSeries'). ...
data(1:5, 1:10)
ans = 5×10
NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN + NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN +
% read back the image/pixel masks and display the first roi
plane_segmentation = read_nwb.processing.get('ophys'). ...
nwbdatainterface.get('ImageSegmentation'). ...
planesegmentation.get('PlaneSegmentation');
 
if ~isempty(plane_segmentation.image_mask)
roi_mask = plane_segmentation.image_mask.data(:,:,1);
elseif ~isempty(plane_segmentation.pixel_mask)
row = plane_segmentation.getRow(1, 'columns', {'pixel_mask'});
pixel_mask = row.pixel_mask{1};
roi_mask = zeros(imaging_shape);
ind = sub2ind(imaging_shape, pixel_mask.y, pixel_mask.x);
roi_mask(ind) = pixel_mask.weight;
end
imshow(roi_mask)

Learn more!

See the API documentation to learn what data types are available.

Other MatNWB tutorials

Python tutorials

See our tutorials for more details about your data type:
Check out other tutorials that teach advanced NWB topics:

+
+ +
+ \ No newline at end of file diff --git a/tutorials/html/ophys_tutorial_schematic.png b/docs/source/_static/html/tutorials/ophys_tutorial_schematic.png similarity index 100% rename from tutorials/html/ophys_tutorial_schematic.png rename to docs/source/_static/html/tutorials/ophys_tutorial_schematic.png diff --git a/tutorials/html/read_demo.html b/docs/source/_static/html/tutorials/read_demo.html similarity index 99% rename from tutorials/html/read_demo.html rename to docs/source/_static/html/tutorials/read_demo.html index 46c9dafb..dce55448 100644 --- a/tutorials/html/read_demo.html +++ b/docs/source/_static/html/tutorials/read_demo.html @@ -353,4 +353,13 @@ % out the DANDI breakout session later in this event. ##### SOURCE END ##### --> -
\ No newline at end of file +
+ \ No newline at end of file diff --git a/tutorials/html/remote_read.html b/docs/source/_static/html/tutorials/remote_read.html similarity index 98% rename from tutorials/html/remote_read.html rename to docs/source/_static/html/tutorials/remote_read.html index 9746120e..10ca8819 100644 --- a/tutorials/html/remote_read.html +++ b/docs/source/_static/html/tutorials/remote_read.html @@ -55,4 +55,13 @@ % ##### SOURCE END ##### --> -
\ No newline at end of file +
+ \ No newline at end of file diff --git a/docs/source/_static/html/tutorials/scratch.html b/docs/source/_static/html/tutorials/scratch.html new file mode 100644 index 00000000..64624440 --- /dev/null +++ b/docs/source/_static/html/tutorials/scratch.html @@ -0,0 +1,173 @@ + +Scratch Data

Scratch Data

This tutorial will focus on the basics of working with an NWBFile for storing non-standardizable data. For example, you may want to store results from one-off analyses that are only of temporary utility. NWB provides in-file scratch space as a dedicated location where miscellaneous non-standard data may be written.

Setup

Let us first set up an environment with some "acquired data".
ContextFile = NwbFile(...
'session_description', 'demonstrate NWBFile scratch', ... % required
'identifier', 'SCRATCH-0', ... % required
'session_start_time', datetime(2019, 4, 3, 11, 0, 0, 'TimeZone', 'local'), ... % required
'file_create_date', datetime(2019, 4, 15, 12, 0, 0, 'TimeZone', 'local'), ... % optional
'general_experimenter', 'Niu, Lawrence', ...
'general_institution', 'NWB' ...
);
% simulate some data
timestamps = 0:100:1024;
data = sin(0.333 .* timestamps) ...
+ cos(0.1 .* timestamps) ...
+ randn(1, length(timestamps));
RawTs = types.core.TimeSeries(...
'data', data, ...
'data_unit', 'm', ...
'starting_time', 0., ...
'starting_time_rate', 100, ...
'description', 'simulated acquired data' ...
);
ContextFile.acquisition.set('raw_timeseries', RawTs);
 
% "analyze" the simulated data
% we provide a re-implementation of scipy.signal.correlate(..., mode='same')
% Ideally, you should use MATLAB-native code though using its equivalent function (xcorr) requires
% the Signal Processing Toolbox
correlatedData = sameCorr(RawTs.data, ones(128, 1)) ./ 128;
% If you are unsure of how HDF5 paths map to MatNWB property structures, we suggest using HDFView to
% verify. In most cases, MatNWB properties map directly to HDF5 paths.
FilteredTs = types.core.TimeSeries( ...
'data', correlatedData, ...
'data_unit', 'm', ...
'starting_time', 0, ...
'starting_time_rate', 100, ...
'description', 'cross-correlated data' ...
)
FilteredTs =
TimeSeries with properties: + + starting_time_unit: 'seconds' + timestamps_interval: 1 + timestamps_unit: 'seconds' + data: [0.0249 0.0249 0.0249 0.0249 0.0249 0.0249 0.0249 0.0249 0.0249 0.0249 0.0249] + comments: 'no comments' + control: [] + control_description: '' + data_continuity: '' + data_conversion: 1 + data_offset: 0 + data_resolution: -1 + data_unit: 'm' + description: 'cross-correlated data' + starting_time: 0 + starting_time_rate: 100 + timestamps: [] +
ProcModule = types.core.ProcessingModule( ...
'description', 'a module to store filtering results', ...
'filtered_timeseries', FilteredTs ...
);
ContextFile.processing.set('core', ProcModule);
nwbExport(ContextFile, 'context_file.nwb');

Warning Regarding the Usage of Scratch Space

Data written into the scratch space is not intended for reuse or sharing. Standard NWB types, along with any extensions, should always be used for any data intended to be shared. Published data should not include scratch data, and reusing published data should not require scratch data for processing.

Writing Data to Scratch Space

Let us first copy what we need from the processed data file.
ScratchFile = NwbFile('identifier', 'SCRATCH-1');
ContextFile = nwbRead('./context_file.nwb', 'ignorecache');
% again, copy the required metadata from the processed file.
ScratchFile.session_description = ContextFile.session_description;
ScratchFile.session_start_time = ContextFile.session_start_time;
We can now run an analysis that has no formal specification but whose results we still wish to store.
% ProcessingModule stores its timeseries inside the "nwbdatainterface" property, which is a Set of
% NWBDataInterface objects. This property is not directly mapped to the NWB file but is used to
% distinguish these objects from the DynamicTable objects stored under the "dynamictable" property.
FilteredTs = ContextFile.processing.get('core').nwbdatainterface.get('filtered_timeseries');
% note: MatNWB does not currently support complex numbers. If you wish to store the data, consider
% storing each number as a struct which will write the data to HDF5 using compound types.
dataFft = real(fft(FilteredTs.data.load()));
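% The two lines below are an illustrative sketch (not part of the original analysis) of the
% struct-based approach mentioned in the note above; the field names 'real' and 'imag' are
% arbitrary choices.
complexFft = fft(FilteredTs.data.load());
fftAsStruct = struct('real', real(complexFft), 'imag', imag(complexFft));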
ScratchData = types.core.ScratchData( ...
'data', dataFft, ...
'notes', 'discrete Fourier transform from filtered data' ...
)
ScratchData =
ScratchData with properties:

    notes: 'discrete Fourier transform from filtered data'
    data: [11×1 double]
ScratchFile.scratch.set('dft_filtered', ScratchData);
nwbExport(ScratchFile, 'scratch_analysis.nwb');
The scratch_analysis.nwb file will now have scratch data stored in it:
scratch_filtered.png
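As a quick check (a minimal sketch reusing the file and key names from this tutorial), the scratch entry can be read back like any other NWB data:
ReadScratchFile = nwbRead('scratch_analysis.nwb', 'ignorecache');
dftFromFile = ReadScratchFile.scratch.get('dft_filtered').data.load();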
function C = sameCorr(A, B)
% SAMECORR scipy.signal.correlate(..., mode="same") equivalent
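% Flipping B along every dimension and taking its complex conjugate before convolving
% implements correlation; the 'same' flag keeps the output centered and the same length
% as A, mirroring scipy's mode='same'.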
for iDim = 1:ndims(B)
B = flip(B, iDim);
end
C = conv(A, conj(B), 'same');
end
+
+ +
+ \ No newline at end of file diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 00000000..ffad775c --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,104 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +import os +import sys + +sys.path.append('sphinx_extensions') +from docstring_processors import process_matlab_docstring +from custom_roles import MatClassRole, register_matlab_types, register_type_short_names + +def setup(app): + app.connect("autodoc-process-docstring", process_matlab_docstring) + app.connect("env-purge-doc", register_matlab_types) + app.connect("env-purge-doc", register_type_short_names) + app.add_role('matclass', MatClassRole()) + + +project = 'MatNWB' +copyright = '2024, Neurodata Without Borders' # Todo: compute year +author = 'Neurodata Without Borders' + +release = '2.7.0' # Todo: read from Contents.m + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + "sphinx.ext.mathjax", # or other extensions you may need + 'sphinxcontrib.matlab', # generate docs for matlab functions + 'sphinx.ext.autodoc', # autogenerate docs + 'sphinx.ext.napoleon', # for parsing e.g google style parameter docstring + 'sphinx.ext.viewcode', + 'sphinx.ext.linkcode', + 'sphinx.ext.extlinks', # For maintaining external links + 'sphinx_copybutton', +] + +# -- Options that are MATLAB specific ---------------------------------------- + +highlight_language = 'matlab' + +primary_domain = "mat" + +# Get the absolute path of the script's directory +script_dir = os.path.dirname(os.path.abspath(__file__)) + +# Compute the absolute path two levels up from the script's directory +matlab_src_dir = os.path.abspath(os.path.join(script_dir, '..', '..')) + +matlab_class_signature = True +matlab_auto_link = "all" +matlab_show_property_default_value = True + + +def linkcode_resolve(domain, info): + filename = info['module'].replace('.', '/') + source_url = 'https://github.com/NeurodataWithoutBorders/matnwb/blob/master/'+filename+'/'+info['fullname']+'.m' + print(source_url) + return source_url + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "sphinx_rtd_theme" + +html_static_path = ['_static'] +html_logo = os.path.join(matlab_src_dir, 'logo', 'logo_matnwb_small.png') +html_favicon = os.path.join(matlab_src_dir, 'logo', 'logo_favicon.svg') + +html_theme_options = { + # "style_nav_header_background": "#AFD2E8" + "style_nav_header_background": "#000000" + } + # 'navigation_depth': 1, # Adjust the depth as needed + +templates_path = ['_templates'] +exclude_patterns = [] +html_css_files = [ + 'css/custom.css', +] + + +# External links used in the documentation +extlinks = { + 'incf_lesson': ('https://training.incf.org/lesson/%s', '%s'), + 'incf_collection': ('https://training.incf.org/collection/%s', '%s'), + 'nwb_extension': ('https://github.com/nwb-extensions/%s', '%s'), + 'pynwb': ('https://github.com/NeurodataWithoutBorders/pynwb/%s', '%s'), + 'nwb_overview': 
('https://nwb-overview.readthedocs.io/en/latest/%s', '%s'), + 'hdmf-docs': ('https://hdmf.readthedocs.io/en/stable/%s', '%s'), + 'dandi': ('https://www.dandiarchive.org/%s', '%s'), + "nwbinspector": ("https://nwbinspector.readthedocs.io/en/dev/%s", "%s"), + 'hdmf-zarr': ('https://hdmf-zarr.readthedocs.io/en/latest/%s', '%s'), + 'matlab-online-tutorial': ('https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/%s.mlx', '%s'), + 'nwb-core-type-schema': ('https://nwb-schema.readthedocs.io/en/latest/format.html#%s', '%s'), + 'nwb-hdmf_common-type-schema': ('https://hdmf-common-schema.readthedocs.io/en/stable/format.html#%s', '%s'), + 'nwb-hdmf_experimental-type-schema': ('https://hdmf-common-schema.readthedocs.io/en/stable/format.html#%s', '%s') +} diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 00000000..6e25d53e --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,41 @@ +############## +NWB for MATLAB +############## + +MatNWB is a MATLAB package for working with NWB files. It provides a high-level +API for efficiently working with neurodata stored in the NWB format. If you are +new to NWB and would like to learn more, then please also visit the +:nwb_overview:`NWB Overview <>` website, which provides an entry point for +researchers and developers interested in using NWB. + + +******** +Contents +******** + +.. toctree:: + :maxdepth: 2 + :caption: Getting Started + + pages/getting_started/installation_users + pages/getting_started/important + pages/getting_started/file_read + pages/tutorials/index + pages/getting_started/overview_citing + +.. toctree:: + :maxdepth: 2 + :caption: MatNWB Documentation + + pages/functions/index + pages/neurodata_types/core/index + pages/neurodata_types/hdmf_common/index + pages/neurodata_types/hdmf_experimental/index + +.. toctree:: + :maxdepth: 2 + :caption: For Developers + + pages/developer/contributing + pages/developer/documentation + diff --git a/docs/source/pages/developer/contributing.rst b/docs/source/pages/developer/contributing.rst new file mode 100644 index 00000000..c28ab747 --- /dev/null +++ b/docs/source/pages/developer/contributing.rst @@ -0,0 +1,5 @@ +Contribution Guidelines +======================= + +Coming soon! + diff --git a/docs/source/pages/developer/documentation.rst b/docs/source/pages/developer/documentation.rst new file mode 100644 index 00000000..0c34b3f9 --- /dev/null +++ b/docs/source/pages/developer/documentation.rst @@ -0,0 +1,8 @@ +Documentation +============= + +.. toctree:: + :maxdepth: 1 + + documentation/formatting_docstrings + diff --git a/docs/source/pages/developer/documentation/formatting_docstrings.rst b/docs/source/pages/developer/documentation/formatting_docstrings.rst new file mode 100644 index 00000000..fe1998a9 --- /dev/null +++ b/docs/source/pages/developer/documentation/formatting_docstrings.rst @@ -0,0 +1,176 @@ +Writing a Properly Documented MATLAB Docstring +============================================== + +A well-documented MATLAB function should be structured to provide all necessary details about its purpose, usage, inputs, outputs, and examples in a clear and consistent format. This guide outlines the key sections and formatting rules to follow when documenting MATLAB functions, using ``NWBREAD`` as an example. + +1. Function Summary +------------------- + +Provide a one-line summary of the function's purpose at the very beginning of the docstring. Use uppercase function names followed by a concise description. 
+ +**Example**:: + + % NWBREAD - Read an NWB file. + +2. Syntax Section +------------------ + +Document the different ways the function can be called (function signatures). Include all variations and briefly describe their purpose. Each syntax line should start with a code literal and describe what it does. + +**Example**:: + + % Syntax: + % nwb = NWBREAD(filename) Reads the nwb file at filename and returns an + % NWBFile object representing its contents. + % + % nwb = NWBREAD(filename, flags) Reads the nwb file using optional + % flags controlling the mode for how to read the file. See input + % arguments for a list of available flags. + % + % nwb = NWBREAD(filename, Name, Value) Reads the nwb file using optional + % name-value pairs controlling options for how to read the file. + +3. Input Arguments +------------------- + +Provide a detailed description of all input arguments. Use the following format for each input: +- Start with a ``-`` followed by the argument name. +- Add the argument type in parentheses (e.g., ``(string)``). +- Write a concise description on the same line or in an indented paragraph below. +- For optional or additional parameters, list their sub-arguments as indented items. + +**Example**:: + + % Input Arguments: + % - filename (string) - + % Filepath pointing to an NWB file. + % + % - flags (string) - + % Flag for setting the mode for the NWBREAD operation. Available options are: + % 'ignorecache'. If the 'ignorecache' flag is used, classes for NWB data types + % are not re-generated based on the embedded schemas in the file. + % + % - options (name-value pairs) - + % Optional name-value pairs. Available options: + % + % - savedir (string) - + % A folder to save generated classes for NWB types. + +4. Output Arguments +-------------------- + +Document all outputs of the function. Use a similar format as the input arguments: +- Start with a ``-`` followed by the output name. +- Add the output type in parentheses. +- Provide a brief description. + +**Example**:: + + % Output Arguments: + % - nwb (NwbFile) - Nwb file object + +5. Usage Examples +------------------ + +Provide practical examples of how to use the function. Each example should: +- Start with "Example X - Description" and be followed by a colon (``::``). +- Include MATLAB code blocks, indented with spaces. +- Add comments in the code to explain each step if necessary. + +**Example**:: + + % Usage: + % Example 1 - Read an NWB file:: + % + % nwb = nwbRead('data.nwb'); + % + % Example 2 - Read an NWB file without re-generating classes for NWB types:: + % + % nwb = nwbRead('data.nwb', 'ignorecache'); + % + % Note: This is a good option to use if you are reading several files + % which are created of the same version of the NWB schemas. + % + % Example 3 - Read an NWB file and generate classes for NWB types in the current working directory:: + % + % nwb = nwbRead('data.nwb', 'savedir', '.'); + +6. See Also +----------- + +Use the ``See also:`` section to reference related functions or objects. List each item separated by commas and include cross-references if applicable. + +**Example**:: + + % See also: + % generateCore, generateExtension, NwbFile, nwbExport + +7. Formatting Tips +------------------- + +- **Consistent Indentation**: + - Indent descriptions or additional information using two spaces. + +- **Bold Text**: + - Use ``**`` around key elements like argument names in the rendered documentation. + +- **Code Literals**: + - Use double backticks (``) for MATLAB code snippets in descriptions. 
+ +- **Directives**: + - Use Sphinx-compatible directives for linking (``:class:``, ``:func:``, etc.) when writing in RST. + +8. Final Example +----------------- + +**Complete Example**:: + + % NWBREAD - Read an NWB file. + % + % Syntax: + % nwb = NWBREAD(filename) Reads the nwb file at filename and returns an + % NWBFile object representing its contents. + % + % nwb = NWBREAD(filename, flags) Reads the nwb file using optional + % flags controlling the mode for how to read the file. See input + % arguments for a list of available flags. + % + % nwb = NWBREAD(filename, Name, Value) Reads the nwb file using optional + % name-value pairs controlling options for how to read the file. + % + % Input Arguments: + % - filename (string) - + % Filepath pointing to an NWB file. + % + % - flags (string) - + % Flag for setting the mode for the NWBREAD operation. Available options are: + % 'ignorecache'. If the 'ignorecache' flag is used, classes for NWB data types + % are not re-generated based on the embedded schemas in the file. + % + % - options (name-value pairs) - + % Optional name-value pairs. Available options: + % + % - savedir (string) - + % A folder to save generated classes for NWB types. + % + % Output Arguments: + % - nwb (NwbFile) - Nwb file object + % + % Usage: + % Example 1 - Read an NWB file:: + % + % nwb = nwbRead('data.nwb'); + % + % Example 2 - Read an NWB file without re-generating classes for NWB types:: + % + % nwb = nwbRead('data.nwb', 'ignorecache'); + % + % Note: This is a good option to use if you are reading several files + % which are created of the same version of the NWB schemas. + % + % Example 3 - Read an NWB file and generate classes for NWB types in the current working directory:: + % + % nwb = nwbRead('data.nwb', 'savedir', '.'); + % + % See also: + % generateCore, generateExtension, NwbFile, nwbExport diff --git a/docs/source/pages/functions/NwbFile.rst b/docs/source/pages/functions/NwbFile.rst new file mode 100644 index 00000000..2327759d --- /dev/null +++ b/docs/source/pages/functions/NwbFile.rst @@ -0,0 +1,7 @@ +NwbFile +======= + +.. mat:module:: . +.. autoclass:: NwbFile + :members: + :show-inheritance: diff --git a/docs/source/pages/functions/generateCore.rst b/docs/source/pages/functions/generateCore.rst new file mode 100644 index 00000000..33eb05b5 --- /dev/null +++ b/docs/source/pages/functions/generateCore.rst @@ -0,0 +1,5 @@ +generateCore +============ + +.. mat:module:: . +.. autofunction:: generateCore diff --git a/docs/source/pages/functions/generateExtension.rst b/docs/source/pages/functions/generateExtension.rst new file mode 100644 index 00000000..42a93291 --- /dev/null +++ b/docs/source/pages/functions/generateExtension.rst @@ -0,0 +1,5 @@ +generateExtension +================= + +.. mat:module:: . +.. autofunction:: generateExtension diff --git a/docs/source/pages/functions/index.rst b/docs/source/pages/functions/index.rst new file mode 100644 index 00000000..35497918 --- /dev/null +++ b/docs/source/pages/functions/index.rst @@ -0,0 +1,15 @@ +MatNWB Functions +================ + +These are the main functions of the MatNWB API + +.. 
toctree:: + :maxdepth: 2 + :caption: Functions + + nwbRead + NwbFile + nwbExport + generateCore + generateExtension + nwbClearGenerated diff --git a/docs/source/pages/functions/nwbClearGenerated.rst new file mode 100644 index 00000000..166e00fb --- /dev/null +++ b/docs/source/pages/functions/nwbClearGenerated.rst @@ -0,0 +1,5 @@ +nwbClearGenerated +================= + +.. mat:module:: . +.. autofunction:: nwbClearGenerated diff --git a/docs/source/pages/functions/nwbExport.rst new file mode 100644 index 00000000..ee4e5c0e --- /dev/null +++ b/docs/source/pages/functions/nwbExport.rst @@ -0,0 +1,5 @@ +nwbExport +========= + +.. mat:module:: . +.. autofunction:: nwbExport diff --git a/docs/source/pages/functions/nwbRead.rst new file mode 100644 index 00000000..94d455e5 --- /dev/null +++ b/docs/source/pages/functions/nwbRead.rst @@ -0,0 +1,5 @@ +nwbRead +======= + +.. mat:module:: . +.. autofunction:: nwbRead diff --git a/docs/source/pages/getting_started/file_read.rst new file mode 100644 index 00000000..a3aa4616 --- /dev/null +++ b/docs/source/pages/getting_started/file_read.rst @@ -0,0 +1,21 @@ +Reading with MatNWB +=================== + +For most files, MatNWB only requires the :func:`nwbRead` call: + +.. code-block:: MATLAB + + nwb = nwbRead('path/to/filename.nwb'); + +This call will read the file and create the necessary NWB schema class files, as well as any extension schemata that are needed for the file itself. This is because both PyNWB and MatNWB embed a copy of the schema environment into the NWB file when it is written. + + +The returned object above is an :class:`NwbFile` object, which serves as the root object you can use to browse the contents of the file. More detail about the NwbFile class can be found here: :ref:`matnwb-read-nwbfile-intro`. + +.. toctree:: + :maxdepth: 2 + + file_read/nwbfile + file_read/dynamictable + file_read/untyped + file_read/troubleshooting \ No newline at end of file diff --git a/docs/source/pages/getting_started/file_read/dynamictable.rst new file mode 100644 index 00000000..2242468e --- /dev/null +++ b/docs/source/pages/getting_started/file_read/dynamictable.rst @@ -0,0 +1,37 @@ +.. _matnwb-read-dynamic-table-intro: + +The Dynamic Table Type +====================== + +The :class:`types.hdmf_common.DynamicTable` is a special NWB type composed of multiple ``VectorData`` objects which act like the columns of a table. The benefit of this type composition is that it allows us to add user-defined columns to a ``DynamicTable`` without having to extend the NWB data schema, and each column can be accessed independently using regular NWB syntax and modified as a regular dataset. + +With such an object hierarchy, however, there is no easy way to view the Dynamic Table data row by row. This is where ``getRow`` comes in. + +.. _matnwb-read-dynamic-table-row-view: + +Row-by-row viewing +~~~~~~~~~~~~~~~~~~ + +``getRow`` retrieves one or more rows of the dynamic table and produces a MATLAB `table `_ with a representation of the data. Note that this returned table object is **readonly**; any changes to the returned table will not be reflected back into the NWB file. + +By default, you must provide the row index as an argument. This is 1-indexed and follows MATLAB indexing behavior. + +..
code-block:: MATLAB + + tableData = dynamicTable.getRow(1); + +You can also filter your view by specifying what columns you wish to see. + +.. code-block:: MATLAB + + filteredData = dynamicTable.getRow(1, 'columns', {'columnName'}); + +In the above example, the ``filteredData`` table will only have the "columnName" column data loaded. + +Finally, if you prefer to select using your custom ``id`` column, you can do so by setting the ``useId`` keyword. + +.. code-block:: MATLAB + + tableData = dynamicTable.getRow(1, 'useId', true); + +For more information regarding Dynamic Tables in MatNWB as well as information regarding writing data to them, please see the `MatNWB DynamicTables Tutorial <../../tutorials/dynamic_tables.html>`_. diff --git a/docs/source/pages/getting_started/file_read/nwbfile.rst new file mode 100644 index 00000000..106abca0 --- /dev/null +++ b/docs/source/pages/getting_started/file_read/nwbfile.rst @@ -0,0 +1,31 @@ +.. _matnwb-read-nwbfile-intro: + +Using the NwbFile Class +----------------------- + +The :class:`NwbFile` class represents the root object for the NWB file and consists of properties and values which map indirectly to the internal HDF5 dataset. + +.. image:: https://github.com/NeurodataWithoutBorders/nwb-overview/blob/main/docs/source/img/matnwb_NwbFile.png?raw=true + +In most cases, the types contained in these files were generated by the embedded NWB schema in the file (or separately if you opted to generate them separately). These types can be traversed using regular MATLAB syntax for accessing properties and their values. + +Aside from the generated Core and Extension types, there are "Untyped" utility Types which are covered in greater detail in :ref:`matnwb-read-untyped-intro`. + +.. _matnwb-read-nwbfile-searchfor: + +Searching by Type +~~~~~~~~~~~~~~~~~ + +The NwbFile also allows for searching the entire NWB file by type using its :meth:`NwbFile.searchFor` method. + +You can search for only the class name: + +.. image:: https://github.com/NeurodataWithoutBorders/nwb-overview/blob/main/docs/source/img/matnwb_searchForExample.png?raw=true + +Or use the ``'includeSubClasses'`` optional argument to search all subclasses: + +.. image:: https://github.com/NeurodataWithoutBorders/nwb-overview/blob/main/docs/source/img/matnwb_searchForExample-withSubclassing.png?raw=true + +.. note:: + + As seen above, the keys of the Map returned by the :meth:`NwbFile.searchFor` method can be paired with the :meth:`NwbFile.resolve` method to effectively retrieve an object from any NwbFile. This is also true for internal HDF5 paths. diff --git a/docs/source/pages/getting_started/file_read/troubleshooting.rst new file mode 100644 index 00000000..cedff55b --- /dev/null +++ b/docs/source/pages/getting_started/file_read/troubleshooting.rst @@ -0,0 +1,61 @@ +.. _matnwb-read-troubleshooting-intro: + +Troubleshooting File Reads in MatNWB +==================================== + +Outlined below are the most common issues reported by users when they read an NWB file, as well as common troubleshooting approaches to resolve them. + +.. _matnwb-read-troubleshooting-version-conflict: + +Schema Version Conflicts +~~~~~~~~~~~~~~~~~~~~~~~~ + +If you run into an error where reading a file appears to expect the wrong properties, you should first check whether your MATLAB path is pointing to other environments with the same packages.
You can use MATLAB's internal `which command `_ to check for unexpected class locations. + +.. _matnwb-read-troubleshooting-multiple-env: + +Multiple Schema Environments +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +By default, MatNWB generates all class files in its own installation directory. However, if your work requires you to manipulate multiple different schema versions or extension environments, you may want to generate the class files in a local directory so that MATLAB will default to those classes instead. + +To do this, you can use the optional ``savedir`` keyword argument with ``nwbRead``, which allows you to specify a directory location within which your generated class files will be saved. + +.. code-block:: MATLAB + + nwb = nwbRead('/path/to/matnwb/file.nwb', 'savedir', '.'); % write class files to current working directory. + +.. note:: + + Other generation functions ``generateCore`` and ``generateExtension`` also support the ``savedir`` option. + +.. _matnwb-read-troubleshooting-missing-schema: + +Missing Embedded Schemata +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Some older NWB files do not have an embedded schema. To read from these files, you will need to use the API generation functions ``generateCore`` and ``generateExtension`` to generate the class files before calling ``nwbRead`` on them. You can also use the utility function ``util.getSchemaVersion`` to retrieve the correct Core schema for the file you are trying to read: + +.. code-block:: MATLAB + + schemaVersion = util.getSchemaVersion('/path/to/matnwb/file.nwb'); + generateCore(schemaVersion); + generateExtension('path/to/extension/namespace.yaml'); + +.. _matnwb-read-troubleshooting-ignorecache: + +Avoiding Class Regeneration +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you wish to read your file multiple times, you may not want to regenerate the class files every time since you know that your current environment is correct. For this case, you can use ``nwbRead``'s optional argument ``ignorecache``, which will ignore the embedded schema and attempt to read your file with the class files accessible from your MATLAB path or current working directory. + +.. code-block:: MATLAB + + nwb = nwbRead('path/to/matnwb/file.nwb', 'ignorecache'); + +.. _matnwb-read-troubleshooting-bottom: + +Bottom of the Barrel +~~~~~~~~~~~~~~~~~~~~ + +If you're here, you've probably reached your wits' end and wish for more specific help. In such times, you can always contact the NWB team either as a message on our `NWB HelpDesk `_, `Slack Workspace `_, or as an issue on `GitHub `_. diff --git a/docs/source/pages/getting_started/file_read/untyped.rst new file mode 100644 index 00000000..9213a62d --- /dev/null +++ b/docs/source/pages/getting_started/file_read/untyped.rst @@ -0,0 +1,77 @@ +.. _matnwb-read-untyped-intro: + +Utility Types in MatNWB +======================= + +.. note:: + + Documentation for "untyped" types will be added soon + + +"Untyped" utility types are tools which provide flexibility while still enforcing certain constraints imposed by the NWB schema. These types are commonly stored in the ``+types/+untyped/`` package directories in your MatNWB installation. + +.. _matnwb-read-untyped-sets-anons: + +Sets and Anons +~~~~~~~~~~~~~~ + +The **Set** (``types.untyped.Set`` or Constrained Sets) is used to capture a dynamic number of particular NWB-typed objects. They may contain certain type constraints on what types are allowed to be set. Set keys and values can be set and retrieved using their ``set`` and ``get`` methods:
Set keys and values can be set and retrieved using their ``set`` and ``get`` methods: + +.. code-block:: MATLAB + + value = someSet.get('key name'); + +.. code-block:: MATLAB + + someSet.set('key name', value); + +.. note:: + + Sets also borrow ``containers.Map``'s ``keys`` and ``values`` methods to retrieve cell arrays of either. + +The **Anon** type (``types.untyped.Anon``) can be understood as a Set type with only a single key-value entry. This rarer type is only used for cases where the name for the stored object can be set by the user. Anon types may also hold NWB type constraints like Set. + +.. _matnwb-read-untyped-datastub-datapipe: + +DataStubs and DataPipes +~~~~~~~~~~~~~~~~~~~~~~~ + +**DataStubs** serves as a read-only link to your data. It allows for MATLAB-style indexing to retrieve the data stored on disk. + +.. image:: https://github.com/NeurodataWithoutBorders/nwb-overview/blob/main/docs/source/img/matnwb_datastub.png?raw=true + + +**DataPipes** are similar to DataStubs in that they allow you to load data from disk; however, they also provide a wide array of features that allow the user to write data to disk, either by streaming parts of data in at a time or by compressing the data before writing. The DataPipe is an advanced type and users looking to leverage DataPipe's capabilities to stream/iteratively write or compress data should read the `Advanced Data Write Tutorial <../../tutorials/dataPipe.html>`_. + +.. _matnwb-read-untyped-links-views: + +Links and Views +~~~~~~~~~~~~~~~ + +**Links** (either ``types.untyped.SoftLink`` or ``types.untyped.ExternalLink``) are views that point to another NWB object, either within the same file or in another external one. *SoftLinks* contain a path into the same NWB file while *ExternalLinks* additionally hold a ``filename`` field to point to an external NWB file. Both types use their ``deref`` methods to retrieve the NWB object that they point to though *SoftLinks* require the NwbFile object that was read in. + +.. code-block:: MATLAB + + referencedObject = softLink.deref(rootNwbFile); + +.. code-block:: MATLAB + + referencedObject = externalLink.deref(); + +.. note:: + + Links are not validated on write by default. It is entirely possible that a link will simply never resolve, either because the path to the NWB object is wrong, or because the external file is simply missing from the NWB distribution. + +**Views** (either ``types.untyped.ObjectView`` or ``types.untyped.RegionView``) are more advanced references which can point to NWB types as well as segments of raw data from a dataset. *ObjectViews* will point to NWB types while *RegionViews* will point to some subset of data. Both types use ``refresh`` to retrieve their referenced data. + +.. code-block:: MATLAB + + referencedObject = objectView.refresh(rootNwbFile); + +.. code-block:: MATLAB + + dataSubset = regionView.refresh(rootNwbFile); + +.. note:: + + Unlike *Links*, Views cannot point to NWB objects outside of their respective files. Views are also validated on write and will always point to a valid NWB object or raw data if written without errors. diff --git a/docs/source/pages/getting_started/important.rst b/docs/source/pages/getting_started/important.rst new file mode 100644 index 00000000..b458c5f8 --- /dev/null +++ b/docs/source/pages/getting_started/important.rst @@ -0,0 +1,104 @@ +Important +========= + +When using MatNWB, it is important to understand the differences in how array +dimensions are ordered in MATLAB versus HDF5. 
While the NWB documentation and +tutorials generally follow the NWB Schema Specifications, MatNWB requires data +to be added with dimensions in reverse order. + +For example, in the NWB Schema, the time dimension is specified as the first +dimension of a dataset. However, in MatNWB, the time dimension should always +be added as the last dimension. + +Data Dimensions +--------------- + +Dimension Ordering +^^^^^^^^^^^^^^^^^^ + +NWB files use the HDF5 format to store data. There are two main differences +between the way MATLAB and HDF5 represent dimensions. The first is that HDF5 +is C-ordered, which means it stores data in a rows-first pattern, while +MATLAB is F-ordered, storing data in the reverse pattern, with the last +dimension of the array stored consecutively. The result is that the data in +HDF5 is effectively the transpose of the array in MATLAB. The second difference +is that HDF5 can store 1-D arrays, but in MATLAB the lowest dimensionality of +an array is 2-D. Due to differences in how MATLAB and HDF5 represent data, the +dimensions of datasets are flipped when writing to/from file in MatNWB. This +behavior differs depending on whether :class:`types.hdmf_common.VectorData` +use ``DataPipe`` objects to contain the data. It's important to keep in mind +the mappings below to make sure data is written to and read from file as expected. + +Without DataPipes +^^^^^^^^^^^^^^^^^ + +See the documentation at the following link: +`without DataPipes <../tutorials/dimensionMapNoDataPipes.html>`_ + +**Writing to File** + +.. list-table:: + :header-rows: 1 + + * - Shape in MatNWB + - Shape in HDF5 + * - (M, 1) + - (M,) + * - (1, M) + - (M,) + * - (P, O, N, M) + - (M, N, O, P) + +**Reading from File** + +.. list-table:: + :header-rows: 1 + + * - Shape in HDF5 + - Shape in MatNWB + * - (M,) + - (M, 1) + * - (M, N, O, P) + - (P, O, N, M) + +.. note:: + + MATLAB does not support 1D datasets. HDF5 datasets of size (M,) are loaded into MATLAB as datasets of size (M,1). To avoid changes in dimensions when writing to/from file, use column vectors for 1D datasets. + +With DataPipes +^^^^^^^^^^^^^^ + +See the documentation at the following link: +`with DataPipes <../tutorials/dimensionMapWithDataPipes.html>`_ + +**Writing to File** + +.. list-table:: + :header-rows: 1 + + * - Shape in MatNWB + - Shape in HDF5 + * - (M, 1) + - (1, M) + * - (1, M) + - (M, 1) / (M,) ** + * - (P, O, N, M) + - (M, N, O, P) + +\*\* Use a scalar as input to the ``maxSize`` argument to write a dataset of shape (N,) + +**Reading from File** + +.. list-table:: + :header-rows: 1 + + * - Shape in HDF5 + - Shape in MatNWB + * - (M, 1) + - (1, M) + * - (1, M) + - (M, 1) + * - (M,) + - (M, 1) + * - (M, N, O, P) + - (P, O, N, M) diff --git a/docs/source/pages/getting_started/installation_users.rst new file mode 100644 index 00000000..ae7dd19f --- /dev/null +++ b/docs/source/pages/getting_started/installation_users.rst @@ -0,0 +1,21 @@ +Install MatNWB +============== + +Download the current release of MatNWB from the +`MatNWB releases page `_ +or from `MATLAB's File Exchange `_. +You can also check out the latest development version via:: + + git clone https://github.com/NeurodataWithoutBorders/matnwb.git + +After downloading MatNWB, make sure to add it to MATLAB's search path: + +.. code-block:: matlab + + addpath("path/to/matnwb") + savepath() % Permanently add to search path + +Requirements +------------ + +It is recommended to use MATLAB R2017b or later.
diff --git a/docs/source/pages/getting_started/overview_citing.rst new file mode 100644 index 00000000..7c67f175 --- /dev/null +++ b/docs/source/pages/getting_started/overview_citing.rst @@ -0,0 +1,30 @@ +Citing MatNWB +============= + +BibTeX entry +------------ + +If you use MatNWB in your research, please use the following citation: + +.. code-block:: bibtex + + @article {10.7554/eLife.78362, + article_type = {journal}, + title = {{The Neurodata Without Borders ecosystem for neurophysiological data science}}, + author = {R\"ubel, Oliver and Tritt, Andrew and Ly, Ryan and Dichter, Benjamin K. and + Ghosh, Satrajit and Niu, Lawrence and Baker, Pamela and Soltesz, Ivan and + Ng, Lydia and Svoboda, Karel and Frank, Loren and Bouchard, Kristofer E.}, + editor = {Colgin, Laura L and Jadhav, Shantanu P}, + volume = {11}, + year = {2022}, + month = {oct}, + pub_date = {2022-10-04}, + pages = {e78362}, + citation = {eLife 2022;11:e78362}, + doi = {10.7554/eLife.78362}, + url = {https://doi.org/10.7554/eLife.78362}, + keywords = {Neurophysiology, data ecosystem, data language, data standard, FAIR data, archive}, + journal = {eLife}, + issn = {2050-084X}, + publisher = {eLife Sciences Publications, Ltd}, + } diff --git a/docs/source/pages/neurodata_types/core/AbstractFeatureSeries.rst new file mode 100644 index 00000000..07e4f992 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/AbstractFeatureSeries.rst @@ -0,0 +1,23 @@ +AbstractFeatureSeries +===================== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for AbstractFeatureSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.AbstractFeatureSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/AnnotationSeries.rst new file mode 100644 index 00000000..b1c9b0d2 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/AnnotationSeries.rst @@ -0,0 +1,23 @@ +AnnotationSeries +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for AnnotationSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.AnnotationSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/BehavioralEpochs.rst new file mode 100644 index 00000000..0e2117ad --- /dev/null +++ b/docs/source/pages/neurodata_types/core/BehavioralEpochs.rst @@ -0,0 +1,23 @@ +BehavioralEpochs +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for BehavioralEpochs. + +.. mat:module:: types.core +.. autoclass:: types.core.BehavioralEpochs + :members: + :show-inheritance: + + +..
+ Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/BehavioralEvents.rst b/docs/source/pages/neurodata_types/core/BehavioralEvents.rst new file mode 100644 index 00000000..e63b194f --- /dev/null +++ b/docs/source/pages/neurodata_types/core/BehavioralEvents.rst @@ -0,0 +1,23 @@ +BehavioralEvents +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for BehavioralEvents. + +.. mat:module:: types.core +.. autoclass:: types.core.BehavioralEvents + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/BehavioralTimeSeries.rst b/docs/source/pages/neurodata_types/core/BehavioralTimeSeries.rst new file mode 100644 index 00000000..d9e0d546 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/BehavioralTimeSeries.rst @@ -0,0 +1,23 @@ +BehavioralTimeSeries +==================== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for BehavioralTimeSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.BehavioralTimeSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/ClusterWaveforms.rst b/docs/source/pages/neurodata_types/core/ClusterWaveforms.rst new file mode 100644 index 00000000..f425b563 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/ClusterWaveforms.rst @@ -0,0 +1,23 @@ +ClusterWaveforms +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for ClusterWaveforms. + +.. mat:module:: types.core +.. autoclass:: types.core.ClusterWaveforms + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/Clustering.rst b/docs/source/pages/neurodata_types/core/Clustering.rst new file mode 100644 index 00000000..a3851633 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/Clustering.rst @@ -0,0 +1,23 @@ +Clustering +========== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for Clustering. + +.. mat:module:: types.core +.. autoclass:: types.core.Clustering + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. 
Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/CompassDirection.rst b/docs/source/pages/neurodata_types/core/CompassDirection.rst new file mode 100644 index 00000000..bc8da3f0 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/CompassDirection.rst @@ -0,0 +1,23 @@ +CompassDirection +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for CompassDirection. + +.. mat:module:: types.core +.. autoclass:: types.core.CompassDirection + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/CorrectedImageStack.rst b/docs/source/pages/neurodata_types/core/CorrectedImageStack.rst new file mode 100644 index 00000000..5e3c9151 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/CorrectedImageStack.rst @@ -0,0 +1,23 @@ +CorrectedImageStack +=================== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for CorrectedImageStack. + +.. mat:module:: types.core +.. autoclass:: types.core.CorrectedImageStack + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/CurrentClampSeries.rst b/docs/source/pages/neurodata_types/core/CurrentClampSeries.rst new file mode 100644 index 00000000..313a119f --- /dev/null +++ b/docs/source/pages/neurodata_types/core/CurrentClampSeries.rst @@ -0,0 +1,23 @@ +CurrentClampSeries +================== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for CurrentClampSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.CurrentClampSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/CurrentClampStimulusSeries.rst b/docs/source/pages/neurodata_types/core/CurrentClampStimulusSeries.rst new file mode 100644 index 00000000..5dce9ba9 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/CurrentClampStimulusSeries.rst @@ -0,0 +1,23 @@ +CurrentClampStimulusSeries +========================== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for CurrentClampStimulusSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.CurrentClampStimulusSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/DecompositionSeries.rst b/docs/source/pages/neurodata_types/core/DecompositionSeries.rst new file mode 100644 index 00000000..7796c0f1 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/DecompositionSeries.rst @@ -0,0 +1,23 @@ +DecompositionSeries +=================== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for DecompositionSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.DecompositionSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/Device.rst b/docs/source/pages/neurodata_types/core/Device.rst new file mode 100644 index 00000000..d0ffa727 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/Device.rst @@ -0,0 +1,23 @@ +Device +====== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for Device. + +.. mat:module:: types.core +.. autoclass:: types.core.Device + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/DfOverF.rst b/docs/source/pages/neurodata_types/core/DfOverF.rst new file mode 100644 index 00000000..77720c0f --- /dev/null +++ b/docs/source/pages/neurodata_types/core/DfOverF.rst @@ -0,0 +1,23 @@ +DfOverF +======= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for DfOverF. + +.. mat:module:: types.core +.. autoclass:: types.core.DfOverF + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/ElectricalSeries.rst b/docs/source/pages/neurodata_types/core/ElectricalSeries.rst new file mode 100644 index 00000000..c3b7cfbc --- /dev/null +++ b/docs/source/pages/neurodata_types/core/ElectricalSeries.rst @@ -0,0 +1,23 @@ +ElectricalSeries +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for ElectricalSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.ElectricalSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/ElectrodeGroup.rst b/docs/source/pages/neurodata_types/core/ElectrodeGroup.rst new file mode 100644 index 00000000..ae2a2c37 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/ElectrodeGroup.rst @@ -0,0 +1,23 @@ +ElectrodeGroup +============== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for ElectrodeGroup. + +.. mat:module:: types.core +.. autoclass:: types.core.ElectrodeGroup + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/EventDetection.rst b/docs/source/pages/neurodata_types/core/EventDetection.rst new file mode 100644 index 00000000..dc2ae86b --- /dev/null +++ b/docs/source/pages/neurodata_types/core/EventDetection.rst @@ -0,0 +1,23 @@ +EventDetection +============== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for EventDetection. + +.. mat:module:: types.core +.. autoclass:: types.core.EventDetection + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/EventWaveform.rst b/docs/source/pages/neurodata_types/core/EventWaveform.rst new file mode 100644 index 00000000..a0c3caae --- /dev/null +++ b/docs/source/pages/neurodata_types/core/EventWaveform.rst @@ -0,0 +1,23 @@ +EventWaveform +============= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for EventWaveform. + +.. mat:module:: types.core +.. autoclass:: types.core.EventWaveform + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/ExperimentalConditionsTable.rst b/docs/source/pages/neurodata_types/core/ExperimentalConditionsTable.rst new file mode 100644 index 00000000..106ae041 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/ExperimentalConditionsTable.rst @@ -0,0 +1,23 @@ +ExperimentalConditionsTable +=========================== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for ExperimentalConditionsTable. + +.. mat:module:: types.core +.. autoclass:: types.core.ExperimentalConditionsTable + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/EyeTracking.rst b/docs/source/pages/neurodata_types/core/EyeTracking.rst new file mode 100644 index 00000000..f8f03482 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/EyeTracking.rst @@ -0,0 +1,23 @@ +EyeTracking +=========== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for EyeTracking. + +.. mat:module:: types.core +.. autoclass:: types.core.EyeTracking + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/FeatureExtraction.rst b/docs/source/pages/neurodata_types/core/FeatureExtraction.rst new file mode 100644 index 00000000..f9fe65c0 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/FeatureExtraction.rst @@ -0,0 +1,23 @@ +FeatureExtraction +================= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for FeatureExtraction. + +.. mat:module:: types.core +.. autoclass:: types.core.FeatureExtraction + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/FilteredEphys.rst b/docs/source/pages/neurodata_types/core/FilteredEphys.rst new file mode 100644 index 00000000..9a8218e5 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/FilteredEphys.rst @@ -0,0 +1,23 @@ +FilteredEphys +============= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for FilteredEphys. + +.. mat:module:: types.core +.. autoclass:: types.core.FilteredEphys + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/Fluorescence.rst b/docs/source/pages/neurodata_types/core/Fluorescence.rst new file mode 100644 index 00000000..227e7162 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/Fluorescence.rst @@ -0,0 +1,23 @@ +Fluorescence +============ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for Fluorescence. + +.. mat:module:: types.core +.. autoclass:: types.core.Fluorescence + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/GrayscaleImage.rst b/docs/source/pages/neurodata_types/core/GrayscaleImage.rst new file mode 100644 index 00000000..c6cf392c --- /dev/null +++ b/docs/source/pages/neurodata_types/core/GrayscaleImage.rst @@ -0,0 +1,23 @@ +GrayscaleImage +============== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for GrayscaleImage. + +.. mat:module:: types.core +.. autoclass:: types.core.GrayscaleImage + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/IZeroClampSeries.rst b/docs/source/pages/neurodata_types/core/IZeroClampSeries.rst new file mode 100644 index 00000000..0dd8c742 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/IZeroClampSeries.rst @@ -0,0 +1,23 @@ +IZeroClampSeries +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for IZeroClampSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.IZeroClampSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/Image.rst b/docs/source/pages/neurodata_types/core/Image.rst new file mode 100644 index 00000000..71d5d11b --- /dev/null +++ b/docs/source/pages/neurodata_types/core/Image.rst @@ -0,0 +1,23 @@ +Image +===== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for Image. + +.. mat:module:: types.core +.. autoclass:: types.core.Image + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/ImageMaskSeries.rst b/docs/source/pages/neurodata_types/core/ImageMaskSeries.rst new file mode 100644 index 00000000..cf817773 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/ImageMaskSeries.rst @@ -0,0 +1,23 @@ +ImageMaskSeries +=============== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for ImageMaskSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.ImageMaskSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/ImageReferences.rst b/docs/source/pages/neurodata_types/core/ImageReferences.rst new file mode 100644 index 00000000..7b468c61 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/ImageReferences.rst @@ -0,0 +1,23 @@ +ImageReferences +=============== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for ImageReferences. + +.. mat:module:: types.core +.. autoclass:: types.core.ImageReferences + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/ImageSegmentation.rst b/docs/source/pages/neurodata_types/core/ImageSegmentation.rst new file mode 100644 index 00000000..32355372 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/ImageSegmentation.rst @@ -0,0 +1,23 @@ +ImageSegmentation +================= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for ImageSegmentation. + +.. mat:module:: types.core +.. autoclass:: types.core.ImageSegmentation + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/ImageSeries.rst b/docs/source/pages/neurodata_types/core/ImageSeries.rst new file mode 100644 index 00000000..9e6c987d --- /dev/null +++ b/docs/source/pages/neurodata_types/core/ImageSeries.rst @@ -0,0 +1,23 @@ +ImageSeries +=========== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for ImageSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.ImageSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/Images.rst b/docs/source/pages/neurodata_types/core/Images.rst new file mode 100644 index 00000000..fab43149 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/Images.rst @@ -0,0 +1,23 @@ +Images +====== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for Images. + +.. mat:module:: types.core +.. autoclass:: types.core.Images + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/ImagingPlane.rst b/docs/source/pages/neurodata_types/core/ImagingPlane.rst new file mode 100644 index 00000000..57e6e6a2 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/ImagingPlane.rst @@ -0,0 +1,23 @@ +ImagingPlane +============ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for ImagingPlane. + +.. mat:module:: types.core +.. autoclass:: types.core.ImagingPlane + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/ImagingRetinotopy.rst b/docs/source/pages/neurodata_types/core/ImagingRetinotopy.rst new file mode 100644 index 00000000..22884930 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/ImagingRetinotopy.rst @@ -0,0 +1,23 @@ +ImagingRetinotopy +================= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for ImagingRetinotopy. + +.. mat:module:: types.core +.. autoclass:: types.core.ImagingRetinotopy + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/IndexSeries.rst b/docs/source/pages/neurodata_types/core/IndexSeries.rst new file mode 100644 index 00000000..a2996e15 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/IndexSeries.rst @@ -0,0 +1,23 @@ +IndexSeries +=========== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for IndexSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.IndexSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/IntervalSeries.rst b/docs/source/pages/neurodata_types/core/IntervalSeries.rst new file mode 100644 index 00000000..7b188a7e --- /dev/null +++ b/docs/source/pages/neurodata_types/core/IntervalSeries.rst @@ -0,0 +1,23 @@ +IntervalSeries +============== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for IntervalSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.IntervalSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/IntracellularElectrode.rst b/docs/source/pages/neurodata_types/core/IntracellularElectrode.rst new file mode 100644 index 00000000..a6808e52 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/IntracellularElectrode.rst @@ -0,0 +1,23 @@ +IntracellularElectrode +====================== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for IntracellularElectrode. + +.. mat:module:: types.core +.. autoclass:: types.core.IntracellularElectrode + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/IntracellularElectrodesTable.rst b/docs/source/pages/neurodata_types/core/IntracellularElectrodesTable.rst new file mode 100644 index 00000000..1f2ba25a --- /dev/null +++ b/docs/source/pages/neurodata_types/core/IntracellularElectrodesTable.rst @@ -0,0 +1,23 @@ +IntracellularElectrodesTable +============================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for IntracellularElectrodesTable. + +.. mat:module:: types.core +.. autoclass:: types.core.IntracellularElectrodesTable + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/IntracellularRecordingsTable.rst b/docs/source/pages/neurodata_types/core/IntracellularRecordingsTable.rst new file mode 100644 index 00000000..a14f1ff8 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/IntracellularRecordingsTable.rst @@ -0,0 +1,23 @@ +IntracellularRecordingsTable +============================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for IntracellularRecordingsTable. + +.. mat:module:: types.core +.. autoclass:: types.core.IntracellularRecordingsTable + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/IntracellularResponsesTable.rst b/docs/source/pages/neurodata_types/core/IntracellularResponsesTable.rst new file mode 100644 index 00000000..b44495e9 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/IntracellularResponsesTable.rst @@ -0,0 +1,23 @@ +IntracellularResponsesTable +=========================== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for IntracellularResponsesTable. + +.. mat:module:: types.core +.. autoclass:: types.core.IntracellularResponsesTable + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. 
tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/IntracellularStimuliTable.rst b/docs/source/pages/neurodata_types/core/IntracellularStimuliTable.rst new file mode 100644 index 00000000..fc65405b --- /dev/null +++ b/docs/source/pages/neurodata_types/core/IntracellularStimuliTable.rst @@ -0,0 +1,23 @@ +IntracellularStimuliTable +========================= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for IntracellularStimuliTable. + +.. mat:module:: types.core +.. autoclass:: types.core.IntracellularStimuliTable + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/LFP.rst b/docs/source/pages/neurodata_types/core/LFP.rst new file mode 100644 index 00000000..a38d4277 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/LFP.rst @@ -0,0 +1,23 @@ +LFP +=== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for LFP. + +.. mat:module:: types.core +.. autoclass:: types.core.LFP + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/LabMetaData.rst b/docs/source/pages/neurodata_types/core/LabMetaData.rst new file mode 100644 index 00000000..91aaf6ae --- /dev/null +++ b/docs/source/pages/neurodata_types/core/LabMetaData.rst @@ -0,0 +1,23 @@ +LabMetaData +=========== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for LabMetaData. + +.. mat:module:: types.core +.. autoclass:: types.core.LabMetaData + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/MotionCorrection.rst b/docs/source/pages/neurodata_types/core/MotionCorrection.rst new file mode 100644 index 00000000..d7553e5e --- /dev/null +++ b/docs/source/pages/neurodata_types/core/MotionCorrection.rst @@ -0,0 +1,23 @@ +MotionCorrection +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for MotionCorrection. + +.. mat:module:: types.core +.. autoclass:: types.core.MotionCorrection + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/NWBContainer.rst b/docs/source/pages/neurodata_types/core/NWBContainer.rst new file mode 100644 index 00000000..0f76f1bc --- /dev/null +++ b/docs/source/pages/neurodata_types/core/NWBContainer.rst @@ -0,0 +1,23 @@ +NWBContainer +============ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for NWBContainer. + +.. mat:module:: types.core +.. autoclass:: types.core.NWBContainer + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/NWBData.rst b/docs/source/pages/neurodata_types/core/NWBData.rst new file mode 100644 index 00000000..e9f91de3 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/NWBData.rst @@ -0,0 +1,23 @@ +NWBData +======= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for NWBData. + +.. mat:module:: types.core +.. autoclass:: types.core.NWBData + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/NWBDataInterface.rst b/docs/source/pages/neurodata_types/core/NWBDataInterface.rst new file mode 100644 index 00000000..bd071390 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/NWBDataInterface.rst @@ -0,0 +1,23 @@ +NWBDataInterface +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for NWBDataInterface. + +.. mat:module:: types.core +.. autoclass:: types.core.NWBDataInterface + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/NWBFile.rst b/docs/source/pages/neurodata_types/core/NWBFile.rst new file mode 100644 index 00000000..497cfda0 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/NWBFile.rst @@ -0,0 +1,23 @@ +NWBFile +======= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for NWBFile. + +.. mat:module:: types.core +.. autoclass:: types.core.NWBFile + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
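The NWBFile page above documents the root object that every MatNWB workflow begins with. As a minimal sketch (the description, identifier, timestamp, and file name below are placeholder values), creating and round-tripping an empty file looks roughly like this:

.. code-block:: matlab

   % Root file object; all metadata values here are illustrative placeholders.
   nwb = NwbFile( ...
       'session_description', 'example session', ...
       'identifier', 'EXAMPLE-0001', ...
       'session_start_time', datetime(2024, 1, 1, 'TimeZone', 'local'));

   % Write the (still empty) file to disk, then read it back.
   nwbExport(nwb, 'example.nwb');
   nwbIn = nwbRead('example.nwb');

The later sketches in this section assume this ``nwb`` object is in scope.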
diff --git a/docs/source/pages/neurodata_types/core/OnePhotonSeries.rst b/docs/source/pages/neurodata_types/core/OnePhotonSeries.rst new file mode 100644 index 00000000..2125676f --- /dev/null +++ b/docs/source/pages/neurodata_types/core/OnePhotonSeries.rst @@ -0,0 +1,23 @@ +OnePhotonSeries +=============== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for OnePhotonSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.OnePhotonSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/OpticalChannel.rst b/docs/source/pages/neurodata_types/core/OpticalChannel.rst new file mode 100644 index 00000000..90a1609c --- /dev/null +++ b/docs/source/pages/neurodata_types/core/OpticalChannel.rst @@ -0,0 +1,23 @@ +OpticalChannel +============== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for OpticalChannel. + +.. mat:module:: types.core +.. autoclass:: types.core.OpticalChannel + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/OpticalSeries.rst b/docs/source/pages/neurodata_types/core/OpticalSeries.rst new file mode 100644 index 00000000..9253a1bc --- /dev/null +++ b/docs/source/pages/neurodata_types/core/OpticalSeries.rst @@ -0,0 +1,23 @@ +OpticalSeries +============= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for OpticalSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.OpticalSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/OptogeneticSeries.rst b/docs/source/pages/neurodata_types/core/OptogeneticSeries.rst new file mode 100644 index 00000000..4db93ef4 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/OptogeneticSeries.rst @@ -0,0 +1,23 @@ +OptogeneticSeries +================= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for OptogeneticSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.OptogeneticSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/OptogeneticStimulusSite.rst b/docs/source/pages/neurodata_types/core/OptogeneticStimulusSite.rst new file mode 100644 index 00000000..1128fa1a --- /dev/null +++ b/docs/source/pages/neurodata_types/core/OptogeneticStimulusSite.rst @@ -0,0 +1,23 @@ +OptogeneticStimulusSite +======================= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for OptogeneticStimulusSite. + +.. mat:module:: types.core +.. autoclass:: types.core.OptogeneticStimulusSite + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/PatchClampSeries.rst b/docs/source/pages/neurodata_types/core/PatchClampSeries.rst new file mode 100644 index 00000000..db4819eb --- /dev/null +++ b/docs/source/pages/neurodata_types/core/PatchClampSeries.rst @@ -0,0 +1,23 @@ +PatchClampSeries +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for PatchClampSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.PatchClampSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/PlaneSegmentation.rst b/docs/source/pages/neurodata_types/core/PlaneSegmentation.rst new file mode 100644 index 00000000..3c5cb1eb --- /dev/null +++ b/docs/source/pages/neurodata_types/core/PlaneSegmentation.rst @@ -0,0 +1,23 @@ +PlaneSegmentation +================= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for PlaneSegmentation. + +.. mat:module:: types.core +.. autoclass:: types.core.PlaneSegmentation + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/Position.rst b/docs/source/pages/neurodata_types/core/Position.rst new file mode 100644 index 00000000..38a70ea5 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/Position.rst @@ -0,0 +1,23 @@ +Position +======== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for Position. + +.. mat:module:: types.core +.. autoclass:: types.core.Position + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/ProcessingModule.rst b/docs/source/pages/neurodata_types/core/ProcessingModule.rst new file mode 100644 index 00000000..557fb5fb --- /dev/null +++ b/docs/source/pages/neurodata_types/core/ProcessingModule.rst @@ -0,0 +1,23 @@ +ProcessingModule +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for ProcessingModule. + +.. mat:module:: types.core +.. autoclass:: types.core.ProcessingModule + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/PupilTracking.rst b/docs/source/pages/neurodata_types/core/PupilTracking.rst new file mode 100644 index 00000000..445c7c26 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/PupilTracking.rst @@ -0,0 +1,23 @@ +PupilTracking +============= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for PupilTracking. + +.. mat:module:: types.core +.. autoclass:: types.core.PupilTracking + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/RGBAImage.rst b/docs/source/pages/neurodata_types/core/RGBAImage.rst new file mode 100644 index 00000000..4560ff55 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/RGBAImage.rst @@ -0,0 +1,23 @@ +RGBAImage +========= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for RGBAImage. + +.. mat:module:: types.core +.. autoclass:: types.core.RGBAImage + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/RGBImage.rst b/docs/source/pages/neurodata_types/core/RGBImage.rst new file mode 100644 index 00000000..34d4786a --- /dev/null +++ b/docs/source/pages/neurodata_types/core/RGBImage.rst @@ -0,0 +1,23 @@ +RGBImage +======== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for RGBImage. + +.. mat:module:: types.core +.. autoclass:: types.core.RGBImage + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/RepetitionsTable.rst b/docs/source/pages/neurodata_types/core/RepetitionsTable.rst new file mode 100644 index 00000000..5457d172 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/RepetitionsTable.rst @@ -0,0 +1,23 @@ +RepetitionsTable +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for RepetitionsTable. + +.. mat:module:: types.core +.. autoclass:: types.core.RepetitionsTable + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/RoiResponseSeries.rst b/docs/source/pages/neurodata_types/core/RoiResponseSeries.rst new file mode 100644 index 00000000..f90e5a86 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/RoiResponseSeries.rst @@ -0,0 +1,23 @@ +RoiResponseSeries +================= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for RoiResponseSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.RoiResponseSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/ScratchData.rst b/docs/source/pages/neurodata_types/core/ScratchData.rst new file mode 100644 index 00000000..adba6c51 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/ScratchData.rst @@ -0,0 +1,23 @@ +ScratchData +=========== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for ScratchData. + +.. mat:module:: types.core +.. autoclass:: types.core.ScratchData + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/SequentialRecordingsTable.rst b/docs/source/pages/neurodata_types/core/SequentialRecordingsTable.rst new file mode 100644 index 00000000..cf645af2 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/SequentialRecordingsTable.rst @@ -0,0 +1,23 @@ +SequentialRecordingsTable +========================= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for SequentialRecordingsTable. + +.. mat:module:: types.core +.. autoclass:: types.core.SequentialRecordingsTable + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/SimultaneousRecordingsTable.rst b/docs/source/pages/neurodata_types/core/SimultaneousRecordingsTable.rst new file mode 100644 index 00000000..ff8807aa --- /dev/null +++ b/docs/source/pages/neurodata_types/core/SimultaneousRecordingsTable.rst @@ -0,0 +1,23 @@ +SimultaneousRecordingsTable +=========================== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for SimultaneousRecordingsTable. + +.. mat:module:: types.core +.. autoclass:: types.core.SimultaneousRecordingsTable + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/SpatialSeries.rst b/docs/source/pages/neurodata_types/core/SpatialSeries.rst new file mode 100644 index 00000000..6c2d0bb9 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/SpatialSeries.rst @@ -0,0 +1,23 @@ +SpatialSeries +============= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for SpatialSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.SpatialSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/SpikeEventSeries.rst b/docs/source/pages/neurodata_types/core/SpikeEventSeries.rst new file mode 100644 index 00000000..6e6187b4 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/SpikeEventSeries.rst @@ -0,0 +1,23 @@ +SpikeEventSeries +================ + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for SpikeEventSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.SpikeEventSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/Subject.rst b/docs/source/pages/neurodata_types/core/Subject.rst new file mode 100644 index 00000000..2c1520c6 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/Subject.rst @@ -0,0 +1,23 @@ +Subject +======= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for Subject. + +.. mat:module:: types.core +.. autoclass:: types.core.Subject + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/SweepTable.rst b/docs/source/pages/neurodata_types/core/SweepTable.rst new file mode 100644 index 00000000..ddcaa975 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/SweepTable.rst @@ -0,0 +1,23 @@ +SweepTable +========== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for SweepTable. + +.. mat:module:: types.core +.. autoclass:: types.core.SweepTable + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/TimeIntervals.rst b/docs/source/pages/neurodata_types/core/TimeIntervals.rst new file mode 100644 index 00000000..681b1b4b --- /dev/null +++ b/docs/source/pages/neurodata_types/core/TimeIntervals.rst @@ -0,0 +1,23 @@ +TimeIntervals +============= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for TimeIntervals. + +.. mat:module:: types.core +.. autoclass:: types.core.TimeIntervals + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/TimeSeries.rst b/docs/source/pages/neurodata_types/core/TimeSeries.rst new file mode 100644 index 00000000..d6df57c0 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/TimeSeries.rst @@ -0,0 +1,23 @@ +TimeSeries +========== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for TimeSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.TimeSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/TimeSeriesReferenceVectorData.rst b/docs/source/pages/neurodata_types/core/TimeSeriesReferenceVectorData.rst new file mode 100644 index 00000000..59e348b3 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/TimeSeriesReferenceVectorData.rst @@ -0,0 +1,23 @@ +TimeSeriesReferenceVectorData +============================= + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for TimeSeriesReferenceVectorData. + +.. mat:module:: types.core +.. autoclass:: types.core.TimeSeriesReferenceVectorData + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/TwoPhotonSeries.rst b/docs/source/pages/neurodata_types/core/TwoPhotonSeries.rst new file mode 100644 index 00000000..45c3d66f --- /dev/null +++ b/docs/source/pages/neurodata_types/core/TwoPhotonSeries.rst @@ -0,0 +1,23 @@ +TwoPhotonSeries +=============== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for TwoPhotonSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.TwoPhotonSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/Units.rst b/docs/source/pages/neurodata_types/core/Units.rst new file mode 100644 index 00000000..56f89ea1 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/Units.rst @@ -0,0 +1,23 @@ +Units +===== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for Units. + +.. mat:module:: types.core +.. autoclass:: types.core.Units + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/VoltageClampSeries.rst b/docs/source/pages/neurodata_types/core/VoltageClampSeries.rst new file mode 100644 index 00000000..6df7ef95 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/VoltageClampSeries.rst @@ -0,0 +1,23 @@ +VoltageClampSeries +================== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for VoltageClampSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.VoltageClampSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/core/VoltageClampStimulusSeries.rst b/docs/source/pages/neurodata_types/core/VoltageClampStimulusSeries.rst new file mode 100644 index 00000000..66adddef --- /dev/null +++ b/docs/source/pages/neurodata_types/core/VoltageClampStimulusSeries.rst @@ -0,0 +1,23 @@ +VoltageClampStimulusSeries +========================== + +See also: + :nwb-core-type-schema:`Format Specification` + or :nwb-core-type-schema:`Source Specification` for VoltageClampStimulusSeries. + +.. mat:module:: types.core +.. autoclass:: types.core.VoltageClampStimulusSeries + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/core/index.rst b/docs/source/pages/neurodata_types/core/index.rst new file mode 100644 index 00000000..e084a566 --- /dev/null +++ b/docs/source/pages/neurodata_types/core/index.rst @@ -0,0 +1,84 @@ +Core Neurodata Types +==================== + +These are the MatNWB neurodata types from the core schema specification. + +.. toctree:: + :maxdepth: 2 + :caption: Functions + + AbstractFeatureSeries + AnnotationSeries + BehavioralEpochs + BehavioralEvents + BehavioralTimeSeries + ClusterWaveforms + Clustering + CompassDirection + CorrectedImageStack + CurrentClampSeries + CurrentClampStimulusSeries + DecompositionSeries + Device + DfOverF + ElectricalSeries + ElectrodeGroup + EventDetection + EventWaveform + ExperimentalConditionsTable + EyeTracking + FeatureExtraction + FilteredEphys + Fluorescence + GrayscaleImage + IZeroClampSeries + Image + ImageMaskSeries + ImageReferences + ImageSegmentation + ImageSeries + Images + ImagingPlane + ImagingRetinotopy + IndexSeries + IntervalSeries + IntracellularElectrode + IntracellularElectrodesTable + IntracellularRecordingsTable + IntracellularResponsesTable + IntracellularStimuliTable + LFP + LabMetaData + MotionCorrection + NWBContainer + NWBData + NWBDataInterface + NWBFile + OnePhotonSeries + OpticalChannel + OpticalSeries + OptogeneticSeries + OptogeneticStimulusSite + PatchClampSeries + PlaneSegmentation + Position + ProcessingModule + PupilTracking + RGBAImage + RGBImage + RepetitionsTable + RoiResponseSeries + ScratchData + SequentialRecordingsTable + SimultaneousRecordingsTable + SpatialSeries + SpikeEventSeries + Subject + SweepTable + TimeIntervals + TimeSeries + TimeSeriesReferenceVectorData + TwoPhotonSeries + Units + VoltageClampSeries + VoltageClampStimulusSeries diff --git a/docs/source/pages/neurodata_types/hdmf_common/AlignedDynamicTable.rst b/docs/source/pages/neurodata_types/hdmf_common/AlignedDynamicTable.rst new file mode 100644 index 00000000..39928c83 --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_common/AlignedDynamicTable.rst @@ -0,0 +1,23 @@ +AlignedDynamicTable +=================== + +See also: + :nwb-hdmf_common-type-schema:`Format Specification` + or :nwb-hdmf_common-type-schema:`Source Specification` for AlignedDynamicTable. + +.. mat:module:: types.hdmf_common +.. autoclass:: types.hdmf_common.AlignedDynamicTable + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/hdmf_common/CSRMatrix.rst b/docs/source/pages/neurodata_types/hdmf_common/CSRMatrix.rst new file mode 100644 index 00000000..6c6df90d --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_common/CSRMatrix.rst @@ -0,0 +1,23 @@ +CSRMatrix +========= + +See also: + :nwb-hdmf_common-type-schema:`Format Specification` + or :nwb-hdmf_common-type-schema:`Source Specification` for CSRMatrix. + +.. mat:module:: types.hdmf_common +.. autoclass:: types.hdmf_common.CSRMatrix + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/hdmf_common/Container.rst b/docs/source/pages/neurodata_types/hdmf_common/Container.rst new file mode 100644 index 00000000..97493f3c --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_common/Container.rst @@ -0,0 +1,23 @@ +Container +========= + +See also: + :nwb-hdmf_common-type-schema:`Format Specification` + or :nwb-hdmf_common-type-schema:`Source Specification` for Container. + +.. mat:module:: types.hdmf_common +.. autoclass:: types.hdmf_common.Container + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/hdmf_common/Data.rst b/docs/source/pages/neurodata_types/hdmf_common/Data.rst new file mode 100644 index 00000000..722d18ad --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_common/Data.rst @@ -0,0 +1,23 @@ +Data +==== + +See also: + :nwb-hdmf_common-type-schema:`Format Specification` + or :nwb-hdmf_common-type-schema:`Source Specification` for Data. + +.. mat:module:: types.hdmf_common +.. autoclass:: types.hdmf_common.Data + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/hdmf_common/DynamicTable.rst b/docs/source/pages/neurodata_types/hdmf_common/DynamicTable.rst new file mode 100644 index 00000000..2bb92720 --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_common/DynamicTable.rst @@ -0,0 +1,23 @@ +DynamicTable +============ + +See also: + :nwb-hdmf_common-type-schema:`Format Specification` + or :nwb-hdmf_common-type-schema:`Source Specification` for DynamicTable. + +.. mat:module:: types.hdmf_common +.. autoclass:: types.hdmf_common.DynamicTable + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/hdmf_common/DynamicTableRegion.rst b/docs/source/pages/neurodata_types/hdmf_common/DynamicTableRegion.rst new file mode 100644 index 00000000..d553ec16 --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_common/DynamicTableRegion.rst @@ -0,0 +1,23 @@ +DynamicTableRegion +================== + +See also: + :nwb-hdmf_common-type-schema:`Format Specification` + or :nwb-hdmf_common-type-schema:`Source Specification` for DynamicTableRegion. + +.. mat:module:: types.hdmf_common +.. autoclass:: types.hdmf_common.DynamicTableRegion + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/hdmf_common/ElementIdentifiers.rst b/docs/source/pages/neurodata_types/hdmf_common/ElementIdentifiers.rst new file mode 100644 index 00000000..fb340e1c --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_common/ElementIdentifiers.rst @@ -0,0 +1,23 @@ +ElementIdentifiers +================== + +See also: + :nwb-hdmf_common-type-schema:`Format Specification` + or :nwb-hdmf_common-type-schema:`Source Specification` for ElementIdentifiers. + +.. mat:module:: types.hdmf_common +.. autoclass:: types.hdmf_common.ElementIdentifiers + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/hdmf_common/SimpleMultiContainer.rst b/docs/source/pages/neurodata_types/hdmf_common/SimpleMultiContainer.rst new file mode 100644 index 00000000..28fb6335 --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_common/SimpleMultiContainer.rst @@ -0,0 +1,23 @@ +SimpleMultiContainer +==================== + +See also: + :nwb-hdmf_common-type-schema:`Format Specification` + or :nwb-hdmf_common-type-schema:`Source Specification` for SimpleMultiContainer. + +.. mat:module:: types.hdmf_common +.. autoclass:: types.hdmf_common.SimpleMultiContainer + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/hdmf_common/VectorData.rst b/docs/source/pages/neurodata_types/hdmf_common/VectorData.rst new file mode 100644 index 00000000..5c82d2b3 --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_common/VectorData.rst @@ -0,0 +1,23 @@ +VectorData +========== + +See also: + :nwb-hdmf_common-type-schema:`Format Specification` + or :nwb-hdmf_common-type-schema:`Source Specification` for VectorData. + +.. mat:module:: types.hdmf_common +.. autoclass:: types.hdmf_common.VectorData + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/hdmf_common/VectorIndex.rst b/docs/source/pages/neurodata_types/hdmf_common/VectorIndex.rst new file mode 100644 index 00000000..4d7abc1f --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_common/VectorIndex.rst @@ -0,0 +1,23 @@ +VectorIndex +=========== + +See also: + :nwb-hdmf_common-type-schema:`Format Specification` + or :nwb-hdmf_common-type-schema:`Source Specification` for VectorIndex. + +.. mat:module:: types.hdmf_common +.. autoclass:: types.hdmf_common.VectorIndex + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. 
diff --git a/docs/source/pages/neurodata_types/hdmf_common/index.rst b/docs/source/pages/neurodata_types/hdmf_common/index.rst new file mode 100644 index 00000000..b746d1be --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_common/index.rst @@ -0,0 +1,19 @@ +HDMF-Common Data Types +====================== + +These are the MatNWB neurodata types from the hdmf_common schema specification. + +.. toctree:: + :maxdepth: 2 + :caption: Functions + + AlignedDynamicTable + CSRMatrix + Container + Data + DynamicTable + DynamicTableRegion + ElementIdentifiers + SimpleMultiContainer + VectorData + VectorIndex diff --git a/docs/source/pages/neurodata_types/hdmf_experimental/EnumData.rst b/docs/source/pages/neurodata_types/hdmf_experimental/EnumData.rst new file mode 100644 index 00000000..156ab9bc --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_experimental/EnumData.rst @@ -0,0 +1,23 @@ +EnumData +======== + +See also: + :nwb-hdmf_experimental-type-schema:`Format Specification` + or :nwb-hdmf_experimental-type-schema:`Source Specification` for EnumData. + +.. mat:module:: types.hdmf_experimental +.. autoclass:: types.hdmf_experimental.EnumData + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/hdmf_experimental/HERD.rst b/docs/source/pages/neurodata_types/hdmf_experimental/HERD.rst new file mode 100644 index 00000000..76bd4664 --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_experimental/HERD.rst @@ -0,0 +1,23 @@ +HERD +==== + +See also: + :nwb-hdmf_experimental-type-schema:`Format Specification` + or :nwb-hdmf_experimental-type-schema:`Source Specification` for HERD. + +.. mat:module:: types.hdmf_experimental +.. autoclass:: types.hdmf_experimental.HERD + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/docs/source/pages/neurodata_types/hdmf_experimental/index.rst b/docs/source/pages/neurodata_types/hdmf_experimental/index.rst new file mode 100644 index 00000000..51f35113 --- /dev/null +++ b/docs/source/pages/neurodata_types/hdmf_experimental/index.rst @@ -0,0 +1,11 @@ +HDMF-Experimental Data Types +============================ + +These are the MatNWB neurodata types from the hdmf_experimental schema specification. + +.. toctree:: + :maxdepth: 2 + :caption: Functions + + EnumData + HERD diff --git a/docs/source/pages/tutorials/basicUsage.rst b/docs/source/pages/tutorials/basicUsage.rst new file mode 100644 index 00000000..06623710 --- /dev/null +++ b/docs/source/pages/tutorials/basicUsage.rst @@ -0,0 +1,20 @@ +Basic Usage of MatNWB +===================== + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/basicUsage.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/basicUsage.html + :alt: View full page + + +.. 
raw:: html + + + diff --git a/docs/source/pages/tutorials/behavior.rst b/docs/source/pages/tutorials/behavior.rst new file mode 100644 index 00000000..cc39da74 --- /dev/null +++ b/docs/source/pages/tutorials/behavior.rst @@ -0,0 +1,20 @@ +Behavior Data +============= + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/behavior.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/behavior.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/convertTrials.rst b/docs/source/pages/tutorials/convertTrials.rst new file mode 100644 index 00000000..13f80e9d --- /dev/null +++ b/docs/source/pages/tutorials/convertTrials.rst @@ -0,0 +1,20 @@ +Converting Trials to NWB Format +=============================== + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/convertTrials.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/convertTrials.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/dataPipe.rst b/docs/source/pages/tutorials/dataPipe.rst new file mode 100644 index 00000000..e8b9dbaf --- /dev/null +++ b/docs/source/pages/tutorials/dataPipe.rst @@ -0,0 +1,20 @@ +Advanced Writing Using DataPipes +================================ + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/dataPipe.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/dataPipe.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/dimensionMapNoDataPipes.rst b/docs/source/pages/tutorials/dimensionMapNoDataPipes.rst new file mode 100644 index 00000000..d69bce1f --- /dev/null +++ b/docs/source/pages/tutorials/dimensionMapNoDataPipes.rst @@ -0,0 +1,20 @@ +Mapping Dimensions without DataPipes +==================================== + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/dimensionMapNoDataPipes.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/dimensionMapNoDataPipes.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/dimensionMapWithDataPipes.rst b/docs/source/pages/tutorials/dimensionMapWithDataPipes.rst new file mode 100644 index 00000000..62788817 --- /dev/null +++ b/docs/source/pages/tutorials/dimensionMapWithDataPipes.rst @@ -0,0 +1,20 @@ +Mapping Dimensions with DataPipes +================================= + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/dimensionMapWithDataPipes.mlx + :alt: Open in MATLAB Online +.. 
image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/dimensionMapWithDataPipes.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/dynamic_tables.rst b/docs/source/pages/tutorials/dynamic_tables.rst new file mode 100644 index 00000000..f719c0f9 --- /dev/null +++ b/docs/source/pages/tutorials/dynamic_tables.rst @@ -0,0 +1,20 @@ +Using Dynamic Tables in MatNWB +============================== + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/dynamic_tables.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/dynamic_tables.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/dynamically_loaded_filters.rst b/docs/source/pages/tutorials/dynamically_loaded_filters.rst new file mode 100644 index 00000000..f8e81746 --- /dev/null +++ b/docs/source/pages/tutorials/dynamically_loaded_filters.rst @@ -0,0 +1,20 @@ +Implementing Dynamically Loaded Filters +======================================= + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/dynamically_loaded_filters.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/dynamically_loaded_filters.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/ecephys.rst b/docs/source/pages/tutorials/ecephys.rst new file mode 100644 index 00000000..39ef6dc7 --- /dev/null +++ b/docs/source/pages/tutorials/ecephys.rst @@ -0,0 +1,22 @@ +Extracellular Electrophysiology +=============================== + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/ecephys.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/ecephys.html + :alt: View full page +.. image:: https://img.shields.io/badge/View-Youtube-red + :target: https://www.youtube.com/watch?v=W8t4_quIl1k&ab_channel=NeurodataWithoutBorders + :alt: View tutorial on YouTube + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/icephys.rst b/docs/source/pages/tutorials/icephys.rst new file mode 100644 index 00000000..3d6ff227 --- /dev/null +++ b/docs/source/pages/tutorials/icephys.rst @@ -0,0 +1,20 @@ +Intracellular Electrophysiology +=============================== + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/icephys.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/icephys.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/images.rst b/docs/source/pages/tutorials/images.rst new file mode 100644 index 00000000..1b4e5a75 --- /dev/null +++ b/docs/source/pages/tutorials/images.rst @@ -0,0 +1,20 @@ +Image Data +========== + +.. 
image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/images.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/images.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/index.rst b/docs/source/pages/tutorials/index.rst new file mode 100644 index 00000000..a93692f9 --- /dev/null +++ b/docs/source/pages/tutorials/index.rst @@ -0,0 +1,37 @@ +Tutorials +========= + +General Tutorials +----------------- +.. toctree:: + :maxdepth: 1 + + intro + read_demo + basicUsage + dimensionMapNoDataPipes + dimensionMapWithDataPipes + convertTrials + dynamic_tables + scratch + +Domain-Specific Tutorials +------------------------- +.. toctree:: + :maxdepth: 1 + + behavior + ecephys + icephys + images + ogen + ophys + +Advanced I/O +------------ +.. toctree:: + :maxdepth: 1 + + dataPipe + dynamically_loaded_filters + remote_read diff --git a/docs/source/pages/tutorials/intro.rst b/docs/source/pages/tutorials/intro.rst new file mode 100644 index 00000000..fe87f281 --- /dev/null +++ b/docs/source/pages/tutorials/intro.rst @@ -0,0 +1,20 @@ +Getting Started with MatNWB +=========================== + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/intro.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/intro.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/ogen.rst b/docs/source/pages/tutorials/ogen.rst new file mode 100644 index 00000000..90ac0345 --- /dev/null +++ b/docs/source/pages/tutorials/ogen.rst @@ -0,0 +1,20 @@ +Optogenetics +============ + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/ogen.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/ogen.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/ophys.rst b/docs/source/pages/tutorials/ophys.rst new file mode 100644 index 00000000..75f6b552 --- /dev/null +++ b/docs/source/pages/tutorials/ophys.rst @@ -0,0 +1,22 @@ +Calcium Imaging +=============== + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/ophys.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/ophys.html + :alt: View full page +.. image:: https://img.shields.io/badge/View-Youtube-red + :target: https://www.youtube.com/watch?v=OBidHdocnTc&ab_channel=NeurodataWithoutBorders + :alt: View tutorial on YouTube + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/read_demo.rst b/docs/source/pages/tutorials/read_demo.rst new file mode 100644 index 00000000..a493b2a7 --- /dev/null +++ b/docs/source/pages/tutorials/read_demo.rst @@ -0,0 +1,20 @@ +Reading NWB Files with MatNWB +============================= + +.. 
image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/read_demo.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/read_demo.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/remote_read.rst b/docs/source/pages/tutorials/remote_read.rst new file mode 100644 index 00000000..e4c52b5d --- /dev/null +++ b/docs/source/pages/tutorials/remote_read.rst @@ -0,0 +1,20 @@ +Reading NWB Files from Remote Locations +======================================= + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/remote_read.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/remote_read.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/pages/tutorials/scratch.rst b/docs/source/pages/tutorials/scratch.rst new file mode 100644 index 00000000..3c1fa861 --- /dev/null +++ b/docs/source/pages/tutorials/scratch.rst @@ -0,0 +1,20 @@ +Working with Scratch Space in MatNWB +==================================== + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/scratch.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: ../../_static/html/tutorials/scratch.html + :alt: View full page + + +.. raw:: html + + + diff --git a/docs/source/sphinx_extensions/_util.py b/docs/source/sphinx_extensions/_util.py new file mode 100644 index 00000000..eca81a2f --- /dev/null +++ b/docs/source/sphinx_extensions/_util.py @@ -0,0 +1,24 @@ +import os + +def list_neurodata_types(namespace_name): + # Get the absolute path of the script's directory + script_dir = os.path.dirname(os.path.abspath(__file__)) + + # Compute the absolute path two levels up from the script's directory + matnwb_src_dir = os.path.abspath(os.path.join(script_dir, '..', '..', '..')) + + # Construct the path to the namespace's types directory + types_dir = os.path.join(matnwb_src_dir, '+types', f"+{namespace_name}") + + # List to store the file names without extension + neurodata_types = [] + + # Check if the directory exists + if os.path.isdir(types_dir): + # Iterate through all .m files in the directory + for file_name in os.listdir(types_dir): + if file_name.endswith('.m'): + # Remove the file extension and add to the list + neurodata_types.append(os.path.splitext(file_name)[0]) + + return neurodata_types diff --git a/docs/source/sphinx_extensions/custom_roles.py b/docs/source/sphinx_extensions/custom_roles.py new file mode 100644 index 00000000..9b565137 --- /dev/null +++ b/docs/source/sphinx_extensions/custom_roles.py @@ -0,0 +1,88 @@ +import os +from sphinx.roles import XRefRole +from docutils import nodes, utils +from pprint import pprint +from _util import list_neurodata_types + + +class MatClassRole(XRefRole): + def process_link(self, env, refnode, has_explicit_title, title, target): + """ + Process the link for fundamental matlab classes. 
+ """ + + # External class handling + base_url = "https://www.mathworks.com/help/matlab/ref/" + refnode["refuri"] = f"{base_url}{target}.html" + #refnode["mat:class"] = env.temp_data.get("mat:class") + #refnode["reftype"] = "class" + #print(refnode["mat:class"]) + #print(refnode["refuri"]) + if not has_explicit_title: + title = target + return title, target + + # Could this method be a way to fix the transformation to a file:// uri for the link target? + #def result_nodes(self, document, env, node, is_ref): + # pprint(vars(node)) + # return [node], [] + + +def register_matlab_types(app, env, docname): + """ + Register MATLAB types in the 'mat' domain with external links. + """ + + # MATLAB types and their corresponding external URLs + matlab_types = { + "double": "https://www.mathworks.com/help/matlab/ref/double.html", + "single": "https://www.mathworks.com/help/matlab/ref/single.html", + "int8": "https://www.mathworks.com/help/matlab/ref/int8.html", + "uint8": "https://www.mathworks.com/help/matlab/ref/uint8.html", + "int16": "https://www.mathworks.com/help/matlab/ref/int16.html", + "uint16": "https://www.mathworks.com/help/matlab/ref/uint16.html", + "int32": "https://www.mathworks.com/help/matlab/ref/int32.html", + "uint32": "https://www.mathworks.com/help/matlab/ref/uint32.html", + "int64": "https://www.mathworks.com/help/matlab/ref/int64.html", + "uint64": "https://www.mathworks.com/help/matlab/ref/uint64.html", + "logical": "https://www.mathworks.com/help/matlab/ref/logical.html", + "char": "https://www.mathworks.com/help/matlab/ref/char.html", + "cell": "https://www.mathworks.com/help/matlab/ref/cell.html", + "struct": "https://www.mathworks.com/help/matlab/ref/struct.html", + "table": "https://www.mathworks.com/help/matlab/ref/table.html", + "categorical": "https://www.mathworks.com/help/matlab/ref/categorical.html", + "datetime": "https://www.mathworks.com/help/matlab/ref/datetime.html", + "duration": "https://www.mathworks.com/help/matlab/ref/duration.html", + "calendarDuration": "https://www.mathworks.com/help/matlab/ref/calendarduration.html", + "function_handle": "https://www.mathworks.com/help/matlab/ref/function_handle.html", + "string": "https://www.mathworks.com/help/matlab/ref/string.html", + "complex": "https://www.mathworks.com/help/matlab/ref/complex.html", + } + + # Add MATLAB types to the mat domain + if "objects" not in env.domaindata["mat"]: + env.domaindata["mat"]["objects"] = {} + + for type_name, url in matlab_types.items(): + # Register the type with a special 'external' object type + env.domaindata["mat"]["objects"][type_name] = (url, "matclass") + + +def register_type_short_names(app, env, docname): +# register_type_short_names - Register short names for neurodata types as classes + + if "objects" not in env.domaindata["mat"]: + env.domaindata["mat"]["objects"] = {} + + # List of modules to process + modules = ["core", "hdmf_common", "hdmf_experimental"] + + # Loop through the modules + for module in modules: + # List the neurodata types for the current module + nwb_types = list_neurodata_types(module) + for type_name in nwb_types: + # Register the type with as a 'class' object type + docname = f"pages/neurodata_types/{module}/{type_name}" + env.domaindata["mat"]["objects"][type_name] = (docname, "class") + diff --git a/docs/source/sphinx_extensions/docstring_processors.py b/docs/source/sphinx_extensions/docstring_processors.py new file mode 100644 index 00000000..4bf0221a --- /dev/null +++ b/docs/source/sphinx_extensions/docstring_processors.py @@ -0,0 +1,242 
@@ +import os +import re +from _util import list_neurodata_types + + +def process_matlab_docstring(app, what, name, obj, options, lines): + _format_matlab_type_as_code_literal(lines) + _format_nwbtype_shortnames(lines) + _format_required_properties(lines) + _replace_class_contructor_method_role_with_class_role(lines) + _make_syntax_examples_code_literals(lines) + _format_input_arguments(lines) + _split_and_format_example_lines(lines) + + +def _format_matlab_type_as_code_literal(lines): + # Full list of MATLAB base types + matlab_types = { + "double", "single", "int8", "uint8", "int16", "uint16", + "int32", "uint32", "int64", "uint64", "logical", "char", + "cell", "struct", "table", "categorical", "datetime", + "duration", "calendarDuration", "function_handle", + "string", "complex" + } + + type_pattern = re.compile( + rf"(?P<before>\()" + rf"(?P<type>{'|'.join(re.escape(t) for t in matlab_types)})" + rf"(?P<after>\))" + ) + + for i, line in enumerate(lines): + # Replace matches with inline code formatting, preserving parentheses + # lines[i] = type_pattern.sub( + # lambda match: ( + # f"{match.group('before') or ''}" + # f"``{match.group('type')}``" + # f"{match.group('after') or ''}" + # ), + # line + # ) + lines[i] = type_pattern.sub( + lambda match: ( + f"{match.group('before') or ''}" + f":matclass:`{match.group('type')}`" + f"{match.group('after') or ''}" + ), + line + ) + + +def _format_required_properties(lines): + """ + Process lines to find the 'Required Properties' section and format its values. + + Args: + lines (list of str): Lines from a docstring to process. + """ + + try: + # Find the index of the "Required Properties:" line + required_idx = next(i for i, line in enumerate(lines) if "Required Properties:" in line) + + if not required_idx: + return + + lines[required_idx] = lines[required_idx].replace("Required Properties:", "Required Properties\ `*`__:") + + # Process the line following "Required Properties:" + required_line = lines[required_idx + 1] + + values = required_line.strip().split(", ") + + # Format the values + formatted_values = [ + f":attr:`{value.strip()}`" if value.lower() != "none" else f"``{value.strip()}``" + for value in values + ] + # Update the line with required properties. Add single preceding space + # for proper indentation + lines[required_idx + 1] = " " + ", ".join(formatted_values) + + except (StopIteration, IndexError): + # If "Required Properties:" or the following line is not found, return the original lines + pass + + +def _replace_class_contructor_method_role_with_class_role(lines): + """ + Process docstrings to replace `:meth:` expressions with `:class:`. + + Args: + lines: List of lines in the docstring. 
+ """ + + # Regular expression to match the `:meth:` pattern + pattern = re.compile( + r":meth:`([^`]+)\s*<([a-zA-Z0-9_.]+)\.([a-zA-Z0-9_]+)\.([a-zA-Z0-9_]+)>`" + ) + + # Replacement function for re.sub + def replace_meth_with_class(match): + display_name = match.group(1).replace("()", "").strip() # The displayed name, e.g., "AbstractFeatureSeries" + namespace_prefix = match.group(2) # The module path, e.g., "types.core" + class_name = match.group(3) # The class name, e.g., "AbstractFeatureSeries" + # Construct the new :class: pattern + return f":class:`{display_name} <{namespace_prefix}.{class_name}>`" + + # Update lines in place + for i, line in enumerate(lines): + lines[i] = pattern.sub(replace_meth_with_class, line) + + +def _make_syntax_examples_code_literals(lines): + """ + Process a MATLAB docstring to wrap expressions in the Syntax section with double backticks. + + Args: + lines (str): The original MATLAB docstring lines. + """ + + in_syntax_section = False + + # Regex to match MATLAB expressions + matlab_expr_pattern = re.compile( + r"^\s*((?:\[[\w,\s]*\]\s*=\s*|[\w]+\s*=\s*)?[A-Za-z][\w\.]*\([^)]*\))" + ) + + for i, line in enumerate(lines): + # Check if the current line starts the Syntax section + if line.strip().lower().startswith("syntax:"): + in_syntax_section = True + continue + + # Check if the current line is another section header + if in_syntax_section and _is_section_header(line) and not line.strip().lower().startswith("syntax:"): + in_syntax_section = False + + if in_syntax_section: + # Wrap MATLAB expressions in double backticks + match = matlab_expr_pattern.search(line) + if match: + # Need group 1 as group 0 contains the leading whitespace...? + line = matlab_expr_pattern.sub(lambda m: f"``{m.group(1)}``", line) + # Need to prepend a leading space, no idea why. + lines[i] = " " + line + + +def _format_input_arguments(lines): + """ + Format the 'Input Arguments' section to add double ** around item names + and `` around types in parentheses. + + Args: + lines (list of str): List of lines in the Input Arguments section. + """ + + # Regex pattern for list item names with optional types in parentheses + input_arg_pattern = re.compile( + r"(?P<indent>^\s*)-\s*(?P<name>\w+)" # Match the name of the argument + r"(?:\s*\((?P<type>.*?)\))?" # Optionally match the type in parentheses + ) + + for i, line in enumerate(lines): + # Apply formatting to each matching line + lines[i] = input_arg_pattern.sub( + lambda match: ( + f"{match.group('indent')}- **{match.group('name').strip()}**" + # Name + ( # Optional type + f" ({match.group('type').strip()})" # Preserve existing formatting + if match.group('type') and ( + match.group('type').strip().startswith("``") or # Already backtick-formatted + match.group('type').strip().startswith(":") # Sphinx directive + ) + else f" (``{match.group('type').strip()}``)" # Add backticks if unformatted + ) if match.group('type') else "" # No type provided + ), + line + ) + + +def _split_and_format_example_lines(lines): + """ + Split and format example lines like: + 'Example 1 - Export an NWB file:' + into two lines: + '**Example 1.**' + '**Export an NWB file**::' + + Modifies the `lines` list in place. + + Args: + lines (list of str): List of lines in the Usage section. 
+ """ + + # Regex pattern to match example lines with descriptions + example_pattern = re.compile( + r"^\s*(Example\s+\d+)\s*-\s*(.*)::\s*$" # Matches 'Example X - Description:' + ) + + i = 0 + while i < len(lines): + # Check if the current line matches the "Example X - Description:" format + match = example_pattern.match(lines[i]) + if match: + example, description = match.groups() + # Replace the original line with two formatted lines + lines[i] = f" **{example} -**" # Important: add one space at beginning of line for proper rst indent + lines.insert(i + 1, f" **{description}**::") # Important: add one space at beginning of line for proper rst indent + i += 2 # Skip over the newly added line + else: + i += 1 # Move to the next line if no match + + +def _format_nwbtype_shortnames(lines): + """ + Preprocesses a list of docstring lines to replace occurrences of patterns + with their respective :class:`pattern` references. + + Modifies the list of lines in place. + + Parameters: + lines (list of str): The docstring lines to preprocess. + """ + + patterns = list_neurodata_types('core') + list_neurodata_types('hdmf_common') + # Create a dictionary for replacements + replacements = {pattern: f"(:class:`{pattern}`)" for pattern in patterns} + + # Compile a regex that matches any of the patterns + regex = re.compile(r'\((' + '|'.join(map(re.escape, patterns)) + r')\)') + + # Iterate over the lines and replace matches in place + for i in range(len(lines)): + lines[i] = regex.sub(lambda match: replacements[match.group(1)], lines[i]) + + +def _is_section_header(line): + # Regex to identify section headers + section_header_pattern = re.compile(r"^\s*%?\s*[A-Za-z ]+:") + + return section_header_pattern.match(line) diff --git a/generateCore.m b/generateCore.m deleted file mode 100644 index 7349b8e1..00000000 --- a/generateCore.m +++ /dev/null @@ -1,52 +0,0 @@ -function generateCore(version, options) - % GENERATECORE Generate Matlab classes from NWB core schema files - % GENERATECORE() Generate classes (Matlab m-files) from the - % NWB core namespace file. By default, generates off of the most recent nwb-schema - % release. - % - % GENERATECORE(version) Generate classes for the - % core namespace of the listed version. - % - % A cache of schema data is generated in the 'namespaces' subdirectory in - % the current working directory. This is for allowing cross-referencing - % classes between multiple namespaces. - % - % Output files are generated placed in a '+types' subdirectory in the - % current working directory. - % - % GENERATECORE(__, 'savedir', saveDirectory) Generates the core class - % files in the specified directory. - % - % Example: - % generateCore(); - % generateCore('2.2.3'); - % - % See also GENERATEEXTENSION - - arguments - version (1,1) string {matnwb.common.mustBeValidSchemaVersion} = "latest" - options.savedir (1,1) string = misc.getMatnwbDir() - end - - if version == "latest" - version = matnwb.common.findLatestSchemaVersion(); - end - - schemaPath = fullfile(misc.getMatnwbDir(), "nwb-schema", version); - corePath = fullfile(schemaPath, "core", "nwb.namespace.yaml"); - commonPath = fullfile(schemaPath, ... - "hdmf-common-schema", ... - "common", ... - "namespace.yaml"); - assert(isfile(corePath), ... - 'NWB:GenerateCore:MissingCoreSchema', ... - 'Cannot find suitable core namespace for schema version `%s`', ... 
- version); - - namespaceFiles = corePath; - if isfile(commonPath) - % Important: generate common before core if common is available - namespaceFiles = [commonPath, namespaceFiles]; - end - generateExtension(namespaceFiles{:}, 'savedir', options.savedir); -end diff --git a/logo/logo_favicon.svg b/logo/logo_favicon.svg new file mode 100644 index 00000000..8e88e7c4 --- /dev/null +++ b/logo/logo_favicon.svg @@ -0,0 +1,584 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/nwbExport.m b/nwbExport.m deleted file mode 100644 index 655d891d..00000000 --- a/nwbExport.m +++ /dev/null @@ -1,32 +0,0 @@ -function nwbExport(nwbFileObjects, filePaths, mode) - %NWBEXPORT Writes an NWB file. - % nwbRead(nwb, filename) Writes the nwb object to a file at filename. - % - % Example: - % % Generate Matlab code for the NWB objects from the core schema. - % % This only needs to be done once. - % generateCore('schema\core\nwb.namespace.yaml'); - % % Create some fake fata and write - % nwb = NwbFile; - % nwb.session_start_time = datetime('now'); - % nwb.identifier = 'EMPTY'; - % nwb.session_description = 'empty test file'; - % nwbExport(nwb, 'empty.nwb'); - % - % See also GENERATECORE, GENERATEEXTENSION, NWBFILE, NWBREAD - - arguments - nwbFileObjects (1,:) NwbFile {mustBeNonempty} - filePaths (1,:) string {mustBeNonzeroLengthText} - mode (1,1) string {mustBeMember(mode, ["edit", "overwrite"])} = "edit" - end - - assert(length(nwbFileObjects) == length(filePaths), ... - 'NWB:Export:FilepathLengthMismatch', ... - 'Lists of NWB objects to export and list of file paths must be the same length.') - - for iFiles = 1:length(nwbFileObjects) - filePath = char(filePaths(iFiles)); - nwbFileObjects(iFiles).export(filePath, mode); - end -end diff --git a/tools/documentation/_rst_templates/class.rst.template b/tools/documentation/_rst_templates/class.rst.template new file mode 100644 index 00000000..76a025ee --- /dev/null +++ b/tools/documentation/_rst_templates/class.rst.template @@ -0,0 +1,7 @@ +{{ function_name }} +{{ function_header_underline }} + +.. mat:module:: {{ module_name }} +.. autoclass:: {{ full_function_name }} + :members: + :show-inheritance: diff --git a/tools/documentation/_rst_templates/function.rst.template b/tools/documentation/_rst_templates/function.rst.template new file mode 100644 index 00000000..ef77bf98 --- /dev/null +++ b/tools/documentation/_rst_templates/function.rst.template @@ -0,0 +1,5 @@ +{{ function_name }} +{{ function_header_underline }} + +.. mat:module:: . +.. autofunction:: {{ full_function_name }} diff --git a/tools/documentation/_rst_templates/index_functions.rst.template b/tools/documentation/_rst_templates/index_functions.rst.template new file mode 100644 index 00000000..102aa5f4 --- /dev/null +++ b/tools/documentation/_rst_templates/index_functions.rst.template @@ -0,0 +1,10 @@ +MatNWB Functions +================ + +These are the main functions of the MatNWB API + +.. 
toctree:: + :maxdepth: 2 + :caption: Functions + +{{ function_list }} diff --git a/tools/documentation/_rst_templates/index_nwb_types.rst.template b/tools/documentation/_rst_templates/index_nwb_types.rst.template new file mode 100644 index 00000000..3248802b --- /dev/null +++ b/tools/documentation/_rst_templates/index_nwb_types.rst.template @@ -0,0 +1,10 @@ +{{ section_title }} +{{ section_title_underline }} + +These are the MatNWB neurodata types from the {{ namespace_name }} schema specification. + +.. toctree:: + :maxdepth: 2 + :caption: Functions + +{{ function_list }} diff --git a/tools/documentation/_rst_templates/index_tutorials.rst.template b/tools/documentation/_rst_templates/index_tutorials.rst.template new file mode 100644 index 00000000..987ebf45 --- /dev/null +++ b/tools/documentation/_rst_templates/index_tutorials.rst.template @@ -0,0 +1,8 @@ +Tutorials +========= + +.. toctree:: + :maxdepth: 1 + :caption: Tutorials + +{{ file_list }} diff --git a/tools/documentation/_rst_templates/neurodata_class.rst.template b/tools/documentation/_rst_templates/neurodata_class.rst.template new file mode 100644 index 00000000..1697b13a --- /dev/null +++ b/tools/documentation/_rst_templates/neurodata_class.rst.template @@ -0,0 +1,23 @@ +{{ class_name }} +{{ class_name_header_underline }} + +See also: + :nwb-{{ namespace_name }}-type-schema:`Format Specification<{{ lower_class_name }}>` + or :nwb-{{ namespace_name }}-type-schema:`Source Specification` for {{ class_name }}. + +.. mat:module:: {{ module_name }} +.. autoclass:: {{ full_class_name }} + :members: + :show-inheritance: + + +.. + Add an anonymous reference target which should be linked to from the + Required Properties section + +__ + +.. tip:: + + **\*** If a required property link is not functional, the property may be + defined in a superclass. Please refer to the superclass documentation. diff --git a/tools/documentation/_rst_templates/tutorial.rst.template b/tools/documentation/_rst_templates/tutorial.rst.template new file mode 100644 index 00000000..938de7ad --- /dev/null +++ b/tools/documentation/_rst_templates/tutorial.rst.template @@ -0,0 +1,20 @@ +{{tutorial_title}} +{{tutorial_title_underline}} + +.. image:: https://www.mathworks.com/images/responsive/global/open-in-matlab-online.svg + :target: https://matlab.mathworks.com/open/github/v1?repo=NeurodataWithoutBorders/matnwb&file=tutorials/{{tutorial_name}}.mlx + :alt: Open in MATLAB Online +.. image:: https://img.shields.io/badge/View-Full_Page-blue + :target: {{static_html_path}} + :alt: View full page +{{youtube_badge_block}} + +.. raw:: html + + + diff --git a/tools/documentation/_rst_templates/youtube_badge.rst.template b/tools/documentation/_rst_templates/youtube_badge.rst.template new file mode 100644 index 00000000..2338c72a --- /dev/null +++ b/tools/documentation/_rst_templates/youtube_badge.rst.template @@ -0,0 +1,3 @@ +.. image:: https://img.shields.io/badge/View-Youtube-red + :target: {{youtube_url}} + :alt: View tutorial on YouTube \ No newline at end of file diff --git a/tools/documentation/matnwb_exportTutorials.m b/tools/documentation/matnwb_exportTutorials.m index b514a56f..72157c22 100644 --- a/tools/documentation/matnwb_exportTutorials.m +++ b/tools/documentation/matnwb_exportTutorials.m @@ -18,17 +18,22 @@ function matnwb_exportTutorials(options) options.IgnoreFiles (1,:) string = ["basicUsage", "read_demo", "remote_read"]; options.RunLivescript (1,1) logical = true end + + EXPORT_FOLDERS = dictionary(... 
+ '.m', fullfile(misc.getMatnwbDir, "tutorials", "private", "mcode"), ... + '.html', fullfile(misc.getMatnwbDir, "docs", "source", "_static", "html", "tutorials") ); [exportFormat, targetFolderNames] = deal(options.ExportFormat); targetFolderNames = extractAfter(targetFolderNames, "."); - targetFolderNames(strcmp(targetFolderNames, "m")) = fullfile("private", "mcode"); - nwbTutorialDir = fullfile(misc.getMatnwbDir, "tutorials"); targetFolderPaths = fullfile(nwbTutorialDir, targetFolderNames); - - for folderPath = targetFolderPaths - if ~isfolder(folderPath); mkdir(folderPath); end + + for i = 1:numel(exportFormat) + if isKey(EXPORT_FOLDERS, exportFormat(i)) + targetFolderPaths(i) = EXPORT_FOLDERS(exportFormat(i)); + end + if ~isfolder(targetFolderPaths(i)); mkdir(targetFolderPaths(i)); end end if isempty(options.FilePaths) @@ -78,9 +83,12 @@ function matnwb_exportTutorials(options) end for j = 1:numel(exportFormat) - targetPath = fullfile(targetFolderPaths(j), fileNames(i) + exportFormat(j)); + targetFilePath = fullfile(targetFolderPaths(j), fileNames(i) + exportFormat(j)); fprintf('Exporting livescript "%s" to "%s"\n', fileNames(i), exportFormat(j)) - export(sourcePath, strrep(targetPath, '.mlx', exportFormat(j))); + export(sourcePath, targetFilePath); + if strcmp(exportFormat(j), '.html') + postProcessLivescriptHtml(targetFilePath) + end end end end diff --git a/tools/documentation/matnwb_generateRstFilesFromCode.m b/tools/documentation/matnwb_generateRstFilesFromCode.m new file mode 100644 index 00000000..8ff91811 --- /dev/null +++ b/tools/documentation/matnwb_generateRstFilesFromCode.m @@ -0,0 +1,7 @@ +function matnwb_generateRstFilesFromCode() + generateRstForTutorials() + generateRstForNwbFunctions() + generateRstForNeurodataTypeClasses('core') + generateRstForNeurodataTypeClasses('hdmf_common') + generateRstForNeurodataTypeClasses('hdmf_experimental') +end \ No newline at end of file diff --git a/tools/documentation/private/filewrite.m b/tools/documentation/private/filewrite.m new file mode 100644 index 00000000..ab8868b1 --- /dev/null +++ b/tools/documentation/private/filewrite.m @@ -0,0 +1,5 @@ +function filewrite(filePath, text) + fid = fopen(filePath, 'wt'); + fwrite(fid, text); + fclose(fid); +end \ No newline at end of file diff --git a/tools/documentation/private/fillTemplate.m b/tools/documentation/private/fillTemplate.m new file mode 100644 index 00000000..71d72104 --- /dev/null +++ b/tools/documentation/private/fillTemplate.m @@ -0,0 +1,9 @@ +function template = fillTemplate(template, data) + fields = fieldnames(data); + for i = 1:numel(fields) + if ~isstruct(data.(fields{i})) && ~iscell(data.(fields{i})) + placeholder = sprintf('{{ %s }}', fields{i}); + template = strrep(template, placeholder, string(data.(fields{i}))); + end + end +end diff --git a/tools/documentation/private/generateRstForNeurodataTypeClasses.m b/tools/documentation/private/generateRstForNeurodataTypeClasses.m new file mode 100644 index 00000000..ed4c085d --- /dev/null +++ b/tools/documentation/private/generateRstForNeurodataTypeClasses.m @@ -0,0 +1,60 @@ +function generateRstForNeurodataTypeClasses(namespaceName) +% generateRstForNeurodataTypeClasses Generate rst files for each Neurodata matnwb class + + arguments + namespaceName (1,1) string + end + namespaceName = char(namespaceName); + + rootDir = misc.getMatnwbDir(); + classFiles = dir(fullfile(rootDir, '+types', ['+', namespaceName], '*.m')); + + docsSourceRootDir = fullfile(misc.getMatnwbDir, 'docs', 'source'); + exportDir = 
fullfile(docsSourceRootDir, 'pages', 'neurodata_types', namespaceName); + if ~isfolder(exportDir); mkdir(exportDir); end + + functionTemplate = fileread( getRstTemplateFile('function') ); + classTemplate = fileread( getRstTemplateFile('neurodata_class') ); + + for i = 1:numel(classFiles) + iFile = fullfile(classFiles(i).folder, classFiles(i).name); + [~, fileName] = fileparts(iFile); + + data.module_name = sprintf('types.%s', namespaceName); + data.namespace_name = namespaceName; + data.class_name = fileName; + data.lower_class_name = lower(data.class_name); + data.class_name_header_underline = repmat('=', 1, numel(data.class_name)); + data.full_class_name = sprintf('types.%s.%s', namespaceName, fileName); + + mc = meta.class.fromName(data.full_class_name); + if isempty(mc) + currentTemplate = functionTemplate; + else + currentTemplate = classTemplate; + end + + thisRst = fillTemplate(currentTemplate, data); + rstFilePath = fullfile(exportDir, [fileName, '.rst']); + filewrite(rstFilePath, thisRst); + end + + % Create index + indexTemplate = fileread( getRstTemplateFile('index_nwb_types') ); + [~, functionNames] = fileparts(string({classFiles.name})); + data.function_list = strjoin(" "+functionNames, newline); + + switch namespaceName + case 'core' + data.section_title = "Core Neurodata Types"; + case 'hdmf_common' + data.section_title = "HDMF-Common Data Types"; + case 'hdmf_experimental' + data.section_title = "HDMF-Experimental Data Types"; + end + data.section_title_underline = repmat('=', 1, strlength(data.section_title)); + + thisRst = fillTemplate(indexTemplate, data); + rstFilePath = fullfile(exportDir, ['index', '.rst']); + filewrite(rstFilePath, thisRst); +end diff --git a/tools/documentation/private/generateRstForNwbFunctions.m b/tools/documentation/private/generateRstForNwbFunctions.m new file mode 100644 index 00000000..d6f1dd68 --- /dev/null +++ b/tools/documentation/private/generateRstForNwbFunctions.m @@ -0,0 +1,52 @@ +function generateRstForNwbFunctions() +% generateRstForNwbFunctions + + % List, filter and sort files to generate. 
Todo: simplify + rootDir = misc.getMatnwbDir(); + rootFiles = dir(rootDir); + rootFileNames = {rootFiles.name}; + rootWhitelist = {'nwbRead.m', 'NwbFile.m', 'nwbExport.m', 'generateCore.m', 'generateExtension.m', 'nwbClearGenerated.m'};%, 'nwbInstallExtension.m'}; + isWhitelisted = ismember(rootFileNames, rootWhitelist); + + rootFiles(~isWhitelisted) = []; + + [~, ~, iC] = intersect(rootWhitelist, {rootFiles.name}, 'stable'); + rootFiles = rootFiles(iC); + + docsSourceRootDir = fullfile(misc.getMatnwbDir, 'docs', 'source'); + exportDir = fullfile(docsSourceRootDir, 'pages', 'functions'); + if ~isfolder(exportDir); mkdir(exportDir); end + + functionTemplate = fileread( getRstTemplateFile('function') ); + classTemplate = fileread( getRstTemplateFile('class') ); + + for i = 1:numel(rootFiles) + iFile = fullfile(rootFiles(i).folder, rootFiles(i).name); + [~, functionName] = fileparts(iFile); + + mc = meta.class.fromName(functionName); + if isempty(mc) + currentTemplate = functionTemplate; + else + currentTemplate = classTemplate; + end + + data.function_name = functionName; + data.module_name = '.'; + data.function_header_underline = repmat('=', 1, numel(functionName)); + data.full_function_name = functionName; + + thisRst = fillTemplate(currentTemplate, data); + rstFilePath = fullfile(exportDir, [functionName, '.rst']); + filewrite(rstFilePath, thisRst); + end + + % Create index + indexTemplate = fileread( getRstTemplateFile('index_functions') ); + [~, functionNames] = fileparts(string({rootFiles.name})); + data.function_list = strjoin(" "+functionNames, newline); + + thisRst = fillTemplate(indexTemplate, data); + rstFilePath = fullfile(exportDir, ['index', '.rst']); + filewrite(rstFilePath, thisRst); +end diff --git a/tools/documentation/private/generateRstForTutorials.m b/tools/documentation/private/generateRstForTutorials.m new file mode 100644 index 00000000..ea5fdc84 --- /dev/null +++ b/tools/documentation/private/generateRstForTutorials.m @@ -0,0 +1,53 @@ +function generateRstForTutorials() +% generateRstForTutorials - Generate rst files for all the tutorial HTML files + + docsSourceRootDir = fullfile(misc.getMatnwbDir, 'docs', 'source'); + + tutorialHtmlSourceDir = fullfile(docsSourceRootDir, '_static', 'html', 'tutorials'); + tutorialRstTargetDir = fullfile(docsSourceRootDir, 'pages', 'tutorials'); + if ~isfolder(tutorialRstTargetDir); mkdir(tutorialRstTargetDir); end + + tutorialConfigFilePath = fullfile(docsSourceRootDir, '_config', 'tutorial_config.json'); + S = jsondecode(fileread(tutorialConfigFilePath)); + + rstTemplate = fileread( getRstTemplateFile('tutorial') ); + + % List all html files in source dir + L = dir(fullfile(tutorialHtmlSourceDir, '*.html')); + + for i = 1:numel(L) + thisFilePath = fullfile(L(i).folder, L(i).name); + relPath = strrep(thisFilePath, docsSourceRootDir, '../..'); + + [~, name] = fileparts(relPath); + title = S.titles.(name); + + rstOutput = replace(rstTemplate, '{{static_html_path}}', relPath); + rstOutput = replace(rstOutput, '{{tutorial_name}}', name); + rstOutput = replace(rstOutput, '{{tutorial_title}}', title); + rstOutput = replace(rstOutput, '{{tutorial_title_underline}}', repmat('=', 1, numel(title))); + + % Add the youtube badge block if the tutorial has a corresponding youtube video + if isfield(S.youtube, name) + youtubeBadge = fileread( getRstTemplateFile('youtube_badge') ); + youtubeBadge = replace(youtubeBadge, '{{youtube_url}}', S.youtube.(name)); + else + youtubeBadge = ''; + end + rstOutput = replace(rstOutput, 
'{{youtube_badge_block}}', youtubeBadge); + + rstOutputFile = fullfile(tutorialRstTargetDir, [name, '.rst']); + fid = fopen(rstOutputFile, 'wt'); + fwrite(fid, rstOutput); + fclose(fid); + end + + % Create index + indexTemplate = fileread( getRstTemplateFile('index_tutorials') ); + [~, fileNames] = fileparts(string({L.name})); + data.file_list = strjoin(" "+fileNames, newline); + + thisRst = fillTemplate(indexTemplate, data); + rstFilePath = fullfile(tutorialRstTargetDir, ['index', '.rst']); + %filewrite(rstFilePath, thisRst); +end \ No newline at end of file diff --git a/tools/documentation/private/getRstTemplateFile.m b/tools/documentation/private/getRstTemplateFile.m new file mode 100644 index 00000000..9db04ca0 --- /dev/null +++ b/tools/documentation/private/getRstTemplateFile.m @@ -0,0 +1,16 @@ +function templateFilePath = getRstTemplateFile(templateName) + arguments + templateName (1,1) string + end + fileName = templateName + ".rst.template"; + + templateFilePath = fullfile(... + misc.getMatnwbDir, ... + "tools", ... + "documentation", ... + "_rst_templates", ... + fileName ); + + assert(isfile(templateFilePath), ... + 'Template filepath not found for template with name "%s"', templateName) +end \ No newline at end of file diff --git a/tools/documentation/private/postProcessLivescriptHtml.m b/tools/documentation/private/postProcessLivescriptHtml.m new file mode 100644 index 00000000..8ab8647e --- /dev/null +++ b/tools/documentation/private/postProcessLivescriptHtml.m @@ -0,0 +1,81 @@ +function postProcessLivescriptHtml(htmlFile) + %POSTPROCESSLIVESCRIPHTML Update links in an HTML file to open in the top frame + % + % This function reads an HTML file and updates all tags with an + % href attribute starting with "https:" by adding or updating the + % target attribute to "top". The modified HTML content is written + % back to the same file. + % + % Syntax: + % postProcessLivescriptHtml(htmlFile) + % + % Input: + % htmlFile - (1,1) string: Path to the HTML file to process. + % + % Example: + % postProcessLivescriptHtml("example.html"); + % + % This will ensure that links in "example.html" with href="https:" + % open in the top frame when clicked. + + % The purpose of this function is to ensure links open in the top frame + % and not an iframe if tutorial htmls are embedded in an iframe. + + arguments + htmlFile (1,1) string {mustBeFile} + end + + % Read the content of the HTML file + htmlContent = fileread(htmlFile); + + % % Add target="top" to links with href starting with https + % updatedHtmlContent = regexprep(htmlContent, ... + % '", ... + sprintf("
%s", str)); + + % Update links: type classes + for namespaceName = ["core", "hdmf_common", "hdmf_experimental"] + updatedHtmlContent = strrep(updatedHtmlContent, ... + sprintf('https://neurodatawithoutborders.github.io/matnwb/doc/+types/+%s/',namespaceName), ... + sprintf('https://matnwb.readthedocs.io/en/latest/pages/neurodata_types/%s/',namespaceName) ); + end + + % Update links: Nwb functions + for functionName = ["NwbFile", "nwbExport", "nwbRead", "generateCore", "generateExtension"] + updatedHtmlContent = strrep(updatedHtmlContent, ... + sprintf('https://neurodatawithoutborders.github.io/matnwb/doc/%s.html', functionName), ... + sprintf('https://matnwb.readthedocs.io/en/latest/pages/functions/%s.html', functionName) ); + end + + % Update links: tutorials + updatedHtmlContent = strrep(updatedHtmlContent, ... + 'https://neurodatawithoutborders.github.io/matnwb/tutorials/html/', ... + 'https://matnwb.readthedocs.io/en/latest/pages/tutorials/' ); + + % Update links: api documentation + updatedHtmlContent = strrep(updatedHtmlContent, ... + 'https://neurodatawithoutborders.github.io/matnwb/doc/index.html', ... + 'https://matnwb.readthedocs.io/en/latest/index.html' ); + + + % Write the modified content back to the HTML file + try + fid = fopen(htmlFile, 'wt'); + if fid == -1 + error('Could not open the file for writing: %s', htmlFile); + end + fwrite(fid, updatedHtmlContent, 'char'); + fclose(fid); + catch + error('Could not write to the file: %s', htmlFile); + end +end diff --git a/tools/documentation/private/update_link_target_js.html b/tools/documentation/private/update_link_target_js.html new file mode 100644 index 00000000..27f8b8c5 --- /dev/null +++ b/tools/documentation/private/update_link_target_js.html @@ -0,0 +1,9 @@ + diff --git a/nwbtest.m b/tools/nwbtest.m similarity index 100% rename from nwbtest.m rename to tools/nwbtest.m diff --git a/+tests/+fixtures/ResetGeneratedTypesFixture.m b/tools/tests/+tests/+fixtures/ResetGeneratedTypesFixture.m similarity index 100% rename from +tests/+fixtures/ResetGeneratedTypesFixture.m rename to tools/tests/+tests/+fixtures/ResetGeneratedTypesFixture.m diff --git a/+tests/+sanity/GenerationTest.m b/tools/tests/+tests/+sanity/GenerationTest.m similarity index 100% rename from +tests/+sanity/GenerationTest.m rename to tools/tests/+tests/+sanity/GenerationTest.m diff --git a/+tests/+system/AlignedSpikeTimesUtilityTest.m b/tools/tests/+tests/+system/AlignedSpikeTimesUtilityTest.m similarity index 100% rename from +tests/+system/AlignedSpikeTimesUtilityTest.m rename to tools/tests/+tests/+system/AlignedSpikeTimesUtilityTest.m diff --git a/+tests/+system/AmendTest.m b/tools/tests/+tests/+system/AmendTest.m similarity index 100% rename from +tests/+system/AmendTest.m rename to tools/tests/+tests/+system/AmendTest.m diff --git a/+tests/+system/DynamicTableTest.m b/tools/tests/+tests/+system/DynamicTableTest.m similarity index 100% rename from +tests/+system/DynamicTableTest.m rename to tools/tests/+tests/+system/DynamicTableTest.m diff --git a/+tests/+system/ElectricalSeriesIOTest.m b/tools/tests/+tests/+system/ElectricalSeriesIOTest.m similarity index 100% rename from +tests/+system/ElectricalSeriesIOTest.m rename to tools/tests/+tests/+system/ElectricalSeriesIOTest.m diff --git a/+tests/+system/ElectrodeGroupIOTest.m b/tools/tests/+tests/+system/ElectrodeGroupIOTest.m similarity index 100% rename from +tests/+system/ElectrodeGroupIOTest.m rename to tools/tests/+tests/+system/ElectrodeGroupIOTest.m diff --git a/+tests/+system/ImagingPlaneIOTest.m 
b/tools/tests/+tests/+system/ImagingPlaneIOTest.m similarity index 100% rename from +tests/+system/ImagingPlaneIOTest.m rename to tools/tests/+tests/+system/ImagingPlaneIOTest.m diff --git a/+tests/+system/NWBFileIOTest.m b/tools/tests/+tests/+system/NWBFileIOTest.m similarity index 100% rename from +tests/+system/NWBFileIOTest.m rename to tools/tests/+tests/+system/NWBFileIOTest.m diff --git a/+tests/+system/NwbTestInterface.m b/tools/tests/+tests/+system/NwbTestInterface.m similarity index 100% rename from +tests/+system/NwbTestInterface.m rename to tools/tests/+tests/+system/NwbTestInterface.m diff --git a/+tests/+system/PhotonSeriesIOTest.m b/tools/tests/+tests/+system/PhotonSeriesIOTest.m similarity index 100% rename from +tests/+system/PhotonSeriesIOTest.m rename to tools/tests/+tests/+system/PhotonSeriesIOTest.m diff --git a/+tests/+system/PyNWBIOTest.m b/tools/tests/+tests/+system/PyNWBIOTest.m similarity index 100% rename from +tests/+system/PyNWBIOTest.m rename to tools/tests/+tests/+system/PyNWBIOTest.m diff --git a/+tests/+system/PyNWBIOTest.py b/tools/tests/+tests/+system/PyNWBIOTest.py similarity index 100% rename from +tests/+system/PyNWBIOTest.py rename to tools/tests/+tests/+system/PyNWBIOTest.py diff --git a/+tests/+system/RoundTripTest.m b/tools/tests/+tests/+system/RoundTripTest.m similarity index 100% rename from +tests/+system/RoundTripTest.m rename to tools/tests/+tests/+system/RoundTripTest.m diff --git a/+tests/+system/TimeSeriesIOTest.m b/tools/tests/+tests/+system/TimeSeriesIOTest.m similarity index 100% rename from +tests/+system/TimeSeriesIOTest.m rename to tools/tests/+tests/+system/TimeSeriesIOTest.m diff --git a/+tests/+system/UnitTimesIOTest.m b/tools/tests/+tests/+system/UnitTimesIOTest.m similarity index 100% rename from +tests/+system/UnitTimesIOTest.m rename to tools/tests/+tests/+system/UnitTimesIOTest.m diff --git a/+tests/+system/smokeTest.m b/tools/tests/+tests/+system/smokeTest.m similarity index 100% rename from +tests/+system/smokeTest.m rename to tools/tests/+tests/+system/smokeTest.m diff --git a/+tests/+unit/+common/ValidatorTest.m b/tools/tests/+tests/+unit/+common/ValidatorTest.m similarity index 100% rename from +tests/+unit/+common/ValidatorTest.m rename to tools/tests/+tests/+unit/+common/ValidatorTest.m diff --git a/+tests/+unit/+file/CloneNwbTest.m b/tools/tests/+tests/+unit/+file/CloneNwbTest.m similarity index 100% rename from +tests/+unit/+file/CloneNwbTest.m rename to tools/tests/+tests/+unit/+file/CloneNwbTest.m diff --git a/+tests/+unit/+io/IsBoolTest.m b/tools/tests/+tests/+unit/+io/IsBoolTest.m similarity index 100% rename from +tests/+unit/+io/IsBoolTest.m rename to tools/tests/+tests/+unit/+io/IsBoolTest.m diff --git a/+tests/+unit/+io/PathPartsTest.m b/tools/tests/+tests/+unit/+io/PathPartsTest.m similarity index 100% rename from +tests/+unit/+io/PathPartsTest.m rename to tools/tests/+tests/+unit/+io/PathPartsTest.m diff --git a/+tests/+unit/+io/SpaceTest.m b/tools/tests/+tests/+unit/+io/SpaceTest.m similarity index 100% rename from +tests/+unit/+io/SpaceTest.m rename to tools/tests/+tests/+unit/+io/SpaceTest.m diff --git a/+tests/+unit/+io/TypeConversionTest.m b/tools/tests/+tests/+unit/+io/TypeConversionTest.m similarity index 100% rename from +tests/+unit/+io/TypeConversionTest.m rename to tools/tests/+tests/+unit/+io/TypeConversionTest.m diff --git a/+tests/+unit/+io/WriteTest.m b/tools/tests/+tests/+unit/+io/WriteTest.m similarity index 100% rename from +tests/+unit/+io/WriteTest.m rename to 
tools/tests/+tests/+unit/+io/WriteTest.m diff --git a/+tests/+unit/+io/testCreateParsedType.m b/tools/tests/+tests/+unit/+io/testCreateParsedType.m similarity index 100% rename from +tests/+unit/+io/testCreateParsedType.m rename to tools/tests/+tests/+unit/+io/testCreateParsedType.m diff --git a/+tests/+unit/+io/testTimestampToDatetime.m b/tools/tests/+tests/+unit/+io/testTimestampToDatetime.m similarity index 100% rename from +tests/+unit/+io/testTimestampToDatetime.m rename to tools/tests/+tests/+unit/+io/testTimestampToDatetime.m diff --git a/+tests/+unit/+types/FunctionTests.m b/tools/tests/+tests/+unit/+types/FunctionTests.m similarity index 100% rename from +tests/+unit/+types/FunctionTests.m rename to tools/tests/+tests/+unit/+types/FunctionTests.m diff --git a/+tests/+unit/FunctionTests.m b/tools/tests/+tests/+unit/FunctionTests.m similarity index 100% rename from +tests/+unit/FunctionTests.m rename to tools/tests/+tests/+unit/FunctionTests.m diff --git a/+tests/+unit/PynwbTutorialTest.m b/tools/tests/+tests/+unit/PynwbTutorialTest.m similarity index 100% rename from +tests/+unit/PynwbTutorialTest.m rename to tools/tests/+tests/+unit/PynwbTutorialTest.m diff --git a/+tests/+unit/TutorialTest.m b/tools/tests/+tests/+unit/TutorialTest.m similarity index 100% rename from +tests/+unit/TutorialTest.m rename to tools/tests/+tests/+unit/TutorialTest.m diff --git a/+tests/+unit/aberrantValuesTest.m b/tools/tests/+tests/+unit/aberrantValuesTest.m similarity index 100% rename from +tests/+unit/aberrantValuesTest.m rename to tools/tests/+tests/+unit/aberrantValuesTest.m diff --git a/+tests/+unit/anonSchema/anon.anonymous.yaml b/tools/tests/+tests/+unit/anonSchema/anon.anonymous.yaml similarity index 100% rename from +tests/+unit/anonSchema/anon.anonymous.yaml rename to tools/tests/+tests/+unit/anonSchema/anon.anonymous.yaml diff --git a/+tests/+unit/anonSchema/anon.namespace.yaml b/tools/tests/+tests/+unit/anonSchema/anon.namespace.yaml similarity index 100% rename from +tests/+unit/anonSchema/anon.namespace.yaml rename to tools/tests/+tests/+unit/anonSchema/anon.namespace.yaml diff --git a/+tests/+unit/anonTest.m b/tools/tests/+tests/+unit/anonTest.m similarity index 100% rename from +tests/+unit/anonTest.m rename to tools/tests/+tests/+unit/anonTest.m diff --git a/+tests/+unit/boolSchema/bool.bools.yaml b/tools/tests/+tests/+unit/boolSchema/bool.bools.yaml similarity index 100% rename from +tests/+unit/boolSchema/bool.bools.yaml rename to tools/tests/+tests/+unit/boolSchema/bool.bools.yaml diff --git a/+tests/+unit/boolSchema/bool.namespace.yaml b/tools/tests/+tests/+unit/boolSchema/bool.namespace.yaml similarity index 100% rename from +tests/+unit/boolSchema/bool.namespace.yaml rename to tools/tests/+tests/+unit/boolSchema/bool.namespace.yaml diff --git a/+tests/+unit/boolTest.m b/tools/tests/+tests/+unit/boolTest.m similarity index 100% rename from +tests/+unit/boolTest.m rename to tools/tests/+tests/+unit/boolTest.m diff --git a/+tests/+unit/compoundSchema/cs.compoundtypes.yaml b/tools/tests/+tests/+unit/compoundSchema/cs.compoundtypes.yaml similarity index 100% rename from +tests/+unit/compoundSchema/cs.compoundtypes.yaml rename to tools/tests/+tests/+unit/compoundSchema/cs.compoundtypes.yaml diff --git a/+tests/+unit/compoundSchema/cs.namespace.yaml b/tools/tests/+tests/+unit/compoundSchema/cs.namespace.yaml similarity index 100% rename from +tests/+unit/compoundSchema/cs.namespace.yaml rename to tools/tests/+tests/+unit/compoundSchema/cs.namespace.yaml diff --git 
a/+tests/+unit/dataPipeTest.m b/tools/tests/+tests/+unit/dataPipeTest.m similarity index 100% rename from +tests/+unit/dataPipeTest.m rename to tools/tests/+tests/+unit/dataPipeTest.m diff --git a/+tests/+unit/dataStubTest.m b/tools/tests/+tests/+unit/dataStubTest.m similarity index 100% rename from +tests/+unit/dataStubTest.m rename to tools/tests/+tests/+unit/dataStubTest.m diff --git a/+tests/+unit/dynamicTableTest.m b/tools/tests/+tests/+unit/dynamicTableTest.m similarity index 100% rename from +tests/+unit/dynamicTableTest.m rename to tools/tests/+tests/+unit/dynamicTableTest.m diff --git a/+tests/+unit/linkTest.m b/tools/tests/+tests/+unit/linkTest.m similarity index 100% rename from +tests/+unit/linkTest.m rename to tools/tests/+tests/+unit/linkTest.m diff --git a/+tests/+unit/multipleConstrainedSchema/mcs.manyset.yaml b/tools/tests/+tests/+unit/multipleConstrainedSchema/mcs.manyset.yaml similarity index 100% rename from +tests/+unit/multipleConstrainedSchema/mcs.manyset.yaml rename to tools/tests/+tests/+unit/multipleConstrainedSchema/mcs.manyset.yaml diff --git a/+tests/+unit/multipleConstrainedSchema/mcs.namespace.yaml b/tools/tests/+tests/+unit/multipleConstrainedSchema/mcs.namespace.yaml similarity index 100% rename from +tests/+unit/multipleConstrainedSchema/mcs.namespace.yaml rename to tools/tests/+tests/+unit/multipleConstrainedSchema/mcs.namespace.yaml diff --git a/+tests/+unit/multipleConstrainedTest.m b/tools/tests/+tests/+unit/multipleConstrainedTest.m similarity index 100% rename from +tests/+unit/multipleConstrainedTest.m rename to tools/tests/+tests/+unit/multipleConstrainedTest.m diff --git a/+tests/+unit/multipleShapesSchema/mss.multishapes.yaml b/tools/tests/+tests/+unit/multipleShapesSchema/mss.multishapes.yaml similarity index 100% rename from +tests/+unit/multipleShapesSchema/mss.multishapes.yaml rename to tools/tests/+tests/+unit/multipleShapesSchema/mss.multishapes.yaml diff --git a/+tests/+unit/multipleShapesSchema/mss.namespace.yaml b/tools/tests/+tests/+unit/multipleShapesSchema/mss.namespace.yaml similarity index 100% rename from +tests/+unit/multipleShapesSchema/mss.namespace.yaml rename to tools/tests/+tests/+unit/multipleShapesSchema/mss.namespace.yaml diff --git a/+tests/+unit/multipleShapesTest.m b/tools/tests/+tests/+unit/multipleShapesTest.m similarity index 100% rename from +tests/+unit/multipleShapesTest.m rename to tools/tests/+tests/+unit/multipleShapesTest.m diff --git a/+tests/+unit/nwbExportTest.m b/tools/tests/+tests/+unit/nwbExportTest.m similarity index 100% rename from +tests/+unit/nwbExportTest.m rename to tools/tests/+tests/+unit/nwbExportTest.m diff --git a/+tests/+unit/read_nwbfile_with_pynwb.py b/tools/tests/+tests/+unit/read_nwbfile_with_pynwb.py similarity index 100% rename from +tests/+unit/read_nwbfile_with_pynwb.py rename to tools/tests/+tests/+unit/read_nwbfile_with_pynwb.py diff --git a/+tests/+unit/regionReferenceSchema/rrs.namespace.yaml b/tools/tests/+tests/+unit/regionReferenceSchema/rrs.namespace.yaml similarity index 100% rename from +tests/+unit/regionReferenceSchema/rrs.namespace.yaml rename to tools/tests/+tests/+unit/regionReferenceSchema/rrs.namespace.yaml diff --git a/+tests/+unit/regionReferenceSchema/rrs.regref.yaml b/tools/tests/+tests/+unit/regionReferenceSchema/rrs.regref.yaml similarity index 100% rename from +tests/+unit/regionReferenceSchema/rrs.regref.yaml rename to tools/tests/+tests/+unit/regionReferenceSchema/rrs.regref.yaml diff --git a/+tests/+unit/regionViewTest.m 
b/tools/tests/+tests/+unit/regionViewTest.m similarity index 100% rename from +tests/+unit/regionViewTest.m rename to tools/tests/+tests/+unit/regionViewTest.m diff --git a/+tests/+unit/searchTest.m b/tools/tests/+tests/+unit/searchTest.m similarity index 100% rename from +tests/+unit/searchTest.m rename to tools/tests/+tests/+unit/searchTest.m diff --git a/+tests/+unit/untypedSetTest.m b/tools/tests/+tests/+unit/untypedSetTest.m similarity index 100% rename from +tests/+unit/untypedSetTest.m rename to tools/tests/+tests/+unit/untypedSetTest.m diff --git a/+tests/+util/addFolderToPythonPath.m b/tools/tests/+tests/+util/addFolderToPythonPath.m similarity index 100% rename from +tests/+util/addFolderToPythonPath.m rename to tools/tests/+tests/+util/addFolderToPythonPath.m diff --git a/+tests/+util/getProjectDirectory.m b/tools/tests/+tests/+util/getProjectDirectory.m similarity index 100% rename from +tests/+util/getProjectDirectory.m rename to tools/tests/+tests/+util/getProjectDirectory.m diff --git a/+tests/+util/getPythonPath.m b/tools/tests/+tests/+util/getPythonPath.m similarity index 100% rename from +tests/+util/getPythonPath.m rename to tools/tests/+tests/+util/getPythonPath.m diff --git a/+tests/+util/verifyContainerEqual.m b/tools/tests/+tests/+util/verifyContainerEqual.m similarity index 100% rename from +tests/+util/verifyContainerEqual.m rename to tools/tests/+tests/+util/verifyContainerEqual.m diff --git a/+tests/+util/verifySetEqual.m b/tools/tests/+tests/+util/verifySetEqual.m similarity index 100% rename from +tests/+util/verifySetEqual.m rename to tools/tests/+tests/+util/verifySetEqual.m diff --git a/+tests/requirements.txt b/tools/tests/requirements.txt similarity index 100% rename from +tests/requirements.txt rename to tools/tests/requirements.txt diff --git a/tutorials/html/behavior.html b/tutorials/html/behavior.html deleted file mode 100644 index 4f70f592..00000000 --- a/tutorials/html/behavior.html +++ /dev/null @@ -1,369 +0,0 @@ - -Behavior Data

Behavior Data

This tutorial will guide you in writing behavioral data to NWB.

Creating an NWB File

Create an NWBFile object with the required fields (session_description, identifier, and session_start_time) and additional metadata.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'general_experimenter', 'My Name', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', 'DOI:10.1016/j.neuron.2016.12.011'); % optional
nwb
nwb =
NwbFile with properties:

    nwb_version: '2.7.0'
    identifier: 'Mouse5_Day3'
    session_description: 'mouse in open exploration'
    session_start_time: {[2018-04-25T02:30:03.000000+02:00]}
    general_experimenter: 'My Name'
    general_institution: 'University of My Institution'
    general_related_publications: 'DOI:10.1016/j.neuron.2016.12.011'
    general_session_id: 'session_1234'
    ... (remaining properties are empty or unset)

Warning: The following required properties are missing for instance for type "NwbFile":
    timestamps_reference_time

SpatialSeries: Storing continuous spatial data

SpatialSeries is a subclass of TimeSeries that represents data in space, such as the spatial direction (e.g., of gaze or travel) or the position of an animal over time.
Create data that corresponds to x, y position over time.
position_data = [linspace(0, 10, 50); linspace(0, 8, 50)]; % 2 x nT array
In SpatialSeries data, the first dimension is always time (in seconds), the second dimension represents the x, y position. However, as described in the dimensionMapNoDataPipes tutorial, when a MATLAB array is exported to HDF5, the array is transposed. Therefore, in order to correctly export the data, in MATLAB the last dimension of an array should be time. SpatialSeries data should be stored as one continuous stream as it is acquired, not by trials as is often reshaped for analysis. Data can be trial-aligned on-the-fly using the trials table. See the trials tutorial for further information.
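As a minimal sketch (not part of the original tutorial; variable names are illustrative), data acquired with time in the first dimension can simply be transposed so that time ends up in the last MATLAB dimension:
% Hypothetical acquisition output: 50 time points x 2 coordinates (x, y)
raw_position = rand(50, 2);
% Transpose so the array becomes 2 x nT, i.e. time is the last dimension
position_data_from_tracker = raw_position'; % equivalent to permute(raw_position, [2, 1])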
For position data reference_frame indicates the zero-position, e.g. the 0,0 point might be the bottom-left corner of an enclosure, as viewed from the tracking camera.
timestamps = linspace(0, 50, 50)/ 200;
position_spatial_series = types.core.SpatialSeries( ...
'description', 'Position (x, y) in an open field.', ...
'data', position_data, ...
'timestamps', timestamps, ...
'reference_frame', '(0,0) is the bottom left corner.' ...
)
position_spatial_series =
SpatialSeries with properties:

    reference_frame: '(0,0) is the bottom left corner.'
    starting_time_unit: 'seconds'
    timestamps_interval: 1
    timestamps_unit: 'seconds'
    data: [2×50 double]
    comments: 'no comments'
    control: []
    control_description: ''
    data_continuity: ''
    data_conversion: 1
    data_offset: 0
    data_resolution: -1
    data_unit: 'meters'
    description: 'Position (x, y) in an open field.'
    starting_time: []
    starting_time_rate: []
    timestamps: [1×50 double]

Position: Storing position measured over time

To help data analysis and visualization tools know that this SpatialSeries object represents the position of the subject, store the SpatialSeries object inside a Position object, which can hold one or more SpatialSeries objects.
position = types.core.Position();
position.spatialseries.set('SpatialSeries', position_spatial_series);

Create a Behavior Processing Module

Create a processing module called "behavior" for storing behavioral data in the NWBFile, then add the Position object to the processing module.
behavior_processing_module = types.core.ProcessingModule('description', 'stores behavioral data.');
behavior_processing_module.nwbdatainterface.set("Position", position);
nwb.processing.set("behavior", behavior_processing_module);

CompassDirection: Storing view angle measured over time

Analogous to how position can be stored, we can create a SpatialSeries object for representing the view angle of the subject.
For direction data, reference_frame indicates the zero direction; in this case, "straight ahead" is 0 radians.
view_angle_data = linspace(0, 4, 50);
direction_spatial_series = types.core.SpatialSeries( ...
'description', 'View angle of the subject measured in radians.', ...
'data', view_angle_data, ...
'timestamps', timestamps, ...
'reference_frame', 'straight ahead', ...
'data_unit', 'radians' ...
);
direction = types.core.CompassDirection();
direction.spatialseries.set('spatial_series', direction_spatial_series);
We can add a CompassDirection object to the behavior processing module the same way we have added the position data.
%behavior_processing_module = types.core.ProcessingModule('description', 'stores behavioral data.'); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('CompassDirection', direction);
%nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it

BehavioralTimeSeries: Storing continuous behavior data

BehavioralTimeSeries is an interface for storing continuous behavior data, such as the speed of a subject.
speed_data = linspace(0, 0.4, 50);
 
speed_time_series = types.core.TimeSeries( ...
'data', speed_data, ...
'starting_time', 1.0, ... % NB: Important to set starting_time when using starting_time_rate
'starting_time_rate', 10.0, ... % Hz
'description', 'The speed of the subject measured over time.', ...
'data_unit', 'm/s' ...
);
 
behavioral_time_series = types.core.BehavioralTimeSeries();
behavioral_time_series.timeseries.set('speed', speed_time_series);
 
%behavior_processing_module = types.core.ProcessingModule('description', 'stores behavioral data.'); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('BehavioralTimeSeries', behavioral_time_series);
%nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it

BehavioralEvents: Storing behavioral events

BehavioralEvents is an interface for storing behavioral events. We can use it for storing the timing and amount of rewards (e.g. water amount) or lever press times.
reward_amount = [1.0, 1.5, 1.0, 1.5];
event_timestamps = [1.0, 2.0, 5.0, 6.0];
 
time_series = types.core.TimeSeries( ...
'data', reward_amount, ...
'timestamps', event_timestamps, ...
'description', 'The water amount the subject received as a reward.', ...
'data_unit', 'ml' ...
);
 
behavioral_events = types.core.BehavioralEvents();
behavioral_events.timeseries.set('lever_presses', time_series);
 
%behavior_processing_module = types.core.ProcessingModule('description', 'stores behavioral data.'); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('BehavioralEvents', behavioral_events);
%nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it
Storing only the timestamps of the events is possible with the ndx-events NWB extension. You can also add labels associated with the events with this extension. You can find information about installation and example usage here.

BehavioralEpochs: Storing intervals of behavior data

BehavioralEpochs is for storing intervals of behavior data. BehavioralEpochs uses IntervalSeries to represent the time intervals. Create an IntervalSeries object that represents the time intervals when the animal was running. IntervalSeries uses 1 to indicate the beginning of an interval and -1 to indicate the end.
run_intervals = types.core.IntervalSeries( ...
'description', 'Intervals when the animal was running.', ...
'data', [1, -1, 1, -1, 1, -1], ...
'timestamps', [0.5, 1.5, 3.5, 4.0, 7.0, 7.3] ...
);
 
behavioral_epochs = types.core.BehavioralEpochs();
behavioral_epochs.intervalseries.set('running', run_intervals);
You can add more than one IntervalSeries to a BehavioralEpochs object.
sleep_intervals = types.core.IntervalSeries( ...
'description', 'Intervals when the animal was sleeping', ...
'data', [1, -1, 1, -1], ...
'timestamps', [15.0, 30.0, 60.0, 95.0] ...
);
behavioral_epochs.intervalseries.set('sleeping', sleep_intervals);
 
% behavior_processing_module = types.core.ProcessingModule('description', 'stores behavioral data.'); % if you have not already created it
behavior_processing_module.nwbdatainterface.set('BehavioralEpochs', behavioral_epochs);
% nwb.processing.set('behavior', behavior_processing_module); % if you have not already added it

Another approach: TimeIntervals

Using TimeIntervals to represent time intervals is often preferred over BehavioralEpochs and IntervalSeries. TimeIntervals is a subclass of DynamicTable, which offers flexibility for tabular data by allowing the addition of optional columns which are not defined in the standard DynamicTable class.
sleep_intervals = types.core.TimeIntervals( ...
'description', 'Intervals when the animal was sleeping.', ...
'colnames', {'start_time', 'stop_time', 'stage'} ...
);
 
sleep_intervals.addRow('start_time', 0.3, 'stop_time', 0.35, 'stage', 1);
sleep_intervals.addRow('start_time', 0.7, 'stop_time', 0.9, 'stage', 2);
sleep_intervals.addRow('start_time', 1.3, 'stop_time', 3.0, 'stage', 3);
 
nwb.intervals.set('sleep_intervals', sleep_intervals);

EyeTracking: Storing continuous eye-tracking data of gaze direction

EyeTracking is for storing eye-tracking data which represents direction of gaze as measured by an eye tracking algorithm. An EyeTracking object holds one or more SpatialSeries objects that represent the gaze direction over time extracted from a video.
eye_position_data = [linspace(-20, 30, 50); linspace(30, -20, 50)];
 
right_eye_position = types.core.SpatialSeries( ...
'description', 'The position of the right eye measured in degrees.', ...
'data', eye_position_data, ...
'starting_time', 1.0, ... % NB: Important to set starting_time when using starting_time_rate
'starting_time_rate', 50.0, ... % Hz
'reference_frame', '(0,0) is middle', ...
'data_unit', 'degrees' ...
);
 
left_eye_position = types.core.SpatialSeries( ...
'description', 'The position of the left eye measured in degrees.', ...
'data', eye_position_data, ...
'starting_time', 1.0, ... % NB: Important to set starting_time when using starting_time_rate
'starting_time_rate', 50.0, ... % Hz
'reference_frame', '(0,0) is middle', ...
'data_unit', 'degrees' ...
);
 
eye_tracking = types.core.EyeTracking();
eye_tracking.spatialseries.set('right_eye_position', right_eye_position);
eye_tracking.spatialseries.set('left_eye_position', left_eye_position);
 
% behavior_processing_module = types.core.ProcessingModule('description', 'stores behavioral data.');
behavior_processing_module.nwbdatainterface.set('EyeTracking', eye_tracking);
% nwb.processing.set('behavior', behavior_processing_module);

PupilTracking: Storing continuous eye-tracking data of pupil size

PupilTracking is for storing eye-tracking data which represents pupil size. PupilTracking holds one or more TimeSeries objects that can represent different features such as the dilation of the pupil measured over time by a pupil tracking algorithm.
pupil_diameter = types.core.TimeSeries( ...
'description', 'Pupil diameter extracted from the video of the right eye.', ...
'data', linspace(0.001, 0.002, 50), ...
'starting_time', 1.0, ... % NB: Important to set starting_time when using starting_time_rate
'starting_time_rate', 20.0, ... % Hz
'data_unit', 'meters' ...
);
 
pupil_tracking = types.core.PupilTracking();
pupil_tracking.timeseries.set('pupil_diameter', pupil_diameter);
 
% behavior_processing_module = types.core.ProcessingModule('description', 'stores behavioral data.');
behavior_processing_module.nwbdatainterface.set('PupilTracking', pupil_tracking);
% nwb.processing.set('behavior', behavior_processing_module);

Writing the behavior data to an NWB file

All of the above commands build an NWBFile object in-memory. To write this file, use nwbExport.
% Save to tutorials/tutorial_nwb_files folder
nwbFilePath = misc.getTutorialNwbFilePath('behavior_tutorial.nwb');
nwbExport(nwb, nwbFilePath);
fprintf('Exported NWB file to "%s"\n', 'behavior_tutorial.nwb')
Exported NWB file to "behavior_tutorial.nwb"
\ No newline at end of file diff --git a/tutorials/html/dynamic_tables.html b/tutorials/html/dynamic_tables.html deleted file mode 100644 index af3f6c88..00000000 --- a/tutorials/html/dynamic_tables.html +++ /dev/null @@ -1,577 +0,0 @@ - -DynamicTables Tutorial

DynamicTables Tutorial

This is a user guide to interacting with DynamicTable objects in MatNWB.

MatNWB Setup

Start by setting up your MATLAB workspace. The code below adds the directory containing the MatNWB package to the MATLAB search path. MatNWB works by automatically creating API classes based on a defined schema.
%{
path_to_matnwb = '~/Repositories/matnwb'; % change to your own path location
addpath(genpath(path_to_matnwb));
%}

Constructing a table with initialized columns

The DynamicTable class represents a column-based table to which you can add custom columns. It consists of a description, a list of columns, and a list of row IDs. You can create a DynamicTable by first defining the VectorData objects that will make up the columns of the table. Each VectorData object must contain the same number of rows. A list of row IDs may be passed to the DynamicTable using the id argument. Row IDs are a useful way to access row information independent of row location index. The list of row IDs must be cast as an ElementIdentifiers object before being passed to the DynamicTable object. If no value is passed to id, an ElementIdentifiers object with 0-indexed row IDs will be created for you automatically.
MATLAB Syntax Note: Using column vectors is crucial to properly build vectors and tables. When defining individual values, make sure to use a semi-colon (;) instead of a comma (,) when defining the data fields of these objects.
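For example (illustration only), the separator determines the orientation of the resulting vector:
column_values = [1; 2; 3]; % 3x1 column vector - use this for VectorData 'data' fields
row_values = [1, 2, 3]; % 1x3 row vector - avoid for table columns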
col1 = types.hdmf_common.VectorData( ...
'description', 'column #1', ...
'data', [1;2] ...
);
 
col2 = types.hdmf_common.VectorData( ...
'description', 'column #2', ...
'data', {'a';'b'} ...
);
 
my_table = types.hdmf_common.DynamicTable( ...
'description', 'an example table', ...
'colnames', {'col1', 'col2'}, ...
'col1', col1, ...
'col2', col2, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0;1]) ... % 0-indexed, for compatibility with Python
);
my_table
my_table =
DynamicTable with properties:

    id: [1×1 types.hdmf_common.ElementIdentifiers]
    colnames: {'col1' 'col2'}
    description: 'an example table'
    vectordata: [2×1 types.untyped.Set]

Adding rows

You can add rows to an existing DynamicTable using the object's addRow method. One way of using this method is to pass in the names of columns as parameter names followed by the elements to append. The class of the elements of the column must match the elements to append.
my_table.addRow('col1', 3, 'col2', {'c'}, 'id', 2);

Adding columns

You can add new columns to an existing DynamicTable object using the addColumn method. One way of using this method is to pass in the names of each new column followed by the corresponding values for each new column. The height of the new columns must match the height of the table.
col3 = types.hdmf_common.VectorData('description', 'column #3', ...
'data', [100; 200; 300]);
col4 = types.hdmf_common.VectorData('description', 'column #4', ...
'data', {'a1'; 'b2'; 'c3'});
 
my_table.addColumn('col3', col3,'col4', col4);

Create MATLAB table and convert to dynamic table

As an alternative to building a dynamic table using the DynamicTable and VectorData data types, it is also possible to create a MATLAB table and convert it to a dynamic table. Let's create the same table as before, but using MATLAB's table class:
% Create a table with two variables (columns):
T = table([1;2], {'a';'b'}, 'VariableNames', {'col1', 'col2'});
T.Properties.VariableDescriptions = {'column #1', 'column #2'};

Adding rows

T(end+1, :) = {3, 'c'};

Adding variables (columns)

T = addvars(T, [100;200;300], 'NewVariableNames',{'col3'});
T.Properties.VariableDescriptions{3} = 'column #3';
 
% Alternatively, a new variable can be added directly using dot syntax.
T.col4 = {'a1'; 'b2'; 'c3'};
T.Properties.VariableDescriptions{4} = 'column #4';
T
T = 3×4 table
         col1    col2    col3    col4
    1    1       'a'     100     'a1'
    2    2       'b'     200     'b2'
    3    3       'c'     300     'c3'

Convert to dynamic table

dynamic_table = util.table2nwb(T, 'A MATLAB table that was converted to a dynamic table')
dynamic_table =
DynamicTable with properties:

    id: [1×1 types.hdmf_common.ElementIdentifiers]
    colnames: {'col1' 'col2' 'col3' 'col4'}
    description: 'A MATLAB table that was converted to a dynamic table'
    vectordata: [4×1 types.untyped.Set]

Enumerated (categorical) data

EnumData is a special type of column for storing an enumerated data type. This way each unique value is stored once, and the data references those values by index. Using this method is more efficient than storing a single value many times, and has the advantage of communicating to downstream tools that the data is categorical in nature.

Warning Regarding EnumData

EnumData is currently an experimental feature and as such should not be used in a production environment.
CellTypeElements = types.hdmf_common.VectorData(...
'description', 'fixed set of elements referenced by cell_type' ...
, 'data', {'aa', 'bb', 'cc'} ... % the enumerated elements
);
CellType = types.hdmf_experimental.EnumData( ...
'description', 'this column holds categorical variables' ... % properties derived from VectorData
, 'data', [0, 1, 2, 1, 0] ... % zero-indexed offset to elements.
, 'elements', types.untyped.ObjectView(CellTypeElements) ...
);
 
MyTable = types.hdmf_common.DynamicTable('description', 'an example table');
MyTable.vectordata.set('cell_type_elements', CellTypeElements); % the *_elements format is required for compatibility with pynwb
MyTable.addColumn('cell_type', CellType);

Ragged array columns

A table column with a different number of elements for each row is called a "ragged array column." To define a table with a ragged array column, pass both the VectorData and the corresponding VectorIndex as columns of the DynamicTable object. The VectorData columns will contain the data values. The VectorIndex column serves to indicate how to arrange the data across rows. By convention, the VectorIndex object corresponding to a particular column must have the same name with the addition of the '_index' suffix.
Below, the VectorIndex values indicate to place the 1st to 3rd (inclusive) elements of the VectorData into the first row and 4th element into the second row. The resulting table will have the cell {'1a'; '1b'; '1c'} in the first row and the cell {'2a'} in the second row.
 
col1 = types.hdmf_common.VectorData( ...
'description', 'column #1', ...
'data', {'1a'; '1b'; '1c'; '2a'} ...
);
 
col1_index = types.hdmf_common.VectorIndex( ...
'description', 'column #1 index', ...
'target',types.untyped.ObjectView(col1), ... % object view of target column
'data', [3; 4] ...
);
 
table_ragged_col = types.hdmf_common.DynamicTable( ...
'description', 'an example table', ...
'colnames', {'col1'}, ...
'col1', col1, ...
'col1_index', col1_index, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0; 1]) ... % 0-indexed, for compatibility with Python
);

Adding ragged array rows

You can add a new row to the ragged array column. Under the hood, the addRow method will add the appropriate value to the VectorIndex column to maintain proper formatting.
table_ragged_col.addRow('col1', {'3a'; '3b'; '3c'}, 'id', 2);

Accessing row elements

You can access data from entire rows of a DynamicTable object by calling the getRow method for the corresponding object. You can supply either an individual row number or a list of row numbers.
my_table.getRow(1)
ans = 1×4 table
         col1    col2    col3    col4
    1    1       'a'     100     'a1'
If you want to access values for just a subset of columns, you can pass in the 'columns' argument along with a cell array of the desired column names.
my_table.getRow(1:3, 'columns', {'col1'})
ans = 3×1 table
         col1
    1    1
    2    2
    3    3
You can also access specific rows by their corresponding row IDs, if they have been defined, by supplying a true Boolean to the 'useId' parameter.
my_table.getRow(1, 'useId', true)
ans = 1×4 table
         col1    col2    col3    col4
    1    2       'b'     200     'b2'
For ragged array columns, the getRow method will return a cell array with a different number of elements for each row.
table_ragged_col.getRow(1:2)
ans = 2×1 table
         col1
    1    [{'1a'};{'1b'};{'1c'}]
    2    1×1 cell

Accessing column elements

To access all rows from a particular column, use the .get method on the vectordata field of the DynamicTable object.
 
my_table.vectordata.get('col2').data
ans = 3×1 cell
'a'
'b'
'c'

Referencing rows of other tables

You can create a column that references rows of other tables by adding a DynamicTableRegion object as a column of a DynamicTable. This is analogous to a foreign key in a relational database. The DynamicTableRegion class takes in an ObjectView object as argument. ObjectView objects create links from one object type referencing another.
dtr_col = types.hdmf_common.DynamicTableRegion( ...
'description', 'references multiple rows of earlier table', ...
'data', [0; 1; 1; 0], ... % 0-indexed
'table',types.untyped.ObjectView(my_table) ... % object view of target table
);
 
data_col = types.hdmf_common.VectorData( ...
'description', 'data column', ...
'data', {'a'; 'b'; 'c'; 'd'} ...
);
 
dtr_table = types.hdmf_common.DynamicTable( ...
'description', 'test table with DynamicTableRegion', ...
'colnames', {'data_col', 'dtr_col'}, ...
'dtr_col', dtr_col, ...
'data_col',data_col, ...
'id',types.hdmf_common.ElementIdentifiers('data', [0; 1; 2; 3]) ...
);

Converting a DynamicTable to a MATLAB table

You can convert a DynamicTable object to a MATLAB table by making use of the object's toTable method. This is a useful way to view the whole table in a human-readable format.
my_table.toTable()
ans = 3×5 table
         id    col1    col2    col3    col4
    1    0     1       'a'     100     'a1'
    2    1     2       'b'     200     'b2'
    3    2     3       'c'     300     'c3'
When the DynamicTable object contains a column that references other tables, you can pass in a Boolean to indicate whether to include just the row indices of the referenced table. Passing in false will result in inclusion of the referenced rows as nested tables.
dtr_table.toTable(false)
ans = 4×3 table
         id    data_col    dtr_col
    1    0     'a'         1×4 table
    2    1     'b'         1×4 table
    3    2     'c'         1×4 table
    4    3     'd'         1×4 table

Creating an expandable table

When using the default HDF5 backend, each column of these tables is an HDF5 Dataset, which by default has a fixed size. This means that once a file is written, it is not possible to add a new row. If you want to be able to save this file, load it, and add more rows to the table, you will need to set this up when you create the VectorData and ElementIdentifiers columns of a DynamicTable. Specifically, you must wrap the column data with a DataPipe object. The DataPipe class takes maxSize and axis as arguments to indicate the maximum desired size of each axis and the axis along which to append, respectively. For example, creating a DataPipe object with a maxSize value equal to [Inf, 1] indicates that the number of rows may grow indefinitely. In contrast, setting maxSize equal to [8, 1] would allow the column to grow to a maximum height of 8.
% create NwbFile object with required fields
file= NwbFile( ...
'session_start_time', datetime('2021-01-01 00:00:00', 'TimeZone', 'local'), ...
'identifier', 'ident1', ...
'session_description', 'ExpandableTableTutorial' ...
);
 
% create VectorData objects with DataPipe objects
start_time_exp = types.hdmf_common.VectorData( ...
'description', 'start times column', ...
'data', types.untyped.DataPipe( ...
'data', [1, 2], ... % data must be numerical
'maxSize', Inf ...
) ...
);
 
stop_time_exp = types.hdmf_common.VectorData( ...
'description', 'stop times column', ...
'data', types.untyped.DataPipe( ...
'data', [2, 3], ... % data must be numerical
'maxSize', Inf ...
) ...
);
 
random_exp = types.hdmf_common.VectorData( ...
'description', 'random data column', ...
'data', types.untyped.DataPipe( ...
'data', rand(5, 2), ... % data must be numerical
'maxSize', [5, Inf], ...
'axis', 2 ...
) ...
);
 
ids_exp = types.hdmf_common.ElementIdentifiers( ...
'data', types.untyped.DataPipe( ...
'data', int32([0; 1]), ... % data must be numerical
'maxSize', Inf ...
) ...
);
% create expandable table
colnames = {'start_time', 'stop_time', 'randomvalues'};
file.intervals_trials = types.core.TimeIntervals( ...
'description', 'test expandable dynamic table', ...
'colnames', colnames, ...
'start_time', start_time_exp, ...
'stop_time', stop_time_exp, ...
'randomvalues', random_exp, ...
'id', ids_exp ...
);
% export file
nwbExport(file, 'expandableTableTestFile.nwb');
Now, you can read in the file, add more rows, and save it again to file.
readFile = nwbRead('expandableTableTestFile.nwb', 'ignorecache');
readFile.intervals_trials.addRow( ...
'start_time', 3, ...
'stop_time', 4, ...
'randomvalues', rand(5,1), ...
'id', 2 ...
)
nwbExport(readFile, 'expandableTableTestFile.nwb');
Note: DataPipe objects change how the dimensions of each column's data map onto the shape of the HDF5 datasets. See the README for more details.

Multidimensional Columns

The order of dimensions of multidimensional columns in MatNWB is reversed relative to the Python HDMF package (see README for detailed explanation). Therefore, the height of a multidimensional column belonging to a DynamicTable object is defined by the shape of its last dimension. A valid DynamicTable must have matched height across columns.

Constructing multidimensional columns

% Define 1D column
simple_col = types.hdmf_common.VectorData( ...
'description', '1D column',...
'data', rand(10,1) ...
);
% Define ND column
multi_col = types.hdmf_common.VectorData( ...
'description', 'multidimensional column',...
'data', rand(3,2,10) ...
);
% construct table
multi_dim_table = types.hdmf_common.DynamicTable( ...
'description','test table', ...
'colnames', {'simple','multi'}, ...
'simple', simple_col, ...
'multi', multi_col, ...
'id', types.hdmf_common.ElementIdentifiers('data', (0:9)') ... % 0-indexed, for compatibility with Python
);
 

Multidimensional ragged array columns

DynamicTable objects with multidimensional ragged array columns can be constructed by passing in the corresponding VectorIndex column
% Define column with data
multi_ragged_col = types.hdmf_common.VectorData( ...
'description', 'multidimensional ragged array column',...
'data', rand(2,3,5) ...
);
% Define column with VectorIndex
multi_ragged_index = types.hdmf_common.VectorIndex( ...
'description', 'index to multi_ragged_col', ...
'target', types.untyped.ObjectView(multi_ragged_col),'data', [2; 3; 5] ...
);
 
multi_ragged_table = types.hdmf_common.DynamicTable( ...
'description','test table', ...
'colnames', {'multi_ragged'}, ...
'multi_ragged', multi_ragged_col, ...
'multi_ragged_index', multi_ragged_index, ...
'id', types.hdmf_common.ElementIdentifiers('data', [0; 1; 2]) ... % 0-indexed, for compatibility with Python
);

Adding rows to multidimensional array columns

DynamicTable objects with multidimensional array columns can also be constructed by adding a single row at a time. This method makes use of DataPipe objects because MATLAB does not retain trailing singleton dimensions for arrays with more than 2 dimensions. The code block below demonstrates how to build a DynamicTable object with a multidimensional ragged array column in this manner.
% Create file
file = NwbFile( ...
'session_start_time', datetime('2021-01-01 00:00:00', 'TimeZone', 'local'), ...
'identifier', 'ident1', ...
'session_description', 'test_file' ...
);
 
% Define Vector Data Objects with first row of table
start_time_exp = types.hdmf_common.VectorData( ...
'description', 'start times column', ...
'data', types.untyped.DataPipe( ...
'data', 1, ...
'maxSize', Inf ...
) ...
);
stop_time_exp = types.hdmf_common.VectorData( ...
'description', 'stop times column', ...
'data', types.untyped.DataPipe( ...
'data', 10, ...
'maxSize', Inf ...
) ...
);
random_exp = types.hdmf_common.VectorData( ...
'description', 'random data column', ...
'data', types.untyped.DataPipe( ...
'data', rand(3,2,5), ... % random data
'maxSize', [3, 2, Inf], ...
'axis', 3 ...
) ...
);
random_exp_index = types.hdmf_common.VectorIndex( ...
'description', 'index to random data column', ...
'target',types.untyped.ObjectView(random_exp), ...
'data', types.untyped.DataPipe( ...
'data', uint64(5), ...
'maxSize', Inf ...
) ...
);
ids_exp = types.hdmf_common.ElementIdentifiers( ...
'data', types.untyped.DataPipe( ...
'data', int64(0), ... % data must be numerical
'maxSize', Inf ...
) ...
);
% Create expandable table
colnames = {'start_time', 'stop_time', 'randomvalues'};
file.intervals_trials = types.core.TimeIntervals( ...
'description', 'test expandable dynamic table', ...
'colnames', colnames, ...
'start_time', start_time_exp, ...
'stop_time', stop_time_exp, ...
'randomvalues', random_exp, ...
'randomvalues_index', random_exp_index, ...
'id', ids_exp ...
);
% Export file
nwbExport(file, 'multiRaggedExpandableTableTest.nwb');
% Read in file
read_file = nwbRead('multiRaggedExpandableTableTest.nwb', 'ignorecache');
% add individual rows
read_file.intervals_trials.addRow( ...
'start_time', 2, ...
'stop_time', 20, ...
'randomvalues', rand(3,2,6), ...
'id', 1 ...
);
read_file.intervals_trials.addRow( ...
'start_time', 3, ...
'stop_time', 30, ...
'randomvalues', rand(3,2,3), ...
'id', 2 ...
);
read_file.intervals_trials.addRow( ...
'start_time', 4, ...
'stop_time', 40, ...
'randomvalues', rand(3,2,8), ...
'id', 3 ...
);
 

Learn More!

Python Tutorial

\ No newline at end of file diff --git a/tutorials/html/ecephys.html b/tutorials/html/ecephys.html deleted file mode 100644 index 6b51b3ff..00000000 --- a/tutorials/html/ecephys.html +++ /dev/null @@ -1,1502 +0,0 @@ - -Neurodata Without Borders Extracellular Electrophysiology Tutorial

Neurodata Without Borders Extracellular Electrophysiology Tutorial

About This Tutorial

This tutorial describes storage of hypothetical data from extracellular electrophysiology experiments in NWB for the following data categories:
  • Raw voltage recording
  • Local field potential (LFP) and filtered electrical signals
  • Spike times

Before You Begin

It is recommended to first work through the Introduction to MatNWB tutorial, which demonstrates installing MatNWB and creating an NWB file with subject information, animal position, and trials, as well as writing and reading NWB files in MATLAB.
Important: The dimensions of timeseries data in MatNWB should be defined in the opposite order of how it is defined in the nwb-schemas. In NWB, time is always stored in the first dimension of the data, whereas in MatNWB time should be stored in the last dimension of the data. This is explained in more detail here: MatNWB <-> HDF5 Dimension Mapping.
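As a brief sketch of this convention (array sizes here are arbitrary), data acquired as time-by-channels should be permuted to channels-by-time before being passed to MatNWB:
% Hypothetical acquisition output: 3000 time points x 12 channels
acquired_data = randn(3000, 12);
% Permute so that time is the last MATLAB dimension (channels x time)
data_for_matnwb = permute(acquired_data, [2, 1]);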

Setting up the NWB File

An NWB file represents a single session of an experiment. Each file must have a session_description, identifier, and session_start_time. Create a new NWBFile object with these required fields along with any additional metadata. In MatNWB, arguments are specified using MATLAB's keyword argument pair convention, where each argument name is followed by its value.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'timestamps_reference_time', datetime(2018, 4, 25, 3, 0, 45, 'TimeZone', 'local'), ...
'general_experimenter', 'Last Name, First Name', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', {'DOI:10.1016/j.neuron.2016.12.011'}); % optional
nwb
nwb =
NwbFile with properties:

    nwb_version: '2.8.0'
    identifier: 'Mouse5_Day3'
    session_description: 'mouse in open exploration'
    session_start_time: {[2018-04-25T02:30:03.000000+02:00]}
    timestamps_reference_time: {[2018-04-25T03:00:45.000000+02:00]}
    general_experimenter: 'Last Name, First Name'
    general_institution: 'University of My Institution'
    general_related_publications: {'DOI:10.1016/j.neuron.2016.12.011'}
    general_session_id: 'session_1234'
    ... (remaining properties are empty or unset)

Electrode Information

In order to store extracellular electrophysiology data, you first must create an electrodes table describing the electrodes that generated the data. The electrodes table is a DynamicTable and has several required fields: x, y, z, impedance, location, filtering, and electrode_group.
The electrodes table references a required ElectrodeGroup, which is used to represent a group of electrodes. Before creating an ElectrodeGroup, you must define a Device object. The fields description, manufacturer, model_number, model_name, and serial_number are optional, but recommended.
device = types.core.Device(...
'description', 'A 12-channel array with 4 shanks and 3 channels per shank', ...
'manufacturer', 'Array Technologies', ...
'model_number', 'PRB_1_4_0480_123', ...
'model_name', 'Neurovoxels 0.99', ...
'serial_number', '1234567890' ...
);
 
% Add device to nwb object
nwb.general_devices.set('array', device);

Electrodes Table

Since this is a DynamicTable, we can add additional metadata fields. We will be adding a "label" column to the table.
numShanks = 4;
numChannelsPerShank = 3;
numChannels = numShanks * numChannelsPerShank;
 
electrodesDynamicTable = types.hdmf_common.DynamicTable(...
'colnames', {'location', 'group', 'group_name', 'label'}, ...
'description', 'all electrodes');
 
for iShank = 1:numShanks
shankGroupName = sprintf('shank%d', iShank);
electrodeGroup = types.core.ElectrodeGroup( ...
'description', sprintf('electrode group for %s', shankGroupName), ...
'location', 'brain area', ...
'device', types.untyped.SoftLink(device) ...
);
nwb.general_extracellular_ephys.set(shankGroupName, electrodeGroup);
for iElectrode = 1:numChannelsPerShank
electrodesDynamicTable.addRow( ...
'location', 'unknown', ...
'group', types.untyped.ObjectView(electrodeGroup), ...
'group_name', shankGroupName, ...
'label', sprintf('%s-electrode%d', shankGroupName, iElectrode));
end
end
electrodesDynamicTable.toTable() % Display the table
ans = 12×5 table
          id    location     group             group_name    label
    1     0     'unknown'    1×1 ObjectView    'shank1'      'shank1-electrode1'
    2     1     'unknown'    1×1 ObjectView    'shank1'      'shank1-electrode2'
    3     2     'unknown'    1×1 ObjectView    'shank1'      'shank1-electrode3'
    4     3     'unknown'    1×1 ObjectView    'shank2'      'shank2-electrode1'
    5     4     'unknown'    1×1 ObjectView    'shank2'      'shank2-electrode2'
    6     5     'unknown'    1×1 ObjectView    'shank2'      'shank2-electrode3'
    7     6     'unknown'    1×1 ObjectView    'shank3'      'shank3-electrode1'
    8     7     'unknown'    1×1 ObjectView    'shank3'      'shank3-electrode2'
    9     8     'unknown'    1×1 ObjectView    'shank3'      'shank3-electrode3'
    10    9     'unknown'    1×1 ObjectView    'shank4'      'shank4-electrode1'
    11    10    'unknown'    1×1 ObjectView    'shank4'      'shank4-electrode2'
    12    11    'unknown'    1×1 ObjectView    'shank4'      'shank4-electrode3'
nwb.general_extracellular_ephys_electrodes = electrodesDynamicTable;

Links

In the above loop, we create ElectrodeGroup objects. The electrodes table then uses an ObjectView in each row to link to the corresponding ElectrodeGroup object. An ObjectView is a construct that enables one neurodata type to reference another within the NWB file.
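As a minimal sketch (reusing the device and electrodeGroup variables created above), the two reference types used in this tutorial are constructed as follows; a SoftLink references its target by path within the file, while an ObjectView stores an object reference that is resolved on export:
device_link = types.untyped.SoftLink(device); % link to the device by its path
group_reference = types.untyped.ObjectView(electrodeGroup); % object reference to an electrode group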

Recorded Extracellular Signals

Voltage data are stored using the ElectricalSeries class, a subclass of the TimeSeries class specialized for voltage data.

Referencing Electrodes

In order to create our ElectricalSeries object, we first need to reference a set of rows in the electrodes table to indicate which electrode (channel) each entry in the electrical series was recorded from. We will do this by creating a DynamicTableRegion, which is a type of link that allows you to reference specific rows of a DynamicTable, such as the electrodes table, using row indices.
Create a DynamicTableRegion that references all rows of the electrodes table.
electrode_table_region = types.hdmf_common.DynamicTableRegion( ...
'table', types.untyped.ObjectView(electrodesDynamicTable), ...
'description', 'all electrodes', ...
'data', (0:length(electrodesDynamicTable.id.data)-1)');

Raw Voltage Data

Now create an ElectricalSeries object to hold acquisition data collected during the experiment.
raw_electrical_series = types.core.ElectricalSeries( ...
'starting_time', 0.0, ... % seconds
'starting_time_rate', 30000., ... % Hz
'data', randn(numChannels, 3000), ... % nChannels x nTime
'electrodes', electrode_table_region, ...
'data_unit', 'volts');
This is the voltage data recorded directly from our electrodes, so it goes in the acquisition group.
nwb.acquisition.set('ElectricalSeries', raw_electrical_series);

Processed Extracellular Electrical Signals

LFP

LFP refers to data that has been low-pass filtered, typically below 300 Hz. This data may also be downsampled. Because it is filtered and potentially resampled, it is categorized as processed data. LFP data would also be stored in an ElectricalSeries. To help data analysis and visualization tools know that this ElectricalSeries object represents LFP data, we store it inside an LFP object and then place the LFP object in a ProcessingModule named 'ecephys'. This is analogous to how we stored the SpatialSeries object inside of a Position object and stored the Position object in a ProcessingModule named 'behavior' in the behavior tutorial.
lfp_electrical_series = types.core.ElectricalSeries( ...
'starting_time', 0.0, ... % seconds
'starting_time_rate', 1000., ... % Hz
'data', randn(numChannels, 100), ... nChannels x nTime
'filtering', 'Low-pass filter at 300 Hz', ...
'electrodes', electrode_table_region, ...
'data_unit', 'volts');
 
lfp = types.core.LFP('ElectricalSeries', lfp_electrical_series);
 
ecephys_module = types.core.ProcessingModule(...
'description', 'extracellular electrophysiology');
 
ecephys_module.nwbdatainterface.set('LFP', lfp);
nwb.processing.set('ecephys', ecephys_module);

Other Types of Filtered Electrical Signals

If your acquired data is filtered for frequency ranges other than LFP—such as Gamma or Theta—you can store the result in an ElectricalSeries and encapsulate it within a FilteredEphys object instead of the LFP object.
% Generate filtered data
filtered_data = randn(50, 12); % 50 time points, 12 channels
filtered_data = permute(filtered_data, [2, 1]); % permute timeseries for matnwb
 
% Create an ElectricalSeries object
filtered_electrical_series = types.core.ElectricalSeries( ...
'description', 'Data filtered in the theta range', ...
'data', filtered_data, ...
'electrodes', electrode_table_region, ...
'filtering', 'Band-pass filtered between 4 and 8 Hz', ...
'starting_time', 0.0, ...
'starting_time_rate', 200.0 ...
);
 
% Create a FilteredEphys object and add the filtered electrical series
filtered_ephys = types.core.FilteredEphys();
filtered_ephys.electricalseries.set('FilteredElectricalSeries', filtered_electrical_series);
 
% Add the FilteredEphys object to the ecephys module
ecephys_module.nwbdatainterface.set('FilteredEphys', filtered_ephys);

Decomposition of LFP Data into Frequency Bands

In some cases, you may want to further process the LFP data and decompose the signal into different frequency bands for additional downstream analyses. You can then store the processed data from these spectral analyses using a DecompositionSeries object. This object allows you to include metadata about the frequency bands and metric used (e.g., power, phase, amplitude), as well as link the decomposed data to the original TimeSeries signal the data was derived from.
In this tutorial, the examples for FilteredEphys and DecompositionSeries may appear similar. However, the key difference is that DecompositionSeries is specialized for storing the results of spectral analyses of timeseries data in general, whereas FilteredEphys is defined specifically as a container for filtered electrical signals.
Note: When adding data to a DecompositionSeries, the data argument is assumed to be 3D where the first dimension is time, the second dimension is channels, and the third dimension is bands. As mentioned in the beginning of this tutorial, in MatNWB the data needs to be permuted because the dimensions are written to file in reverse order (See the dimensionMapNoDataPipes tutorial)
% Define the frequency bands of interest (in Hz):
band_names = {'theta'; 'beta'; 'gamma'};
band_mean = [8; 21; 55];
band_stdev = [2; 4.5; 12.5];
band_limits = [band_mean - 2*band_stdev, band_mean + 2*band_stdev];
 
% The bands should be added to the DecompositionSeries as a dynamic table
bands = table(band_names, band_mean, band_stdev, band_limits, ...
'VariableNames', {'band_name', 'band_mean', 'band_stdev', 'band_limits'})
bands = 3×4 table
         band_name    band_mean    band_stdev    band_limits
                                                 1       2
    1    'theta'      8            2             4       12
    2    'beta'       21           4.5000        12      30
    3    'gamma'      55           12.5000       30      80
 
bands = util.table2nwb( bands );
 
% Generate random phase data for the demonstration.
phase_data = randn(50, 12, numel(band_names)); % 50 samples, 12 channels, 3 frequency bands
phase_data = permute(phase_data, [3,2,1]); % See dimensionMapNoDataPipes tutorial
 
decomp_series = types.core.DecompositionSeries(...
'data', phase_data, ...
'bands', bands, ...
'metric', 'phase', ...
'starting_time', 0.0, ... % seconds
'starting_time_rate', 1000.0, ... % Hz
'source_channels', electrode_table_region, ...
'source_timeseries', lfp_electrical_series);
 
% Add decomposition series to ecephys module
ecephys_module.nwbdatainterface.set('theta', decomp_series);

Spike Times and Extracellular Events

Sorted Spike Times

Spike times are stored in a Units table, a specialization of the DynamicTable class. The default Units table is located at /units in the HDF5 file. You can add columns to the Units table just like you did for electrodes and trials (see convertTrials). Here, we generate some random spike data and populate the table.
num_cells = 10;
spikes = cell(1, num_cells);
for iShank = 1:num_cells
spikes{iShank} = rand(1, randi([16, 28]));
end
spikes
spikes = 1×10 cell
    1×18 double    1×24 double    1×22 double    1×19 double    1×16 double    1×28 double    1×19 double    1×27 double    1×26 double    1×19 double

Ragged Arrays

Spike times are an example of a ragged array: it's like a matrix, but each row has a different number of elements. We can represent this type of data as an indexed column of the Units table. These indexed columns have two components: the VectorData object that holds the data and the VectorIndex object that holds the indices in the vector that indicate the row breaks. You can use the convenience function util.create_indexed_column to create these objects. For more information about ragged arrays, we refer you to the "Ragged Array Columns" section of the dynamic table tutorial.
[spike_times_vector, spike_times_index] = util.create_indexed_column(spikes);
 
nwb.units = types.core.Units( ...
'colnames', {'spike_times'}, ...
'description', 'units table', ...
'spike_times', spike_times_vector, ...
'spike_times_index', spike_times_index ...
);
 
nwb.units.toTable
ans = 10×2 table
          id    spike_times
    1     1     18×1 double
    2     2     24×1 double
    3     3     22×1 double
    4     4     19×1 double
    5     5     16×1 double
    6     6     28×1 double
    7     7     19×1 double
    8     8     27×1 double
    9     9     26×1 double
    10    10    19×1 double

Unsorted Spike Times

While the Units table is used to store spike times and waveform data for spike-sorted, single-unit activity, you may also want to store spike times and waveform snippets of unsorted spiking activity. This is useful for recording multi-unit activity detected via threshold crossings during data acquisition. Such information can be stored using SpikeEventSeries objects.
% In the SpikeEventSeries the dimensions should be ordered as
% [num_events, num_channels, num_samples].
% Define spike snippets: 20 events, 3 channels, 40 samples per event.
spike_snippets = rand(20, 3, 40);
% Permute spike snippets (See dimensionMapNoDataPipes tutorial)
spike_snippets = permute(spike_snippets, [3,2,1])
spike_snippets =
spike_snippets(:,:,1) through spike_snippets(:,:,20): 40×3 pages of random values (numeric display omitted)
0.7659 - 0.9227 0.1591 0.3390 - 0.8408 0.8849 0.8486 - 0.1186 0.6178 0.3768 - 0.4187 0.6784 0.4346 - 0.3279 0.7755 0.4404 - 0.8957 0.8531 0.9597 - 0.8631 0.8959 0.0828 - 0.1697 0.9542 0.9433 - 0.2106 0.2864 0.0747 - 0.1872 0.6363 0.1239 - 0.0568 0.2628 0.8335 - 0.9199 0.8350 0.3247 - 0.3773 0.3289 0.0652 - - -spike_snippets(:,:,20) = - - 0.3211 0.6437 0.8537 - 0.1520 0.5627 0.1590 - 0.9570 0.1282 0.0198 - 0.5171 0.9608 0.5398 - 0.5639 0.0015 0.5351 - 0.7871 0.4368 0.6525 - 0.0547 0.4123 0.4774 - 0.2071 0.3338 0.3417 - 0.9039 0.8778 0.9688 - 0.5819 0.2111 0.0526 - 0.7195 0.6208 0.2210 - 0.8373 0.6037 0.2933 - 0.5257 0.4083 0.5324 - 0.3001 0.5219 0.6432 - 0.6423 0.4282 0.2968 - 0.0757 0.3441 0.9642 - 0.8420 0.1641 0.5281 - 0.2294 0.1050 0.7394 - 0.4970 0.7150 0.6435 - 0.2402 0.3646 0.3062 - 0.8428 0.1960 0.4764 - 0.4495 0.6721 0.6090 - 0.3322 0.9081 0.6180 - 0.3987 0.8989 0.3979 - 0.4572 0.4035 0.3269 - 0.7665 0.9586 0.1624 - 0.4178 0.9179 0.0901 - 0.3773 0.9761 0.4298 - 0.8494 0.8965 0.7878 - 0.8920 0.3539 0.2586 - 0.0978 0.6594 0.7246 - 0.3655 0.8161 0.2763 - 0.5166 0.0215 0.4210 - 0.8319 0.7026 0.3662 - 0.1834 0.9958 0.9970 - 0.9425 0.3698 0.1860 - 0.5619 0.5043 0.2714 - 0.1227 0.7896 0.3180 - 0.3038 0.2416 0.8651 - 0.0602 0.9943 0.4567 -
 
% Create electrode table region referencing electrodes 0, 1, and 2
shank0_table_region = types.hdmf_common.DynamicTableRegion( ...
'table', types.untyped.ObjectView(electrodesDynamicTable), ...
'description', 'shank0', ...
'data', (0:2)');
 
% Define spike event series for unsorted spike times
spike_events = types.core.SpikeEventSeries( ...
'data', spike_snippets, ...
'timestamps', (0:19)', ... % Timestamps for each event
'description', 'events detected with 100uV threshold', ...
'electrodes', shank0_table_region ...
);
 
% Add spike event series to NWB file acquisition
nwb.acquisition.set('SpikeEvents_Shank0', spike_events);

Detected Events

If you need to store the complete, continuous raw voltage traces, along with unsorted spike times, you should store the traces in ElectricalSeries objects in the acquisition group, and use the EventDetection class to identify the spike events in your raw traces.
% Create the EventDetection object
event_detection = types.core.EventDetection( ...
'detection_method', 'thresholding, 1.5 * std', ...
'source_electricalseries', types.untyped.SoftLink(raw_electrical_series), ...
'source_idx', [1000; 2000; 3000], ...
'times', [.033, .066, .099] ...
);
 
% Add the EventDetection object to the ecephys module
ecephys_module.nwbdatainterface.set('ThresholdEvents', event_detection);

Storing Spike Features (e.g. Principal Components)

NWB also provides a way to store features of spikes, such as principal components, using the FeatureExtraction class.
% Generate random feature data (time x channel x feature)
features = rand(3, 12, 4); % 3 time points, 12 channels, 4 features
features = permute(features, [3,2,1]); % reverse dimension order for matnwb
 
% Create the FeatureExtraction object
feature_extraction = types.core.FeatureExtraction( ...
'description', {'PC1', 'PC2', 'PC3', 'PC4'}, ... % Feature descriptions
'electrodes', electrode_table_region, ... % DynamicTableRegion referencing the electrodes table
'times', [.033; .066; .099], ... % Column vector for times
'features', features ...
);
 
% Add the FeatureExtraction object to the ecephys module (if required)
ecephys_module.nwbdatainterface.set('PCA_features', feature_extraction);

Choosing NWB-Types for Electrophysiology Data (A Summary)

As mentioned above, ElectricalSeries objects are meant for storing electrical timeseries data like raw voltage signals or processed signals like LFP or other filtered signals. In addition to the ElectricalSeries class, NWB provides additional classes for storing event-based electrophysiological data. We will briefly discuss them here, and refer the reader to the API documentation and the section on Extracellular Physiology in the "NWB Format Specification" for more details on using these objects.
For storing unsorted spiking data, there are two options. Which one you choose depends on what data you have available. If you need to store complete and/or continuous raw voltage traces, you should store the traces with ElectricalSeries objects as acquisition data, and use the EventDetection class for identifying the spike events in your raw traces. If you do not want to store the entire raw voltage traces, only the waveform ‘snippets’ surrounding spike events, you should use SpikeEventSeries objects.
The results of spike sorting (or clustering) should be stored in the top-level Units table. The Units table can hold just the spike times of sorted units or, optionally, include additional waveform information. You can use the optional predefined columns waveform_mean, waveform_sd, and waveforms in the Units table to store individual and mean waveform data.
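As a minimal, standalone sketch of that pattern (not part of this tutorial's running example; the spike times below are made up for illustration, and the optional waveform columns are added in the same way as extra name/value pairs):
nwb.units = types.core.Units( ...
'colnames', {'spike_times'}, ...
'description', 'sorted units' ...
);
nwb.units.addRow('spike_times', [0.1, 0.21, 0.5]); % spike times of unit 1
nwb.units.addRow('spike_times', [0.12, 0.3, 0.7, 0.9]); % spike times of unit 2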

Writing the NWB File

nwbExport(nwb, 'ecephys_tutorial.nwb')

Reading NWB Data

Data arrays are read passively from the file. Calling TimeSeries.data does not read the data values, but presents an HDF5 object that can be indexed to read data. This allows you to conveniently work with datasets that are too large to fit in RAM all at once. Calling load with no input arguments reads the entire dataset:
nwb2 = nwbRead('ecephys_tutorial.nwb', 'ignorecache');
nwb2.processing.get('ecephys'). ...
nwbdatainterface.get('LFP'). ...
electricalseries.get('ElectricalSeries'). ...
data.load;

Accessing Data Regions

If all you need is a data region, you can index a DataStub object like you would any normal array in MATLAB, as shown below. When indexing the dataset this way, only the selected region is read from disk into RAM. This allows you to handle very large datasets that would not fit entirely into RAM.
% read section of LFP
nwb2.processing.get('ecephys'). ...
nwbdatainterface.get('LFP'). ...
electricalseries.get('ElectricalSeries'). ...
data(1:5, 1:10)
ans = 5×10
-3.5690 0.9190 1.1629 0.1770 -1.1149 -0.0227 1.7513 0.0439 0.3346 -0.6137
-0.8413 1.7329 0.4917 0.5800 -2.9249 0.5906 0.0099 0.5686 1.5835 -0.2693
-0.0354 0.5030 -0.1769 0.9896 -0.5627 -2.1664 0.5403 -0.6677 0.4199 -0.2109
0.7809 -0.3773 -0.7599 -0.6833 -1.2777 -1.6061 0.4971 0.1317 0.0585 -0.8233
0.2562 0.5045 0.0044 0.2529 0.9348 2.2901 -0.0942 0.2287 -0.5769 -0.1247
 
% You can use the getRow method of the table to load spike times of a specific unit.
% To get the values, unpack from the returned table.
nwb.units.getRow(1).spike_times{1}
ans = 18×1
0.0591
0.2652
0.3253
0.6815
0.7179
0.4488
0.0672
0.6978
0.4125
0.7288
(remaining rows not shown)

Learn more!

See the API documentation to learn what data types are available.

MATLAB tutorials

Python tutorials

See our tutorials for more details about your data type:
Check out other tutorials that teach advanced NWB topics:
\ No newline at end of file diff --git a/tutorials/html/images.html b/tutorials/html/images.html deleted file mode 100644 index bbdaecdf..00000000 --- a/tutorials/html/images.html +++ /dev/null @@ -1,371 +0,0 @@ - -Storing Image Data in NWB

Storing Image Data in NWB

Image data can be a collection of individual images or movie segments (as a movie is simply a series of images) about the subject, the environment, the presented stimuli, or other parts of the experiment. This tutorial focuses in particular on the image-related types covered in the sections below: OpticalSeries, AbstractFeatureSeries, ImageSeries, the static image types (RGBAImage, RGBImage, GrayscaleImage), and IndexSeries.

Create an NWB File

nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'timestamps_reference_time', datetime(2018, 4, 25, 3, 0, 45, 'TimeZone', 'local'), ...
'general_experimenter', 'LastName, FirstName', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', 'DOI:10.1016/j.neuron.2016.12.011' ... % optional
);
nwb
nwb =
NwbFile with properties:

    nwb_version: '2.7.0'
    file_create_date: []
    identifier: 'Mouse5_Day3'
    session_description: 'mouse in open exploration'
    session_start_time: {[2018-04-25T02:30:03.000000+02:00]}
    timestamps_reference_time: {[2018-04-25T03:00:45.000000+02:00]}
    acquisition: [0×1 types.untyped.Set]
    analysis: [0×1 types.untyped.Set]
    general: [0×1 types.untyped.Set]
    general_data_collection: ''
    general_devices: [0×1 types.untyped.Set]
    general_experiment_description: ''
    general_experimenter: 'LastName, FirstName'
    general_extracellular_ephys: [0×1 types.untyped.Set]
    general_extracellular_ephys_electrodes: []
    general_institution: 'University of My Institution'
    general_intracellular_ephys: [0×1 types.untyped.Set]
    general_intracellular_ephys_experimental_conditions: []
    general_intracellular_ephys_filtering: ''
    general_intracellular_ephys_intracellular_recordings: []
    general_intracellular_ephys_repetitions: []
    general_intracellular_ephys_sequential_recordings: []
    general_intracellular_ephys_simultaneous_recordings: []
    general_intracellular_ephys_sweep_table: []
    general_keywords: ''
    general_lab: ''
    general_notes: ''
    general_optogenetics: [0×1 types.untyped.Set]
    general_optophysiology: [0×1 types.untyped.Set]
    general_pharmacology: ''
    general_protocol: ''
    general_related_publications: 'DOI:10.1016/j.neuron.2016.12.011'
    general_session_id: 'session_1234'
    general_slices: ''
    general_source_script: ''
    general_source_script_file_name: ''
    general_stimulus: ''
    general_subject: []
    general_surgery: ''
    general_virus: ''
    intervals: [0×1 types.untyped.Set]
    intervals_epochs: []
    intervals_invalid_times: []
    intervals_trials: []
    processing: [0×1 types.untyped.Set]
    scratch: [0×1 types.untyped.Set]
    stimulus_presentation: [0×1 types.untyped.Set]
    stimulus_templates: [0×1 types.untyped.Set]
    units: []

OpticalSeries: Storing series of images as stimuli

OpticalSeries is for time series of images that were presented to the subject as stimuli. We will create an OpticalSeries object with the name "StimulusPresentation" representing what images were shown to the subject and at what times.
Image data can be stored either in the HDF5 file or as an external image file. For this tutorial, we will use fake image data with shape of ('time', 'x', 'y', 'RGB') = (200, 50, 50, 3). As in all TimeSeries, the first dimension is time. The second and third dimensions represent x and y. The fourth dimension represents the RGB value (length of 3) for color images. Please note: As described in the dimensionMapNoDataPipes tutorial, when a MATLAB array is exported to HDF5, the array is transposed. Therefore, in order to correctly export the data, we will need to create a transposed array, where the dimensions are in reverse order compared to the type specification.
NWB differentiates between acquired data and data that was presented as stimulus. We can add it to the NWBFile object as stimulus data.
If the sampling rate is constant, use rate and starting_time to specify time. For irregularly sampled recordings, use timestamps to specify time for each sample image.
image_data = randi(255, [3, 50, 50, 200]); % NB: Array is transposed
optical_series = types.core.OpticalSeries( ...
'distance', 0.7, ... % required
'field_of_view', [0.2, 0.3, 0.7], ... % required
'orientation', 'lower left', ... % required
'data', image_data, ...
'data_unit', 'n.a.', ...
'starting_time_rate', 1.0, ...
'starting_time', 0.0, ...
'description', 'The images presented to the subject as stimuli' ...
);
 
nwb.stimulus_presentation.set('StimulusPresentation', optical_series);
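If the presentation times are irregular, the same OpticalSeries can instead be given explicit timestamps rather than starting_time and starting_time_rate. A minimal sketch (the timestamp vector below is made up for illustration):
optical_series_irregular = types.core.OpticalSeries( ...
'distance', 0.7, ... % required
'field_of_view', [0.2, 0.3, 0.7], ... % required
'orientation', 'lower left', ... % required
'data', image_data, ...
'data_unit', 'n.a.', ...
'timestamps', cumsum(0.01 + 0.04*rand(1, 200)), ... % one (irregular) timestamp per frame
'description', 'Stimulus images presented at irregular times' ...
);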

AbstractFeatureSeries: Storing features of visual stimuli

While it is usually recommended to store the entire image data as an OpticalSeries, sometimes it is useful to store features of the visual stimuli instead of or in addition to the raw image data. For example, you may want to store the mean luminance of the image, the contrast, or the spatial frequency. This can be done using an instance of AbstractFeatureSeries. This class is a general container for storing time series of features that are derived from the raw image data.
% Create some fake feature data
feature_data = rand(3, 200); % 200 time points, 3 features
 
% Create an AbstractFeatureSeries object
abstract_feature_series = types.core.AbstractFeatureSeries( ...
'data', feature_data, ...
'timestamps', linspace(0, 1, 200), ...
'description', 'Features of the visual stimuli', ...
'features', {'luminance', 'contrast', 'spatial frequency'}, ...
'feature_units', {'n.a.', 'n.a.', 'cycles/degree'} ...
);
% Add the AbstractFeatureSeries to the NWBFile
nwb.stimulus_presentation.set('StimulusFeatures', abstract_feature_series);

ImageSeries: Storing series of images as acquisition

ImageSeries is a general container for time series of images acquired during the experiment. Image data can be stored either in the HDF5 file or as an external image file. When color images are stored in the HDF5 file the color channel order is expected to be RGB.
image_data = randi(255, [3, 50, 50, 200]);
behavior_images = types.core.ImageSeries( ...
'data', image_data, ...
'description', 'Image data of an animal in environment', ...
'data_unit', 'n.a.', ...
'starting_time_rate', 1.0, ...
'starting_time', 0.0 ...
);
 
nwb.acquisition.set('ImageSeries', behavior_images);

External Files

External files (e.g. video files of the behaving animal) can be added to the NWBFile by creating an ImageSeries object using the external_file attribute that specifies the path to the external file(s) on disk. The file(s) path must be relative to the path of the NWB file. Either external_file or data must be specified, but not both. external_file can be a cell array of multiple video files.
The starting_frame attribute serves as an index to indicate the starting frame of each external file, allowing you to skip the beginning of videos.
external_files = {'video1.pmp4', 'video2.pmp4'};
 
timestamps = [0.0, 0.04, 0.07, 0.1, 0.14, 0.16, 0.21];
behavior_external_file = types.core.ImageSeries( ...
'description', 'Behavior video of animal moving in environment', ...
'data_unit', 'n.a.', ...
'external_file', external_files, ...
'format', 'external', ...
'external_file_starting_frame', [0, 2, 4], ...
'timestamps', timestamps ...
);
 
nwb.acquisition.set('ExternalVideos', behavior_external_file);

Static Images

Static images can be stored in an NWBFile object by creating an RGBAImage, RGBImage or GrayscaleImage object with the image data. All of these image types provide an optional description parameter to include text description about the image and the resolution parameter to specify the pixels/cm resolution of the image.

RGBAImage: for color images with transparency

RGBAImage is for storing data of a color image with transparency. data must be 3D where the first and second dimensions represent x and y. The third dimension has length 4 and represents the RGBA value.
image_data = randi(255, [4, 200, 200]);
 
rgba_image = types.core.RGBAImage( ...
'data', image_data, ... % required
'resolution', 70.0, ...
'description', 'RGBA image' ...
);

RGBImage: for color images

RGBImage is for storing data of an RGB color image. data must be 3D where the first and second dimensions represent x and y. The third dimension has length 3 and represents the RGB value.
image_data = randi(255, [3, 200, 200]);
 
rgb_image = types.core.RGBImage( ...
'data', image_data, ... % required
'resolution', 70.0, ...
'description', 'RGB image' ...
);

GrayscaleImage: for grayscale images

GrayscaleImage is for storing grayscale image data. data must be 2D where the first and second dimensions represent x and y.
image_data = randi(255, [200, 200]);
 
grayscale_image = types.core.GrayscaleImage( ...
'data', image_data, ... % required
'resolution', 70.0, ...
'description', 'Grayscale image' ...
);

Images: a container for images

Add the images to an Images container that accepts any of these image types.
image_collection = types.core.Images( ...
'description', 'A collection of logo images presented to the subject.'...
);
 
image_collection.image.set('rgba_image', rgba_image);
image_collection.image.set('rgb_image', rgb_image);
image_collection.image.set('grayscale_image', grayscale_image);
 
nwb.acquisition.set('image_collection', image_collection);

Index Series for Repeated Images

You may want to set up a time series of images where some images are repeated many times. You could create an ImageSeries that repeats the data each time the image is shown, but that would be inefficient, because it would store the same data multiple times. A better solution would be to store the unique images once and reference those images. This is how IndexSeries works. First, create an Images container with the order of images defined using an ImageReferences. Then create an IndexSeries that indexes into the Images.
rgbImage = imread('street2.jpg');
grayImage = uint8(sum(double(rgbImage), 3) ./ double(max(max(max(rgbImage)))));
GsStreet = types.core.GrayscaleImage(...
'data', grayImage, ...
'description', 'grayscale image of a street.', ...
'resolution', 28 ...
);
 
RgbStreet = types.core.RGBImage( ...
'data', rgbImage, ...
'resolution', 28, ...
'description', 'RGB Street' ...
);
 
ImageOrder = types.core.ImageReferences(...
'data', [types.untyped.ObjectView(RgbStreet), types.untyped.ObjectView(GsStreet)] ...
);
Images = types.core.Images( ...
'gs_face', GsStreet, ...
'rgb_face', RgbStreet, ...
'description', 'A collection of streets.', ...
'order_of_images', ImageOrder ...
);
 
types.core.IndexSeries(...
'data', [0, 1, 0, 1], ... % NOTE: 0-indexed
'indexed_images', Images, ...
'timestamps', [0.1, 0.2, 0.3, 0.4] ...
)
ans =
IndexSeries with properties:

    indexed_images: [1×1 types.core.Images]
    indexed_timeseries: []
    starting_time_unit: 'seconds'
    timestamps_interval: 1
    timestamps_unit: 'seconds'
    data: [0 1 0 1]
    comments: 'no comments'
    control: []
    control_description: ''
    data_continuity: ''
    data_conversion: []
    data_offset: []
    data_resolution: []
    data_unit: 'N/A'
    description: 'no description'
    starting_time: []
    starting_time_rate: []
    timestamps: [0.1000 0.2000 0.3000 0.4000]
Here data contains the (0-indexed) indices of the displayed images, in the order they are listed in the ImageReferences.
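To make the index resolvable when the file is read back, the Images container and the IndexSeries are typically stored in the file as well. A minimal sketch (the group names here are illustrative, not from the original tutorial; depending on your MatNWB version the indexed_images link may need to be wrapped in a types.untyped.SoftLink):
image_index = types.core.IndexSeries( ...
'data', [0, 1, 0, 1], ... % 0-indexed into the ImageReferences order
'indexed_images', Images, ...
'timestamps', [0.1, 0.2, 0.3, 0.4] ...
);
nwb.stimulus_templates.set('StreetImages', Images);
nwb.stimulus_presentation.set('StreetPresentation', image_index);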

Writing the images to an NWB File

Now use nwbExport to write the file.
nwbExport(nwb, "images_test.nwb");
\ No newline at end of file diff --git a/tutorials/html/intro.html b/tutorials/html/intro.html deleted file mode 100644 index b73348b8..00000000 --- a/tutorials/html/intro.html +++ /dev/null @@ -1,511 +0,0 @@ - -Introduction to MatNWB

Introduction to MatNWB

Installing MatNWB

Use the code below (it is wrapped in a %{ ... %} block comment; uncomment it to run) to install MatNWB from source. MatNWB works by automatically creating API classes based on the schema.
%{
!git clone https://github.com/NeurodataWithoutBorders/matnwb.git
addpath(genpath(pwd));
%}

Set up the NWB File

An NWB file represents a single session of an experiment. Each file must have a session_description, identifier, and session start time. Create a new NWBFile object with those and additional metadata using the NwbFile command. For all MatNWB classes and functions, we use the Matlab method of entering keyword argument pairs, where arguments are entered as name followed by value. Ellipses are used for clarity.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'general_experimenter', 'Last, First', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', {'DOI:10.1016/j.neuron.2016.12.011'}); % optional
nwb
nwb =
NwbFile with properties:

    nwb_version: '2.7.0'
    file_create_date: []
    identifier: 'Mouse5_Day3'
    session_description: 'mouse in open exploration'
    session_start_time: {[2018-04-25T02:30:03.000000+02:00]}
    timestamps_reference_time: []
    acquisition: [0×1 types.untyped.Set]
    analysis: [0×1 types.untyped.Set]
    general: [0×1 types.untyped.Set]
    general_data_collection: ''
    general_devices: [0×1 types.untyped.Set]
    general_experiment_description: ''
    general_experimenter: 'Last, First'
    general_extracellular_ephys: [0×1 types.untyped.Set]
    general_extracellular_ephys_electrodes: []
    general_institution: 'University of My Institution'
    general_intracellular_ephys: [0×1 types.untyped.Set]
    general_intracellular_ephys_experimental_conditions: []
    general_intracellular_ephys_filtering: ''
    general_intracellular_ephys_intracellular_recordings: []
    general_intracellular_ephys_repetitions: []
    general_intracellular_ephys_sequential_recordings: []
    general_intracellular_ephys_simultaneous_recordings: []
    general_intracellular_ephys_sweep_table: []
    general_keywords: ''
    general_lab: ''
    general_notes: ''
    general_optogenetics: [0×1 types.untyped.Set]
    general_optophysiology: [0×1 types.untyped.Set]
    general_pharmacology: ''
    general_protocol: ''
    general_related_publications: {'DOI:10.1016/j.neuron.2016.12.011'}
    general_session_id: 'session_1234'
    general_slices: ''
    general_source_script: ''
    general_source_script_file_name: ''
    general_stimulus: ''
    general_subject: []
    general_surgery: ''
    general_virus: ''
    intervals: [0×1 types.untyped.Set]
    intervals_epochs: []
    intervals_invalid_times: []
    intervals_trials: []
    processing: [0×1 types.untyped.Set]
    scratch: [0×1 types.untyped.Set]
    stimulus_presentation: [0×1 types.untyped.Set]
    stimulus_templates: [0×1 types.untyped.Set]
    units: []

Warning: The following required properties are missing for instance for type "NwbFile":
    timestamps_reference_time

Subject Information

You can also provide information about your subject in the NWB file. Create a Subject object to store information such as age, species, genotype, sex, and a freeform description. Then set nwb.general_subject to the Subject object.
Each of these fields is free-form, so any values will be valid, but here are our recommendations:
  • For age, we recommend using the ISO 8601 Duration format
  • For species, we recommend using the formal Latin binomial name (e.g. mouse -> Mus musculus, human -> Homo sapiens)
  • For sex, we recommend using F (female), M (male), U (unknown), and O (other)
subject = types.core.Subject( ...
'subject_id', '001', ...
'age', 'P90D', ...
'description', 'mouse 5', ...
'species', 'Mus musculus', ...
'sex', 'M' ...
);
nwb.general_subject = subject;
 
subject
subject =
Subject with properties:

    age: 'P90D'
    age_reference: 'birth'
    date_of_birth: []
    description: 'mouse 5'
    genotype: ''
    sex: 'M'
    species: 'Mus musculus'
    strain: ''
    subject_id: '001'
    weight: ''
Note: the DANDI archive requires all NWB files to have a subject object with subject_id specified, and strongly encourages specifying the other fields.

Time Series Data

TimeSeries is a common base class for measurements sampled over time, and provides fields for data and timestamps (regularly or irregularly sampled). You will also need to supply the name and unit of measurement (SI unit).
For instance, we can store TimeSeries data for a recording that started 0.0 seconds after start_time and was sampled once per second (1 Hz):
time_series_with_rate = types.core.TimeSeries( ...
'description', 'an example time series', ...
'data', linspace(0, 100, 10), ...
'data_unit', 'm', ...
'starting_time', 0.0, ...
'starting_time_rate', 1.0);
For irregularly sampled recordings, we need to provide the timestamps for the data:
time_series_with_timestamps = types.core.TimeSeries( ...
'description', 'an example time series', ...
'data', linspace(0, 100, 10), ...
'data_unit', 'm', ...
'timestamps', linspace(0, 1, 10));
The TimeSeries class serves as the foundation for all other time series types in the NWB format. Several specialized subclasses extend the functionality of TimeSeries, each tailored to handle specific kinds of data. In the next section, we’ll explore one of these specialized types. For a full overview, please check out the type hierarchy in the NWB schema documentation.

Other Types of Time Series

As mentioned previously, there are many subtypes of TimeSeries in MatNWB that are used to store different kinds of data. One example is AnnotationSeries, a subclass of TimeSeries that stores text-based records about the experiment. Similar to our TimeSeries example above, we can create an AnnotationSeries object with text information about a stimulus and add it to the stimulus_presentation group in the NWBFile. Below is an example where we create an AnnotationSeries object with annotations for airpuff stimuli and add it to the NWBFile.
% Create an AnnotationSeries object with annotations for airpuff stimuli
annotations = types.core.AnnotationSeries( ...
'description', 'Airpuff events delivered to the animal', ...
'data', {'Left Airpuff', 'Right Airpuff', 'Right Airpuff'}, ...
'timestamps', [1.0, 3.0, 8.0] ...
);
 
% Add the AnnotationSeries to the NWBFile's stimulus group
nwb.stimulus_presentation.set('Airpuffs', annotations)
ans =
Set with properties:

    Airpuffs: [types.core.AnnotationSeries]

Behavior

SpatialSeries and Position

Many types of data have special data types in NWB. To store the spatial position of a subject, we will use the SpatialSeries and Position classes.
Note: These diagrams follow a standard convention called "UML class diagram" to express the object-oriented relationships between NWB classes. For our purposes, all you need to know is that an open triangle means "extends" (i.e., is a specialized subtype of), and an open diamond means "is contained within." Learn more about class diagrams on the wikipedia page.
SpatialSeries is a subclass of TimeSeries, a common base class for measurements sampled over time, and provides fields for data and time (regularly or irregularly sampled). Here, we put a SpatialSeries object called 'SpatialSeries' in a Position object. If the data is sampled at a regular interval, it is recommended to specify the starting_time and the sampling rate (starting_time_rate), although it is still possible to specify timestamps as in the time_series_with_timestamps example above.
% create SpatialSeries object
spatial_series_ts = types.core.SpatialSeries( ...
'data', [linspace(0,10,100); linspace(0,8,100)], ...
'reference_frame', '(0,0) is bottom left corner', ...
'starting_time', 0, ...
'starting_time_rate', 200 ...
);
 
% create Position object and add SpatialSeries
position = types.core.Position('SpatialSeries', spatial_series_ts);
NWB differentiates between raw, acquired data, which should never change, and processed data, which are the results of preprocessing algorithms and could change. Let's assume that the animal's position was computed from a video tracking algorithm, so it would be classified as processed data. Since processed data can be very diverse, NWB allows us to create processing modules, which are like folders, to store related processed data or data that comes from a single algorithm.
Create a processing module called "behavior" for storing behavioral data in the NWBFile and add the Position object to the module.
% create processing module
behavior_module = types.core.ProcessingModule('description', 'contains behavioral data');
 
% add the Position object (that holds the SpatialSeries object) to the module
% and name the Position object "Position"
behavior_module.nwbdatainterface.set('Position', position);
 
% add the processing module to the NWBFile object, and name the processing module "behavior"
nwb.processing.set('behavior', behavior_module);

Trials

Trials are stored in a TimeIntervals object, which is a subclass of DynamicTable. DynamicTable objects are used to store tabular metadata throughout NWB, including for trials, electrodes, and sorted units. They offer flexibility for tabular data by allowing required columns, optional columns, and custom columns. The trials table requires start_time and stop_time columns; here we also add a custom column named 'correct', which will be a logical array.
trials = types.core.TimeIntervals( ...
'colnames', {'start_time', 'stop_time', 'correct'}, ...
'description', 'trial data and properties');
 
trials.addRow('start_time', 0.1, 'stop_time', 1.0, 'correct', false)
trials.addRow('start_time', 1.5, 'stop_time', 2.0, 'correct', true)
trials.addRow('start_time', 2.5, 'stop_time', 3.0, 'correct', false)
 
trials.toTable() % visualize the table
ans = 3×4 table
    id    start_time    stop_time    correct
    1     0.1000        1            0
    2     1.5000        2            1
    3     2.5000        3            0
nwb.intervals_trials = trials;
 
% If you have multiple trials tables, you will need to use custom names for
% each one:
nwb.intervals.set('custom_intervals_table_name', trials);

Write

Now, to write the NWB file that we have built so far:
nwbExport(nwb, 'intro_tutorial.nwb')
We can use the HDFView application to inspect the resulting NWB file.

Read

We can then read the file back in using MatNWB and inspect its contents.
read_nwbfile = nwbRead('intro_tutorial.nwb', 'ignorecache')
read_nwbfile =
NwbFile with properties:

    nwb_version: '2.7.0'
    file_create_date: [1×1 types.untyped.DataStub]
    identifier: 'Mouse5_Day3'
    session_description: 'mouse in open exploration'
    session_start_time: [1×1 types.untyped.DataStub]
    timestamps_reference_time: [1×1 types.untyped.DataStub]
    acquisition: [0×1 types.untyped.Set]
    analysis: [0×1 types.untyped.Set]
    general: [0×1 types.untyped.Set]
    general_data_collection: ''
    general_devices: [0×1 types.untyped.Set]
    general_experiment_description: ''
    general_experimenter: [1×1 types.untyped.DataStub]
    general_extracellular_ephys: [0×1 types.untyped.Set]
    general_extracellular_ephys_electrodes: []
    general_institution: 'University of My Institution'
    general_intracellular_ephys: [0×1 types.untyped.Set]
    general_intracellular_ephys_experimental_conditions: []
    general_intracellular_ephys_filtering: ''
    general_intracellular_ephys_intracellular_recordings: []
    general_intracellular_ephys_repetitions: []
    general_intracellular_ephys_sequential_recordings: []
    general_intracellular_ephys_simultaneous_recordings: []
    general_intracellular_ephys_sweep_table: []
    general_keywords: ''
    general_lab: ''
    general_notes: ''
    general_optogenetics: [0×1 types.untyped.Set]
    general_optophysiology: [0×1 types.untyped.Set]
    general_pharmacology: ''
    general_protocol: ''
    general_related_publications: [1×1 types.untyped.DataStub]
    general_session_id: 'session_1234'
    general_slices: ''
    general_source_script: ''
    general_source_script_file_name: ''
    general_stimulus: ''
    general_subject: [1×1 types.core.Subject]
    general_surgery: ''
    general_virus: ''
    intervals: [1×1 types.untyped.Set]
    intervals_epochs: []
    intervals_invalid_times: []
    intervals_trials: [1×1 types.core.TimeIntervals]
    processing: [1×1 types.untyped.Set]
    scratch: [0×1 types.untyped.Set]
    stimulus_presentation: [1×1 types.untyped.Set]
    stimulus_templates: [0×1 types.untyped.Set]
    units: []
We can print the SpatialSeries data traversing the hierarchy of objects. The processing module called 'behavior' contains our Position object named 'Position'. The Position object contains our SpatialSeries object named 'SpatialSeries'.
read_spatial_series = read_nwbfile.processing.get('behavior'). ...
nwbdatainterface.get('Position').spatialseries.get('SpatialSeries')
read_spatial_series =
SpatialSeries with properties:

    reference_frame: '(0,0) is bottom left corner'
    starting_time_unit: 'seconds'
    timestamps_interval: 1
    timestamps_unit: 'seconds'
    data: [1×1 types.untyped.DataStub]
    comments: 'no comments'
    control: []
    control_description: ''
    data_continuity: ''
    data_conversion: 1
    data_offset: 0
    data_resolution: -1
    data_unit: 'meters'
    description: 'no description'
    starting_time: 0
    starting_time_rate: 200
    timestamps: []

Reading Data

Counter to normal MATLAB workflow, data arrays are read passively from the file. Calling read_spatial_series.data does not read the data values, but presents a DataStub object that can be indexed to read data.
read_spatial_series.data
ans =
DataStub with properties:

    filename: 'intro_tutorial.nwb'
    path: '/processing/behavior/Position/SpatialSeries/data'
    dims: [2 100]
    ndims: 2
    dataType: 'double'
This allows you to conveniently work with datasets that are too large to fit in RAM all at once. Access all the data in the matrix using the load method with no arguments.
read_spatial_series.data.load
ans = 2×100
0 0.1010 0.2020 0.3030 0.4040 0.5051 0.6061 0.7071 0.8081 0.9091 1.0101 1.1111 1.2121 1.3131 1.4141 1.5152 1.6162 1.7172 1.8182 1.9192 2.0202 2.1212 2.2222 2.3232 2.4242 2.5253 2.6263 2.7273 2.8283 2.9293 3.0303 3.1313 3.2323 3.3333 3.4343 3.5354 3.6364 3.7374 3.8384 3.9394 4.0404 4.1414 4.2424 4.3434 4.4444 4.5455 4.6465 4.7475 4.8485 4.9495
0 0.0808 0.1616 0.2424 0.3232 0.4040 0.4848 0.5657 0.6465 0.7273 0.8081 0.8889 0.9697 1.0505 1.1313 1.2121 1.2929 1.3737 1.4545 1.5354 1.6162 1.6970 1.7778 1.8586 1.9394 2.0202 2.1010 2.1818 2.2626 2.3434 2.4242 2.5051 2.5859 2.6667 2.7475 2.8283 2.9091 2.9899 3.0707 3.1515 3.2323 3.3131 3.3939 3.4747 3.5556 3.6364 3.7172 3.7980 3.8788 3.9596
(display truncated to the first 50 of 100 columns)
If you only need a section of the data, you can read only that section by indexing the DataStub object like a normal array in MATLAB. This will just read the selected region from disk into RAM. This technique is particularly useful if you are dealing with a large dataset that is too big to fit entirely into your available RAM.
read_spatial_series.data(:, 1:10)
ans = 2×10
0 0.1010 0.2020 0.3030 0.4040 0.5051 0.6061 0.7071 0.8081 0.9091
0 0.0808 0.1616 0.2424 0.3232 0.4040 0.4848 0.5657 0.6465 0.7273

Next Steps

This concludes the introductory tutorial. Please proceed to one of the specialized tutorials, which are designed to follow this one.
See the API documentation to learn what data types are available.
\ No newline at end of file diff --git a/tutorials/html/ophys.html b/tutorials/html/ophys.html deleted file mode 100644 index bbf83ab4..00000000 --- a/tutorials/html/ophys.html +++ /dev/null @@ -1,564 +0,0 @@ - -MatNWB Optical Physiology Tutorial

MatNWB Optical Physiology Tutorial

Introduction

In this tutorial, we will create fake data for a hypothetical optical physiology experiment with a freely moving animal. The types of data we will convert are:
  • Acquired two-photon images
  • Image segmentation (ROIs)
  • Fluorescence and dF/F response
It is recommended to first work through the Introduction to MatNWB tutorial, which demonstrates installing MatNWB and creating an NWB file with subject information, animal position, and trials, as well as writing and reading NWB files in MATLAB.
Please note: The dimensions of timeseries data in MatNWB should be defined in the opposite order of how it is defined in the nwb-schemas. In NWB, time is always stored in the first dimension of the data, whereas in MatNWB data should be specified with time along the last dimension. This is explained in more detail here: MatNWB <-> HDF5 Dimension Mapping.
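As a made-up illustration of this convention, a movie that is conceptually (time, y, x) in the NWB schema is handed to MatNWB with time along the last MATLAB dimension:
% simulate 1000 frames of 100 (y) by 200 (x) pixels in (time, y, x) order
data_tyx = rand(1000, 100, 200);
% reverse the dimension order so the array becomes (x, y, time) for MatNWB
data_for_matnwb = permute(data_tyx, [3, 2, 1]);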

Set up the NWB file

An NWB file represents a single session of an experiment. Each file must have a session_description, identifier, and session start time. Create a new NWBFile object with those and additional metadata. For all MatNWB functions, we use the Matlab method of entering keyword argument pairs, where arguments are entered as name followed by value.
nwb = NwbFile( ...
'session_description', 'mouse in open exploration',...
'identifier', 'Mouse5_Day3', ...
'session_start_time', datetime(2018, 4, 25, 2, 30, 3, 'TimeZone', 'local'), ...
'timestamps_reference_time', datetime(2018, 4, 25, 3, 0, 45, 'TimeZone', 'local'), ...
'general_experimenter', 'LastName, FirstName', ... % optional
'general_session_id', 'session_1234', ... % optional
'general_institution', 'University of My Institution', ... % optional
'general_related_publications', {'DOI:10.1016/j.neuron.2016.12.011'}); % optional
nwb
nwb =
NwbFile with properties:

    nwb_version: '2.8.0'
    file_create_date: []
    identifier: 'Mouse5_Day3'
    session_description: 'mouse in open exploration'
    session_start_time: {[2018-04-25T02:30:03.000000+02:00]}
    timestamps_reference_time: {[2018-04-25T03:00:45.000000+02:00]}
    acquisition: [0×1 types.untyped.Set]
    analysis: [0×1 types.untyped.Set]
    general: [0×1 types.untyped.Set]
    general_data_collection: ''
    general_devices: [0×1 types.untyped.Set]
    general_experiment_description: ''
    general_experimenter: 'LastName, FirstName'
    general_extracellular_ephys: [0×1 types.untyped.Set]
    general_extracellular_ephys_electrodes: []
    general_institution: 'University of My Institution'
    general_intracellular_ephys: [0×1 types.untyped.Set]
    general_intracellular_ephys_experimental_conditions: []
    general_intracellular_ephys_filtering: ''
    general_intracellular_ephys_intracellular_recordings: []
    general_intracellular_ephys_repetitions: []
    general_intracellular_ephys_sequential_recordings: []
    general_intracellular_ephys_simultaneous_recordings: []
    general_intracellular_ephys_sweep_table: []
    general_keywords: ''
    general_lab: ''
    general_notes: ''
    general_optogenetics: [0×1 types.untyped.Set]
    general_optophysiology: [0×1 types.untyped.Set]
    general_pharmacology: ''
    general_protocol: ''
    general_related_publications: {'DOI:10.1016/j.neuron.2016.12.011'}
    general_session_id: 'session_1234'
    general_slices: ''
    general_source_script: ''
    general_source_script_file_name: ''
    general_stimulus: ''
    general_subject: []
    general_surgery: ''
    general_virus: ''
    general_was_generated_by: ''
    intervals: [0×1 types.untyped.Set]
    intervals_epochs: []
    intervals_invalid_times: []
    intervals_trials: []
    processing: [0×1 types.untyped.Set]
    scratch: [0×1 types.untyped.Set]
    stimulus_presentation: [0×1 types.untyped.Set]
    stimulus_templates: [0×1 types.untyped.Set]
    units: []

Optical Physiology

Optical physiology results are written in four steps:
  1. Create imaging plane
  2. Acquired two-photon images
  3. Image segmentation
  4. Fluorescence and dF/F responses

Imaging Plane

First, you must create an ImagingPlane object, which will hold information about the area and method used to collect the optical imaging data. This requires creation of a Device object for the microscope and an OpticalChannel object. Then you can create an ImagingPlane.
Create a Device representing a two-photon microscope. The fields description, manufacturer, model_number, model_name, and serial_number are optional, but recommended. Then create an OpticalChannel and add both of these to the ImagingPlane.
device = types.core.Device( ...
'description', 'My two-photon microscope', ...
'manufacturer', 'Loki Labs', ...
'model_number', 'ABC-123', ...
'model_name', 'Loki 1.0', ...
'serial_number', '1234567890');
 
% Add device to nwb object
nwb.general_devices.set('Device', device);
 
optical_channel = types.core.OpticalChannel( ...
'description', 'description', ...
'emission_lambda', 500.);
 
imaging_plane_name = 'imaging_plane';
imaging_plane = types.core.ImagingPlane( ...
'optical_channel', optical_channel, ...
'description', 'a very interesting part of the brain', ...
'device', types.untyped.SoftLink(device), ...
'excitation_lambda', 600., ...
'imaging_rate', 5., ...
'indicator', 'GFP', ...
'location', 'my favorite brain location');
 
nwb.general_optophysiology.set(imaging_plane_name, imaging_plane);

Storing Two-Photon Data

You can create a TwoPhotonSeries class representing two photon imaging data. TwoPhotonSeries, like SpatialSeries, inherits from TimeSeries and is similar in behavior to OnePhotonSeries.
InternalTwoPhoton = types.core.TwoPhotonSeries( ...
'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
'starting_time', 0.0, ...
'starting_time_rate', 3.0, ...
'data', ones(200, 100, 1000), ...
'data_unit', 'lumens');
 
nwb.acquisition.set('2pInternal', InternalTwoPhoton);

Storing One-Photon Data

Now that we have our ImagingPlane, we can create a OnePhotonSeries object to store raw one-photon imaging data.
% using internal data. this data will be stored inside the NWB file
InternalOnePhoton = types.core.OnePhotonSeries( ...
'data', ones(100, 100, 1000), ...
'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
'starting_time', 0., ...
'starting_time_rate', 1.0, ...
'data_unit', 'normalized amplitude' ...
);
nwb.acquisition.set('1pInternal', InternalOnePhoton);

Motion Correction (optional)

You can also store the result of motion correction using a MotionCorrection object, a container type that can hold one or more CorrectedImageStack objects.
% Create the corrected ImageSeries
corrected = types.core.ImageSeries( ...
'description', 'A motion corrected image stack', ...
'data', ones(100, 100, 1000), ... % 3D data array
'data_unit', 'n/a', ...
'format', 'raw', ...
'starting_time', 0.0, ...
'starting_time_rate', 1.0 ...
);
 
% Create the xy_translation TimeSeries
xy_translation = types.core.TimeSeries( ...
'description', 'x,y translation in pixels', ...
'data', ones(2, 1000), ... % 2D data array
'data_unit', 'pixels', ...
'starting_time', 0.0, ...
'starting_time_rate', 1.0 ...
);
 
% Create the CorrectedImageStack
corrected_image_stack = types.core.CorrectedImageStack( ...
'corrected', corrected, ...
'original', types.untyped.SoftLink(InternalOnePhoton), ... % Ensure `InternalOnePhoton` exists
'xy_translation', xy_translation ...
);
 
% Create the MotionCorrection object
motion_correction = types.core.MotionCorrection();
motion_correction.correctedimagestack.set('CorrectedImageStack', corrected_image_stack);
The motion corrected data is considered processed data and will be added to the processing field of the nwb object using a ProcessingModule called "ophys". First, create the ProcessingModule object and then add the motion_correction object to it, naming it "MotionCorrection".
ophys_module = types.core.ProcessingModule( ...
'description', 'Contains optical physiology data');
ophys_module.nwbdatainterface.set('MotionCorrection', motion_correction);
Finally, add the "ophys" ProcessingModule to the nwb (Note that we can continue adding objects to the "ophys" ProcessingModule without needing to explicitly update the nwb):
nwb.processing.set('ophys', ophys_module);

Plane Segmentation

Image segmentation stores the detected regions of interest in the TwoPhotonSeries data. ImageSegmentation allows you to have more than one segmentation by creating more PlaneSegmentation objects.

Regions of interest (ROIs)

ROIs can be added to a PlaneSegmentation either as an image_mask or as a pixel_mask. An image mask is an array that is the same size as a single frame of the TwoPhotonSeries, and indicates where a single region of interest is. This image mask may be boolean or continuous between 0 and 1. A pixel_mask, on the other hand, is a list of indices (i.e., coordinates) and weights for the ROI. The pixel_mask is represented as a compound data type using a ragged array, and below is an example demonstrating how to create either an image_mask or a pixel_mask. Changing the value of the selection variable will update the PlaneSegmentation object accordingly.
selection = "Create Image Mask"; % "Create Image Mask" or "Create Pixel Mask"
 
% generate fake image_mask data
imaging_shape = [100, 100];
x = imaging_shape(1);
y = imaging_shape(2);
 
n_rois = 20;
image_mask = zeros(y, x, n_rois);
center = randi(90,2,n_rois);
for i = 1:n_rois
image_mask(center(1,i):center(1,i)+10, center(2,i):center(2,i)+10, i) = 1;
end
 
if selection == "Create Pixel Mask"
ind = find(image_mask);
[y_ind, x_ind, roi_ind] = ind2sub(size(image_mask), ind);
 
pixel_mask_struct = struct();
pixel_mask_struct.x = uint32(x_ind); % Add x coordinates to struct field x
pixel_mask_struct.y = uint32(y_ind); % Add y coordinates to struct field y
pixel_mask_struct.weight = single(ones(size(x_ind)));
% Create pixel mask vector data
pixel_mask = types.hdmf_common.VectorData(...
'data', struct2table(pixel_mask_struct), ...
'description', 'pixel masks');
 
% When creating a pixel mask, it is also necessary to specify a
% pixel_mask_index vector. See the documentation for ragged arrays linked
% above to learn more.
num_pixels_per_roi = zeros(n_rois, 1); % Column vector
for i_roi = 1:n_rois
num_pixels_per_roi(i_roi) = sum(roi_ind == i_roi);
end
 
pixel_mask_index = uint16(cumsum(num_pixels_per_roi)); % Note: Use an integer
% type that can accommodate the maximum value of the cumulative sum
 
% Create pixel_mask_index vector
pixel_mask_index = types.hdmf_common.VectorIndex(...
'description', 'Index into pixel_mask VectorData', ...
'data', pixel_mask_index, ...
'target', types.untyped.ObjectView(pixel_mask) );
 
plane_segmentation = types.core.PlaneSegmentation( ...
'colnames', {'pixel_mask'}, ...
'description', 'roi pixel position (x,y) and pixel weight', ...
'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
'pixel_mask_index', pixel_mask_index, ...
'pixel_mask', pixel_mask ...
);
 
else % selection == "Create Image Mask"
plane_segmentation = types.core.PlaneSegmentation( ...
'colnames', {'image_mask'}, ...
'description', 'output from segmenting my favorite imaging plane', ...
'imaging_plane', types.untyped.SoftLink(imaging_plane), ...
'image_mask', types.hdmf_common.VectorData(...
'data', image_mask, ...
'description', 'image masks') ...
);
end

Adding ROIs to NWB file

Now create an ImageSegmentation object and put the plane_segmentation object inside of it, naming it "PlaneSegmentation".
img_seg = types.core.ImageSegmentation();
img_seg.planesegmentation.set('PlaneSegmentation', plane_segmentation);
Add the img_seg object to the "ophys" ProcessingModule we created before, naming it "ImageSegmentation".
ophys_module.nwbdatainterface.set('ImageSegmentation', img_seg);

Storing fluorescence of ROIs over time

Now that ROIs are stored, you can store fluorescence data for these regions of interest. This type of data is stored using the RoiResponseSeries class.
To create a RoiResponseSeries object, we will need to reference a set of rows from the PlaneSegmentation table to indicate which ROIs correspond to which rows of your recorded data matrix. This is done using a DynamicTableRegion, which is a type of link that allows you to reference specific rows of a DynamicTable, such as a PlaneSegmentation table by row indices.
First, we create a DynamicTableRegion that references the ROIs of the PlaneSegmentation table.
roi_table_region = types.hdmf_common.DynamicTableRegion( ...
'table', types.untyped.ObjectView(plane_segmentation), ...
'description', 'all_rois', ...
'data', (0:n_rois-1)');
Then we create a RoiResponseSeries object to store fluorescence data for those ROIs.
roi_response_series = types.core.RoiResponseSeries( ...
'rois', roi_table_region, ...
'data', NaN(n_rois, 100), ... % [nRoi, nT]
'data_unit', 'lumens', ...
'starting_time_rate', 3.0, ...
'starting_time', 0.0);
To help data analysis and visualization tools know that this RoiResponseSeries object represents fluorescence data, we will store the RoiResponseSeries object inside of a Fluorescence object. Then we add the Fluorescence object into the same ProcessingModule named "ophys" that we created earlier.
fluorescence = types.core.Fluorescence();
fluorescence.roiresponseseries.set('RoiResponseSeries', roi_response_series);
 
ophys_module.nwbdatainterface.set('Fluorescence', fluorescence);
Tip: If you want to store dF/F data instead of fluorescence data, then store the RoiResponseSeries object in a DfOverF object, which works the same way as the Fluorescence class.
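A minimal sketch of that variant, mirroring the fluorescence example above (the dF/F values are placeholders, and this assumes DfOverF exposes the same roiresponseseries set as Fluorescence):
dff_response_series = types.core.RoiResponseSeries( ...
'rois', roi_table_region, ...
'data', NaN(n_rois, 100), ... % [nRoi, nT] placeholder dF/F values
'data_unit', 'n.a.', ...
'starting_time_rate', 3.0, ...
'starting_time', 0.0);
 
df_over_f = types.core.DfOverF();
df_over_f.roiresponseseries.set('RoiResponseSeries', dff_response_series);
ophys_module.nwbdatainterface.set('DfOverF', df_over_f);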

Writing the NWB file

nwb_file_name = 'ophys_tutorial.nwb';
if isfile(nwb_file_name); delete(nwb_file_name); end
nwbExport(nwb, nwb_file_name);
Warning: The property "grid_spacing_unit" of type "types.core.ImagingPlane" was not exported to file location "/general/optophysiology/imaging_plane" because it depends on the property "grid_spacing" which is unset.
Warning: The property "origin_coords_unit" of type "types.core.ImagingPlane" was not exported to file location "/general/optophysiology/imaging_plane" because it depends on the property "origin_coords" which is unset.

Reading the NWB file

read_nwb = nwbRead(nwb_file_name, 'ignorecache');
Data arrays are read passively from the file. Calling TimeSeries.data does not read the data values, but presents an HDF5 object that can be indexed to read data.
read_nwb.processing.get('ophys').nwbdatainterface.get('Fluorescence')...
.roiresponseseries.get('RoiResponseSeries').data
ans =
DataStub with properties:

    filename: 'ophys_tutorial.nwb'
    path: '/processing/ophys/Fluorescence/RoiResponseSeries/data'
    dims: [20 100]
    ndims: 2
    dataType: 'double'
This allows you to conveniently work with datasets that are too large to fit in RAM all at once. Access the data in the matrix using the load method.
load with no input arguments reads the entire dataset:
read_nwb.processing.get('ophys').nwbdatainterface.get('Fluorescence'). ...
roiresponseseries.get('RoiResponseSeries').data.load
ans = 20×100
NaN   NaN   NaN   ...   (20×100 array; every displayed value is NaN, output truncated)
If you only need a section of the data, you can read just that section by indexing the DataStub object like a normal MATLAB array. Only the selected region is read from disk into RAM, which makes this technique particularly useful when a dataset is too large to fit into available memory.
read_nwb.processing.get('ophys'). ...
nwbdatainterface.get('Fluorescence'). ...
roiresponseseries.get('RoiResponseSeries'). ...
data(1:5, 1:10)
ans = 5×10
   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN
   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN
   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN
   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN
   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN   NaN
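The same region can also be requested through the DataStub load method, which accepts start and end coordinates (and optionally a stride); this is a hedged sketch, so check the DataStub documentation for the exact signature in your MatNWB version.
roi_data = read_nwb.processing.get('ophys'). ...
nwbdatainterface.get('Fluorescence'). ...
roiresponseseries.get('RoiResponseSeries').data;
subset = roi_data.load([1, 1], [5, 10]); % rows 1 through 5, columns 1 through 10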
% read back the image/pixel masks and display the first roi
plane_segmentation = read_nwb.processing.get('ophys'). ...
nwbdatainterface.get('ImageSegmentation'). ...
planesegmentation.get('PlaneSegmentation');
 
if ~isempty(plane_segmentation.image_mask)
roi_mask = plane_segmentation.image_mask.data(:,:,1);
elseif ~isempty(plane_segmentation.pixel_mask)
row = plane_segmentation.getRow(1, 'columns', {'pixel_mask'});
pixel_mask = row.pixel_mask{1};
roi_mask = zeros(imaging_shape);
ind = sub2ind(imaging_shape, pixel_mask.y, pixel_mask.x);
roi_mask(ind) = pixel_mask.weight;
end
imshow(roi_mask)

Learn more!

See the API documentation to learn what data types are available.

Other MatNWB tutorials

Python tutorials

See our tutorials for more details about your data type:
Check out other tutorials that teach advanced NWB topics:


Scratch Data

This tutorial focuses on the basics of working with an NWBFile for storing non-standardizable data. For example, you may want to store results from one-off analyses that have only temporary utility. NWB provides in-file scratch space as a dedicated location where such miscellaneous, non-standard data may be written.

Setup

Let us first set up an environment with some "acquired data".
ContextFile = NwbFile(...
'session_description', 'demonstrate NWBFile scratch', ... % required
'identifier', 'SCRATCH-0', ... % required
'session_start_time', datetime(2019, 4, 3, 11, 0, 0, 'TimeZone', 'local'), ... % required
'file_create_date', datetime(2019, 4, 15, 12, 0, 0, 'TimeZone', 'local'), ... % optional
'general_experimenter', 'Niu, Lawrence', ...
'general_institution', 'NWB' ...
);
% simulate some data
timestamps = 0:100:1024;
data = sin(0.333 .* timestamps) ...
+ cos(0.1 .* timestamps) ...
+ randn(1, length(timestamps));
RawTs = types.core.TimeSeries(...
'data', data, ...
'data_unit', 'm', ...
'starting_time', 0., ...
'starting_time_rate', 100, ...
'description', 'simulated acquired data' ...
);
ContextFile.acquisition.set('raw_timeseries', RawTs);
 
% "analyze" the simulated data
% we provide a re-implementation of scipy.signal.correlate(..., mode='same')
% Ideally you would use MATLAB's built-in equivalent (xcorr), but that function requires
% the Signal Processing Toolbox.
correlatedData = sameCorr(RawTs.data, ones(128, 1)) ./ 128;
% If you are unsure of how HDF5 paths map to MatNWB property structures, we suggest using HDFView to
% verify. In most cases, MatNWB properties map directly to HDF5 paths.
FilteredTs = types.core.TimeSeries( ...
'data', correlatedData, ...
'data_unit', 'm', ...
'starting_time', 0, ...
'starting_time_rate', 100, ...
'description', 'cross-correlated data' ...
)
FilteredTs =
TimeSeries with properties:

     starting_time_unit: 'seconds'
    timestamps_interval: 1
        timestamps_unit: 'seconds'
                   data: [0.0461 0.0461 0.0461 0.0461 0.0461 0.0461 0.0461 0.0461 0.0461 0.0461 0.0461]
               comments: 'no comments'
                control: []
    control_description: ''
        data_continuity: ''
        data_conversion: 1
            data_offset: 0
        data_resolution: -1
              data_unit: 'm'
            description: 'cross-correlated data'
          starting_time: 0
     starting_time_rate: 100
             timestamps: []
ProcModule = types.core.ProcessingModule( ...
'description', 'a module to store filtering results', ...
'filtered_timeseries', FilteredTs ...
);
ContextFile.processing.set('core', ProcModule);
nwbExport(ContextFile, 'context_file.nwb');

Warning Regarding the Usage of Scratch Space

Data written to the scratch space is not intended for reuse or sharing. Standard NWB types, along with any extensions, should always be used for data that is intended to be shared. Published data should not include scratch data, and reusing the data should not require scratch data for processing.

Writing Data to Scratch Space

Let us first copy what we need from the processed data file.
ScratchFile = NwbFile('identifier', 'SCRATCH-1');
ContextFile = nwbRead('./context_file.nwb', 'ignorecache');
% again, copy the required metadata from the processed file.
ScratchFile.session_description = ContextFile.session_description;
ScratchFile.session_start_time = ContextFile.session_start_time;
We can now perform an analysis that has no NWB specification but whose results we still wish to store.
% ProcessingModule stores its timeseries inside the "nwbdatainterface" property, which is a Set of
% NWBDataInterface objects. This property is not mapped directly to the NWB file; it distinguishes
% these objects from DynamicTable objects, which are stored under the "dynamictable" property.
FilteredTs = ContextFile.processing.get('core').nwbdatainterface.get('filtered_timeseries');
% note: MatNWB does not currently support complex numbers. If you wish to store the data, consider
% storing each number as a struct which will write the data to HDF5 using compound types.
dataFft = real(fft(FilteredTs.data.load()));
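% As noted above, MatNWB does not support complex numbers directly. A hypothetical
% workaround (an untested sketch; confirm how your MatNWB version maps structs to HDF5
% compound types) is to split the values into real and imaginary struct fields:
complexFft = fft(FilteredTs.data.load());
complexAsStruct = struct('real', real(complexFft), 'imag', imag(complexFft));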
ScratchData = types.core.ScratchData( ...
'data', dataFft, ...
'notes', 'discrete Fourier transform from filtered data' ...
)
ScratchData =
ScratchData with properties:

    notes: 'discrete Fourier transform from filtered data'
     data: [11×1 double]
ScratchFile.scratch.set('dft_filtered', ScratchData);
nwbExport(ScratchFile, 'scratch_analysis.nwb');
The scratch_analysis.nwb file will now have scratch data stored in it:
scratch_filtered.png
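To confirm that the scratch data round-trips, it can be read back; this is a minimal sketch using the file exported above.
ReadScratchFile = nwbRead('scratch_analysis.nwb', 'ignorecache');
dftReadback = ReadScratchFile.scratch.get('dft_filtered').data.load();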
function C = sameCorr(A, B)
% SAMECORR Equivalent of scipy.signal.correlate(A, B, mode="same")
% Cross-correlation is convolution with the reversed, conjugated kernel,
% so flip B along every dimension before convolving.
for iDim = 1:ndims(B)
B = flip(B, iDim);
end
C = conv(A, conj(B), 'same');
end