diff --git a/analysis/mars/io/common.py b/analysis/mars/io/common.py
new file mode 100644
index 0000000..032f914
--- /dev/null
+++ b/analysis/mars/io/common.py
@@ -0,0 +1,89 @@
+"""
+Class for performing command line arg parsing, tokenizing, etc.
+"""
+
+__author__ = 'Vyassa Baratham '
+
+import argparse
+import os
+
+from mars import NSE_DATAROOT
+from mars.io.tokenizer import Tokenizer
+from mars.configs.block_directory import bl
+from mars.io import NSENWB
+
+class MarsBaseArgParser(argparse.ArgumentParser):
+ def __init__(self, *args, **kwargs):
+ super(MarsBaseArgParser, self).__init__(*args, **kwargs)
+
+ self.add_argument('--block', '--blockname', type=str, required=True,
+ help="Block whose configuration to use " + \
+ "(see block_directory.py)")
+ self.add_argument('--nwb', type=str, required=False, default=None,
+ help="use this .nwb file instead of looking for one " + \
+ "within the block directory. Required if not passing" + \
+ "--droot")
+ self.add_argument('--droot', type=str, required=False, default=NSE_DATAROOT,
+ help="root data directory. Required if not passing --nwb")
+
+ self._args = None
+
+ @property
+ def args(self):
+ if not self._args:
+ self.parse_args()
+ return self._args
+
+ def parse_args(self):
+ self._args = super(MarsBaseArgParser, self).parse_args()
+ return self._args
+
+ def nwb_filename(self):
+ if self.args.nwb:
+ return self.args.nwb
+
+ return os.path.join(
+ self.args.droot,
+ self.args.block.split('_')[0],
+ self.args.block,
+ '{}.nwb'.format(self.args.block)
+ )
+
+ def block_info(self):
+ return bl[self.args.block]
+
+ def reader(self):
+ # return NWBReader(self.nwb_filename(), block_name=self.args.block)
+ return NSENWB.from_existing_nwb(self.args.block, self.nwb_filename())
+
+ def tokenizer(self):
+ # TODO: Load the right one based on block directory (when we put that info there)
+ return Tokenizer(self.reader())
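+
+# Usage sketch (hypothetical script; assumes a block registered in
+# block_directory.py and a matching .nwb file on disk):
+#
+#   parser = MarsBaseArgParser(description='my analysis')
+#   args = parser.args           # parsed lazily on first access
+#   nsenwb = parser.reader()     # NSENWB wrapper around the block's .nwb file
+#   tok = parser.tokenizer()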
+
+
+class MarsArgParser(MarsBaseArgParser):
+ def __init__(self, *args, **kwargs):
+ super(MarsArgParser, self).__init__(*args, **kwargs)
+
+ self.add_argument('--device', type=str, required=True,
+ help="eg 'Poly' or 'ECoG'")
+
+
+class MarsProcessedArgParser(MarsArgParser):
+ def __init__(self, *args, **kwargs):
+ super(MarsProcessedArgParser, self).__init__(*args, **kwargs)
+
+ self.add_argument('--processed', type=str, required=False, default='Hilb_54bands',
+ help="which preprocessed data to use, " + \
+ "eg. 'Wvlt_4to1200_54band_CAR1' (must be a key " + \
+ "in processing/preprocessed/ in the .nwb file)")
+
+# class MarsRawArgParser(MarsArgParser):
+# def __init__(self, *args, **kwargs):
+# super(MarsArgParser, self).__init__(*args, **kwargs)
+
+# self.add_argument('--raw', type=str, required=True,
+# help="which raw data to use, " + \
+# "eg. 'Wvlt_4to1200_54band_CAR1' (must be a key " + \
+# "in acquisition/Raw/ in the .nwb file)")
+
diff --git a/analysis/mars/io/nsenwb.py b/analysis/mars/io/nsenwb.py
new file mode 100644
index 0000000..4f85de1
--- /dev/null
+++ b/analysis/mars/io/nsenwb.py
@@ -0,0 +1,499 @@
+"""
+Write NSE Lab rodent electrophysiological response recordings to NWB
+"""
+
+__author__ = 'Max Dougherty '
+
+# [... imports, class definition, and the beginning of NSENWB.index_dset()
+# are elided from this excerpt; the hunk resumes mid-method below ...]
+
+ # INDEX BY TIME:
+ if sum(x is not None for x in (time_idx, time_range, trial_query)) > 1:
+ raise ValueError("Choose one time index method only")
+
+ if time_idx:
+ time_idx = time_idx
+ elif time_range:
+ time_idx = self._index_for_time_range(time_range, dset.rate, dset.starting_time)
+ elif trial_query:
+ time_idx = self._index_for_trials(dset.rate, trial_query, pre_dur, post_dur)
+ else:
+ time_idx = slice(None)
+
+
+ # INDEX BY CHANNELS:
+ if dset_channels is not None and device_channels is not None:
+ raise ValueError("Choose one channel index method only")
+
+ if dset_channels is not None:
+ ch_idx = dset_channels
+ elif device_channels is not None:
+ ch_idx = self._index_for_device_channels(dset, device_channels)
+ else:
+ ch_idx = slice(None)
+
+ if dset.data.ndim < 2:
+ return dset.data[time_idx]
+
+ # Prepare to zscore:
+ if zscore:
+ bl_data = np.concatenate(
+ list(self.index_dset(dset, dset_channels=ch_idx, trial_query="sb == 'b'")),
+ axis=0
+ )
+ bl_mean = np.mean(bl_data, axis=0)
+ bl_std = np.std(bl_data, axis=0)
+ maybe_zscore = lambda x: (x - bl_mean) / bl_std
+ else:
+ maybe_zscore = lambda x: x
+
+ if isinstance(time_idx, types.GeneratorType):
+ def _iter():
+ for t_idx in time_idx:
+ yield maybe_zscore(np.atleast_2d(dset.data[t_idx, ch_idx, ...]))
+ return _iter()
+ else:
+ return maybe_zscore(np.atleast_2d(dset.data[time_idx, ch_idx, ...]))
+
+ @classmethod
+ def _index_for_time_range(cls, time_range, rate, starting_time=0.0):
+ # TODO: Allow for selecting multiple timeranges
+ start = int(np.round((time_range[0]-starting_time) * rate))
+ stop = int(np.round((time_range[1]-starting_time) * rate))
+ return slice(start, stop)
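+
+ # Worked example: rate=400., starting_time=0., time_range=(0.5, 1.0)
+ # gives slice(int(round(0.5*400)), int(round(1.0*400))) == slice(200, 400).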
+
+ @classmethod
+ def _index_for_device_channels(cls, dset, channels):
+ device = dset.name # TODO: is dset.name always the device name?
+ try:
+ # processed dset
+ electrodes_df = dset.source_timeseries.electrodes.table.to_dataframe().query('group_name == @device')
+ except AttributeError:
+ # raw dset
+ electrodes_df = dset.electrodes.table.to_dataframe().query('group_name == @device')
+ chs = {elec.location: i for i, elec in enumerate(electrodes_df.itertuples())}
+ return [chs[str(chnum)] for chnum in channels]
+
+ def _index_for_trials(self, rate, trial_query=None, pre_dur=0.0, post_dur=0.0):
+ # Returns a generator
+ table = self.nwb.trials.to_dataframe()
+ if trial_query:
+ table = table.query(trial_query)
+
+ for s in table.itertuples():
+ yield self._index_for_time_range((s.start_time-pre_dur, s.stop_time+post_dur), rate)
+
+ # def electrode_order(self, device_name, device_channels, axis='z'):
+ # # Get the channel order
+ # device_raw = self.read_raw(device_name) #TODO: Can we read electrodes without needing to go through a dataset?
+ # channel_positions = []
+ # for ch in device_channels:
+ # query = 'group_name == "%s" & location == "%s"'%(device_name,ch)
+ # channel_positions.append(float(device_raw.electrodes.table.to_dataframe().query(query)[axis]))
+ # channel_positions = np.array(channel_positions)
+ # channel_order = np.arange(len(device_channels))[np.argsort(channel_positions)]
+ # return channel_order, np.sort(channel_positions)
+
+ def has_analysis_dataset(self,device_path,device_name,dataset_name):
+ # Check if NWB analysis dataset exists
+ carr_path = path.join(self.nwb_directory,self.block_name+'.h5')
+ dset_path = device_path + '/' + device_name + '/' + dataset_name
+ if not path.exists(carr_path):
+ return False
+ with h5py.File(carr_path,'r') as f:
+ if dset_path not in f:
+ return False
+ return True
+
+ def read_analysis_dataset(self,device_path,device_name,dataset_name):
+ # Read an NWB analysis dataset
+ carr_path = path.join(self.nwb_directory,self.block_name+'.h5')
+ dset_path = device_path + '/' + device_name + '/' + dataset_name
+ if not path.exists(carr_path):
+ return False
+ with h5py.File(carr_path,'r') as f:
+ if dset_path not in f:
+ return False
+ data = np.array(f[dset_path])
+ return data
+
+ # These functions are much less capable than index_dset() but are here for backwards compatibility
+ def read_trials(self, dset, pre_dur=0.0, post_dur=0.0, trial_query=None):
+ """
+ Read data associated with a particular stimulus
+ """
+ return self.index_dset(dset, trial_query=trial_query, pre_dur=pre_dur, post_dur=post_dur)
+
+ def index_by_device_channels(self, dset, channels, timerange=None):
+ """
+ dset - nwb Timeseries object
+ channels - device-defined channel numbers
+ """
+ return self.index_dset(dset, device_channels=channels, time_range=timerange)
+
+ ###################
+ ## OTHER METHODS ##
+ ###################
+ def device_channels(self, device, remove_bad=False):
+ """
+ Return the device channel IDs.
+ """
+ elec = self.nwb.electrodes
+ device_idx = elec['group_name'].data[:] == device
+ device_chs = elec['location'].data[device_idx]
+ if remove_bad:
+ bad_chs = np.array(self.block_params['bad_chs'][device]).astype('str')
+ device_chs = np.array([c for c in device_chs if not c in bad_chs])
+ return device_chs
+
+ def channel_positions(self, device, remove_bad=False):
+ """
+ Return a (3, n_channels) array containing the x,y,z positions of each electrode in device
+ """
+ elec = self.nwb.electrodes
+ device_idx = elec['group_name'].data[:] == device
+ return np.array([elec['x'].data[device_idx],
+ elec['y'].data[device_idx],
+ elec['z'].data[device_idx]])
+
+ def ordered_channels(self, device='Poly', reverse=False):
+ """
+ Return a list of device channel IDs (starting from 1) and dset indexes (starting from 0),
+ sorted by z coordinate.
+ Also return the corresponding z coordinates
+ """
+ elec = self.nwb.electrodes
+ device_idx = elec['group_name'].data[:] == device
+ z = elec['z'].data[device_idx]
+ ch_ids = np.array([int(ch) for ch in elec['location'].data[device_idx]])
+
+ sort_idx = np.argsort(z) # in mars, z coordinates are positive
+ if reverse:
+ return ch_ids[sort_idx][::-1], sort_idx[::-1], np.sort(z)[::-1]
+ else:
+ return ch_ids[sort_idx], sort_idx, np.sort(z)
+
+ def write(self, save_path=None, time=False):
+ tstart = datetime.now()
+ self.io = NWBHDF5IO(save_path, 'w') if save_path else self.io
+ self.io.write(self.nwb)
+ if time:
+ print('Write time for {}: {}s'.format(self.block_name,datetime.now()-tstart))
+
+ def close(self):
+ # check for self.io without throwing error
+ if getattr(self, 'io', None):
+ self.io.close()
+
+
diff --git a/analysis/mars/preprocess.py b/analysis/mars/preprocess.py
new file mode 100755
index 0000000..8dd2f33
--- /dev/null
+++ b/analysis/mars/preprocess.py
@@ -0,0 +1,365 @@
+#!/usr/bin/env python
+from __future__ import print_function
+
+import argparse
+import h5py
+import time
+import sys
+import os
+import logging
+
+import numpy as np
+
+from hdmf.data_utils import AbstractDataChunkIterator, DataChunk
+
+try:
+ from tqdm import tqdm
+except ImportError:
+ def tqdm(x, *args, **kwargs):
+ return x
+
+from mars.signal_processing import resample
+from mars.signal_processing import subtract_CAR
+from mars.signal_processing import linenoise_notch
+from mars.signal_processing import hilbert_transform
+from mars.signal_processing import gaussian
+from mars.utils import bands
+from mars.wn import mua_signal, mua_rate
+from mars.io import NSENWB
+
+log = logging.getLogger('mars_preprocess')
+log.setLevel(logging.DEBUG)
+formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
+ch = logging.StreamHandler(stream=sys.stdout)
+ch.setFormatter(formatter)
+ch.setLevel(logging.DEBUG)
+log.addHandler(ch)
+
+
+def _get_cfs(_cfs):
+ # Default: use precomputed wavelet cfs
+ if _cfs is None:
+ return bands.wavelet['cfs']
+
+ # Case 1: use precomputed cfs for Chang Lab or wavelet
+ if _cfs[0].lower() in ('chang', 'changlab'):
+ return bands.chang_lab['cfs']
+ elif _cfs[0].lower() in ('wavelet', 'wave', 'wvlt'):
+ return bands.wavelet['cfs']
+
+ # Case 2: call to a function in bands.py
+ elif _cfs[0].lower() in ('log', 'logspace', 'logspaced'):
+ return bands.log_spaced_cfs(*[float(arg) for arg in _cfs[1:]])
+
+ # Case 3: list of numbers
+ else:
+ return np.array([float(cf) for cf in _cfs])
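+
+# How the --cfs forms resolve (sketch; values come from bands.py):
+#   _get_cfs(None)                       -> bands.wavelet['cfs']
+#   _get_cfs(['changlab'])               -> bands.chang_lab['cfs']
+#   _get_cfs(['logspaced', '10', '200']) -> bands.log_spaced_cfs(10.0, 200.0)
+#   _get_cfs(['2', '4', '8'])            -> np.array([2., 4., 8.])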
+
+
+def _get_sds(cfs, _sds):
+ # Default: use precomputed wavelet cfs
+ if _sds is None:
+ return bands.wavelet['sds']
+
+ # Case 1: use precomputed sds for Chang Lab or wavelet
+ if _sds[0].lower() in ('chang', 'changlab'):
+ return bands.chang_lab['sds']
+ elif _sds[0].lower() in ('wavelet', 'wave', 'wvlt'):
+ return bands.wavelet['sds']
+
+ # Case 2: Call to a function in bands.py
+ elif _sds[0].lower() in ('q', 'constq', 'cqt'):
+ return bands.const_Q_sds(cfs, *[float(arg) for arg in _sds[1:]])
+ elif _sds[0].lower() in ('sqrt', 'scaledsqrt'):
+ return bands.scaled_sqrt_sds(cfs, *[float(arg) for arg in _sds[1:]])
+
+ # Case 3: list of numbers
+ else:
+ return np.array([float(sd) for sd in _sds])
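+
+# Similarly for --sds (sketch):
+#   _get_sds(cfs, ['constq', '8'])  -> bands.const_Q_sds(cfs, 8.0)
+#   _get_sds(cfs, ['sqrt', '0.39']) -> bands.scaled_sqrt_sds(cfs, 0.39)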
+
+
+def __resample(X, new_freq, old_freq, axis=-1):
+ assert new_freq < old_freq
+ n_timepts, n_ch = X.shape
+ if not np.allclose(new_freq, old_freq):
+ for ch in range(n_ch):
+ ch_X = X[:, ch]
+ yield resample(ch_X, new_freq, old_freq, axis=axis)
+ log.info("resampled channel {} of {}".format(ch+1, n_ch))
+
+def _resample(X, new_freq, old_freq, axis=-1):
+ return np.stack(__resample(X, new_freq, old_freq, axis=axis)).T
+
+# def _resample_iterator(X, new_freq, old_freq, axis=-1):
+# assert new_freq < old_freq
+# n_timepts, n_ch = X.shape
+# if not np.allclose(new_freq, old_freq):
+# for ch in range(n_ch):
+# ch_X = X[:, ch]
+# yield DataChunk(data=resample(ch_x, new_freq, old_freq, axis=axis),
+# selection=np.s_[:, ch])
+
+class MyDataChunkIterator(AbstractDataChunkIterator):
+ def __init__(self, it, dtype, n_ch, n_bands, approx_timepts=200000):
+ self.it = it
+ self._dtype = dtype
+ self._maxshape = (None, n_ch, n_bands)
+ self._approx_timepts = approx_timepts
+ self._chunkshape = self._approx_timepts, 1, 1
+ self._n_bands = n_bands
+ self._i = 0
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ data = next(self.it)
+ ch = self._i // self._n_bands
+ band = self._i % self._n_bands
+ self._i += 1
+
+ return DataChunk(data=data, selection=np.s_[:data.shape[0], ch, band])
+
+ next = __next__
+
+ @property
+ def dtype(self):
+ return self._dtype
+
+ @property
+ def maxshape(self):
+ return self._maxshape
+
+ def recommended_chunk_shape(self):
+ return self._chunkshape
+
+ def recommended_data_shape(self):
+ return (self._approx_timepts, self.maxshape[1], self.maxshape[2])
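+
+# Chunk-ordering note (sketch): the wrapped iterator is expected to yield one
+# full time series per (channel, band) pair, band-fastest, matching
+# _hilbert_one_by_one() below; chunk i is written to [:, i // n_bands, i % n_bands].
+# Minimal check with fabricated data (hypothetical shapes):
+#
+#   it = iter(np.random.rand(6, 1000))  # 2 channels x 3 bands, 1000 timepoints
+#   dci = MyDataChunkIterator(it, np.float64, n_ch=2, n_bands=3, approx_timepts=1000)
+#   chunk = next(dci)                   # selection fills [:1000, 0, 0]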
+
+def _notch_filter(X, rate):
+ return linenoise_notch(X, rate)
+
+def _subtract_CAR(X):
+ # subtract_CAR returns a new array; it does not modify X in place
+ return subtract_CAR(X)
+
+def _hilbert_onech(ch_X, rate, cfs, sds, final_resample):
+ """
+ Hilbert transform one channel, band by band
+ First resample already performed. Rate == first_resample
+ """
+ for i, (cf, sd) in enumerate(zip(cfs, sds)):
+ kernel = gaussian(ch_X, rate, cf, sd)
+ transform = np.abs(hilbert_transform(ch_X, rate, kernel))
+ final_data = resample(transform, final_resample, rate)
+ log.info("done band {}".format(i))
+ yield np.squeeze(final_data)
+ # yield DataChunk(data=final_data, selection=np.s_[:, ch, i])
+
+def _hilbert_iterator(X, rate, cfs, sds, first_resample, final_resample):
+ n_timepts, n_ch = X.shape
+ for ch in range(n_ch):
+ # ch_X = resample(np.atleast_2d(X[:, ch]).T, first_resample, rate).T
+ ch_X = np.atleast_2d(resample(np.squeeze(X[:, ch]), first_resample, rate)) # HACK
+ yield np.stack(_hilbert_onech(ch_X, first_resample, cfs, sds, final_resample), axis=-1)
+ log.info("done Hilbert on channel {} of {}".format(ch+1, n_ch))
+
+def _hilbert_one_by_one(X, rate, cfs, sds, first_resample, final_resample):
+ n_timepts, n_ch = X.shape
+ for ch in range(n_ch):
+ ch_X = _resample(np.atleast_2d(X[:, ch]).T, first_resample, rate).T
+ for one_band_done in _hilbert_onech(ch_X, first_resample, cfs, sds, final_resample):
+ yield one_band_done
+ log.info("done Hilbert on channel {} of {}".format(ch+1, n_ch))
+
+def _hilbert_transform(X, rate, cfs, sds, first_resample, final_resample):
+ n_timepts, n_ch = X.shape
+ approx_timepts_final = float(n_timepts) * final_resample / rate
+ # final = None #np.zeros(shape=(n_timepts, n_ch, len(cfs)), dtype=np.float32)
+ # for datachunk in _hilbert_iterator(X, rate, cfs, sds, final_resample=final_resample):
+ # import ipdb; ipdb.set_trace()
+ # if final is None:
+ # pass
+ # final[datachunk.selection] = datachunk.data
+
+
+ # return np.stack(_hilbert_iterator(X, rate, cfs, sds, first_resample, final_resample), axis=1)
+ it = _hilbert_one_by_one(X, rate, cfs, sds, first_resample, final_resample)
+ return MyDataChunkIterator(it, X.dtype, n_ch, len(cfs), approx_timepts=approx_timepts_final)
+ # return DataChunkIterator(data=it, maxshape=(None, n_ch, 54), dtype=np.dtype(float))
+
+ # x = np.stack(_hilbert_iterator(X, rate, cfs, sds, first_resample, final_resample), axis=-1)
+ # return x
+
+def _mua(X_raw, fs, lowcut, highcut, order):
+ mua = mua_signal(X_raw[:], fs, lowcut, highcut, order)
+ return mua, mua_rate(mua, fs)
+
+def _write_data(nsenwb, outfile, device, rate, raw_rate, X, Y, mua, mua_rate, decomp_type, cfs, sds, postfix):
+ def postfixed(s):
+ return '{}_{}'.format(s, postfix) if postfix else s
+
+ nsenwb.add_proc(X, device, postfixed(device), rate)
+ nsenwb.add_proc(Y, device, postfixed('Hilb_54bands'), rate, cfs=cfs, sds=sds)
+ nsenwb.add_proc(mua, device, postfixed('tMUA'), raw_rate)
+ nsenwb.add_proc(mua_rate, device, postfixed('tMUA_rate'), rate)
+
+ if outfile and os.path.exists(outfile):
+ os.remove(outfile)
+ nsenwb.write(save_path=outfile)
+ nsenwb.close()
+
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='Preprocessing ecog data from nwb.')
+ parser.add_argument('datafile', type=str, help="Input .nwb file")
+ parser.add_argument('--outfile', type=str, default=None,
+ help="Output file. Default = write to input file")
+ parser.add_argument('--block', type=str, required=True)
+ parser.add_argument('--device', '--device-name', type=str, default='ECoG')
+ parser.add_argument('--acquisition', '--acq', type=str, default='Raw')
+ parser.add_argument('--first-resample', type=float, default=None,
+ help='Resample data to this rate before processing. ' +
+ 'Omit to skip resampling before processing.')
+ parser.add_argument('--final-resample', type=float, default=400.,
+ help='Resample data to this rate after processing. ' +
+ 'Omit to skip resampling after processing.')
+ parser.add_argument('--cfs', type=str, nargs='+', default=None,
+ help="Center frequency of the Gaussian filter. " +
+"""
+Must be one of the following:
+1.) The name of a precomputed set of filters (choices: 'changlab', 'wavelet')
+2.) The name of a function (choices: 'logspaced') followed by
+ args to that function (usually fmin, fmax, but see bands.py)
+3.) A list of floats specifying the center frequencies
+Default = precomputed wavelet 4-1200hz cfs
+eg. to use the precomputed Chang lab filters, use `--cfs changlab`
+eg. to use log spaced frequencies from 10-200hz, use `--cfs logspaced 10 200`
+eg. to use your own list of center frequencies, use `--cfs 2 4 8 16 [...]`
+"""
+ )
+ parser.add_argument('--sds', type=str, nargs='+', default=None,
+ help="Standard deviation of the Gaussian filter. " +
+"""
+Must be one of the following:
+1.) The name of a precomputed set of filters (choices: 'changlab', 'wavelet')
+2.) The name of a function (choices: 'constq', 'scaledsqrt') followed by
+ args to that function (q-factor, or scale, etc. See bands.py)
+3.) A list of floats specifying the standard deviations
+Default = precomputed wavelet 4-1200hz sds
+eg. to use the precomputed Chang lab filters, use `--sds changlab`
+eg. to use constant Q filters with Q=4, use `--sds constq 4`
+eg. to use constant filter widths of 10hz, use `--sds 10 10 10 10 [...]`
+"""
+ )
+ parser.add_argument('--no-notch', default=False, action='store_true',
+ help="Do not perform notch filtering")
+ parser.add_argument('--no-car', default=False, action='store_true',
+ help="Do not perform common avg reference subtraction")
+ parser.add_argument('--decomp-type', type=str, default='hilbert',
+ choices=['hilbert', 'hil'],
+ help="frequency decomposition method")
+ parser.add_argument('--no-magnitude', default=False, action='store_true',
+ help="Do not take the magnitude of the frequency decomp")
+ parser.add_argument('--no-mua', default=False, action='store_true',
+ help="Do not compute MUA")
+ parser.add_argument('--mua-range', type=float, nargs=2, default=(500, 5000),
+ help="critical frequencies for MUA bandpass filter")
+ parser.add_argument('--mua-order', type=int, default=8,
+ help="order for butterworth bandpass filter for MUA")
+ parser.add_argument('--dset-postfix', default=None, required=False,
+ help="String to append to nwb dset names")
+ # parser.add_argument('--luigi', action='store_true', required=False, default=False,
+ # help="use luigi logger, which doesn't go to console")
+ parser.add_argument('--logfile', type=str, default=None, required=False)
+
+ args = parser.parse_args()
+
+ if args.logfile:
+ fh = logging.FileHandler(args.logfile)
+ formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
+ fh.setFormatter(formatter)
+ fh.setLevel(logging.DEBUG)
+ log.addHandler(fh)
+
+ # PARSE ARGS
+ if args.decomp_type in ('hilbert', 'hil'):
+ decomp_type = 'hilbert'
+ else:
+ raise NotImplementedError()
+
+ cfs = _get_cfs(args.cfs)
+ sds = _get_sds(cfs, args.sds)
+
+ if args.outfile and (args.outfile != args.datafile):
+ raise NotImplementedError("Cannot write to different outfile until pynwb issue #668 is addressed")
+
+ # LOAD DATA
+ start = time.time()
+ nsenwb = NSENWB.from_existing_nwb(args.block, args.datafile)
+ raw_dset = nsenwb.read_raw(args.device, acq_name=args.acquisition)
+ raw_freq = raw_dset.rate
+
+ X = raw_dset.data
+
+ log.info("Time to load: {} sec".format(time.time()-start))
+
+ # TODO: remove bad electrodes. Or maybe keep them in the file but mark them bad?
+
+ # CAR REMOVAL
+ if not args.no_car:
+ start = time.time()
+ X = _subtract_CAR(X)
+ log.info("Time to subtract CAR: {} sec".format(time.time()-start))
+
+ # NOTCH FILTER
+ if not args.no_notch:
+ start = time.time()
+ X = _notch_filter(X, raw_dset.rate)
+ log.info("Time to notch filter: {} sec".format(time.time()-start))
+
+ # MUA RATE
+ if not args.no_mua:
+ start = time.time()
+ mua, mua_rate = _mua(X, raw_freq, args.mua_range[0], args.mua_range[1], args.mua_order)
+ log.info("Time to compute MUA rate: {} sec".format(time.time()-start))
+ else:
+ mua, mua_rate = None, None
+
+ # FREQUENCY DECOMPOSITION
+ if decomp_type == 'hilbert':
+ start = time.time()
+ Y = _hilbert_transform(X, raw_freq, cfs, sds, args.first_resample, args.final_resample)
+ log.info("Time to Hilbert transform: {} sec".format(time.time()-start))
+ else:
+ raise NotImplementedError()
+
+ # FINAL RESAMPLE
+ if args.final_resample:
+ start = time.time()
+ # Y = _resample(Y, args.final_resample, rate, axis=0) # Done in Hilbert
+ # X = _resample(X, args.final_resample, rate, axis=0) # TODO: uncomment
+ if mua_rate is not None:
+ mua_rate = _resample(mua_rate, args.final_resample, raw_freq, axis=0)
+ log.info("Time to resample: {} sec".format(time.time()-start))
+
+ # TOKENIZE
+ # TODO: store tokenizer class in block directory and load it here.
+ # For now, just assume white noise tokenizer, which may become some sort of default
+ import mars.tokenizers
+ try:
+ tokenizer_name = nsenwb.stim['tokenizer']
+ tokenize = getattr(mars.tokenizers, tokenizer_name)
+ except KeyError:
+ log.error('no tokenizer specified in block directory')
+ except AttributeError:
+ log.error('tokenizer {} not found'.format(tokenizer_name))
+ else:
+ tokenize(nsenwb)
+
+ # WRITE DATA
+ start = time.time()
+ _write_data(nsenwb, args.outfile, args.device, args.final_resample, raw_freq, X, Y, mua, mua_rate,
+ decomp_type, cfs, sds, args.dset_postfix)
+ log.info("Time to write {}: {} sec".format(args.datafile, time.time()-start))
diff --git a/analysis/mars/preprocess_100um.sh b/analysis/mars/preprocess_100um.sh
new file mode 100755
index 0000000..b805b91
--- /dev/null
+++ b/analysis/mars/preprocess_100um.sh
@@ -0,0 +1,19 @@
+infile=$1
+block=Simulation_v0
+
+if [ $# -eq 0 ]
+ then
+ echo "pass input ECP nwb file as argument to this script"
+ exit
+fi
+
+python scripts/preprocess.py $infile --block $block --device ECoG --acquisition Raw --first-resample 3200 --final-resample 400 --no-notch --no-car
+python scripts/preprocess.py $infile --block $block --device Poly --acquisition Raw --first-resample 3200 --final-resample 400 --no-notch --no-car
+
+for i in `seq 0 20`;
+do
+ python scripts/preprocess.py $infile --block $block --device ECoG --acquisition $i --first-resample 3200 \
+ --final-resample 400 --no-notch --no-car --dset-postfix $i
+ python scripts/preprocess.py $infile --block $block --device Poly --acquisition $i --first-resample 3200 \
+ --final-resample 400 --no-notch --no-car --dset-postfix $i
+done
diff --git a/analysis/mars/preprocess_layer_ei.py b/analysis/mars/preprocess_layer_ei.py
new file mode 100644
index 0000000..e194b21
--- /dev/null
+++ b/analysis/mars/preprocess_layer_ei.py
@@ -0,0 +1,69 @@
+"""Script to preprocess a layer_ei contributions file so it has
+processed contributions and lesions """
+import sys
+
+from pynwb import NWBHDF5IO
+
+from mars.io import NSENWB
+from mars.wn import tokenize
+from preprocess import _hilbert_transform, _get_cfs, _get_sds
+from layer_reader import LayerReader
+
+FIRST_RESAMPLE = 3200.0
+FINAL_RESAMPLE = 400.0
+BLOCK = 'Simulation_v1'
+
+if __name__ == '__main__':
+ nwbfile = sys.argv[1]
+ cfs = _get_cfs(None)
+ sds = _get_sds(cfs, None)
+
+ nsenwb = NSENWB.from_existing_nwb(BLOCK, nwbfile)
+ reader = LayerReader(nsenwb.nwb)
+
+ # full CSEP:
+ X = reader.raw_full()
+ X = _hilbert_transform(X, reader.raw_rate(), cfs, sds,
+ FIRST_RESAMPLE, FINAL_RESAMPLE)
+ nsenwb.add_proc(X, 'ECoG', 'Hilb_54bands',
+ FINAL_RESAMPLE, cfs=cfs, sds=sds)
+
+ for layer in [1, 2, 3, 4, 5, 6]:
+ for ei in 'ei':
+ if layer == 1 and ei == 'e':
+ continue
+
+ # contribution:
+ X = reader.raw_contrib(layer, ei)
+ X = _hilbert_transform(X, reader.raw_rate(), cfs, sds,
+ FIRST_RESAMPLE, FINAL_RESAMPLE)
+ nsenwb.add_proc(X, 'ECoG', 'Hilb_54bands_{}{}'.format(layer, ei),
+ FINAL_RESAMPLE, cfs=cfs, sds=sds)
+
+ # lesion:
+ X = reader.raw_lesion(layer, ei)
+ X = _hilbert_transform(X, reader.raw_rate(), cfs, sds,
+ FIRST_RESAMPLE, FINAL_RESAMPLE)
+ nsenwb.add_proc(X, 'ECoG', 'Hilb_54bands_l{}{}'.format(layer, ei),
+ FINAL_RESAMPLE, cfs=cfs, sds=sds)
+
+ # combined e/i contribution:
+ X = reader.raw_contrib(layer)
+ X = _hilbert_transform(X, reader.raw_rate(), cfs, sds,
+ FIRST_RESAMPLE, FINAL_RESAMPLE)
+ nsenwb.add_proc(X, 'ECoG', 'Hilb_54bands_{}'.format(layer),
+ FINAL_RESAMPLE, cfs=cfs, sds=sds)
+
+ # combined e/i lesion:
+ X = reader.raw_lesion(layer)
+ X = _hilbert_transform(X, reader.raw_rate(), cfs, sds,
+ FIRST_RESAMPLE, FINAL_RESAMPLE)
+ nsenwb.add_proc(X, 'ECoG', 'Hilb_54bands_l{}'.format(layer),
+ FINAL_RESAMPLE, cfs=cfs, sds=sds)
+
+
+ tokenize(nsenwb)
+ nsenwb.write()
+
+
+
diff --git a/analysis/mars/preprocess_layers.sh b/analysis/mars/preprocess_layers.sh
new file mode 100644
index 0000000..d2dda91
--- /dev/null
+++ b/analysis/mars/preprocess_layers.sh
@@ -0,0 +1,19 @@
+infile=$1
+block=Simulation_v0
+
+if [ $# -eq 0 ]
+ then
+ echo "pass input ECP nwb file as argument to this script"
+ exit
+fi
+
+python scripts/preprocess.py $infile --block $block --device ECoG --acquisition Raw --first-resample 3200 --final-resample 400 --no-notch --no-car
+python scripts/preprocess.py $infile --block $block --device Poly --acquisition Raw --first-resample 3200 --final-resample 400 --no-notch --no-car
+
+for i in `seq 1 6`;
+do
+ python scripts/preprocess.py $infile --block $block --device ECoG --acquisition L$i --first-resample 3200 \
+ --final-resample 400 --no-notch --no-car --dset-postfix L$i
+ python scripts/preprocess.py $infile --block $block --device Poly --acquisition L$i --first-resample 3200 \
+ --final-resample 400 --no-notch --no-car --dset-postfix L$i
+done
diff --git a/analysis/mars/preprocess_slices.py b/analysis/mars/preprocess_slices.py
new file mode 100644
index 0000000..27a5d70
--- /dev/null
+++ b/analysis/mars/preprocess_slices.py
@@ -0,0 +1,50 @@
+"""Script to preprocess a 100um slice contributions file so it has
+processed contributions and lesions """
+import sys
+
+from pynwb import NWBHDF5IO
+
+from mars.io import NSENWB
+from mars.wn import tokenize
+from preprocess import _hilbert_transform, _get_cfs, _get_sds
+from layer_reader import LayerReader
+
+FIRST_RESAMPLE = 3200.0
+FINAL_RESAMPLE = 400.0
+BLOCK = 'Simulation_v1'
+THICKNESS = 200
+NSLICES = 11
+
+if __name__ == '__main__':
+ nwbfile = sys.argv[1]
+ cfs = _get_cfs(None)
+ sds = _get_sds(cfs, None)
+
+ nsenwb = NSENWB.from_existing_nwb(BLOCK, nwbfile)
+ reader = LayerReader(nsenwb.nwb)
+
+ # full CSEP:
+ X = reader.raw_full()
+ X = _hilbert_transform(X, reader.raw_rate(), cfs, sds,
+ FIRST_RESAMPLE, FINAL_RESAMPLE)
+ nsenwb.add_proc(X, 'ECoG', 'Hilb_54bands',
+ FINAL_RESAMPLE, cfs=cfs, sds=sds)
+
+ for slice_i in range(NSLICES):
+ # contribution:
+ X = reader.raw_slice(slice_i, thickness=THICKNESS)
+ X = _hilbert_transform(X, reader.raw_rate(), cfs, sds,
+ FIRST_RESAMPLE, FINAL_RESAMPLE)
+ nsenwb.add_proc(X, 'ECoG', 'Hilb_54bands_{}'.format(slice_i),
+ FINAL_RESAMPLE, cfs=cfs, sds=sds)
+
+ # lesion:
+ X = reader.raw_slice_lesion(slice_i, thickness=THICKNESS)
+ X = _hilbert_transform(X, reader.raw_rate(), cfs, sds,
+ FIRST_RESAMPLE, FINAL_RESAMPLE)
+ nsenwb.add_proc(X, 'ECoG', 'Hilb_54bands_l{}'.format(slice_i),
+ FINAL_RESAMPLE, cfs=cfs, sds=sds)
+
+ tokenize(nsenwb)
+ nsenwb.write()
+
diff --git a/analysis/mars/signal_processing/__init__.py b/analysis/mars/signal_processing/__init__.py
new file mode 100644
index 0000000..1a1052d
--- /dev/null
+++ b/analysis/mars/signal_processing/__init__.py
@@ -0,0 +1,6 @@
+from .hilbert_transform import *
+from .resample import *
+from .linenoise_notch import *
+from .common_referencing import *
+from .bandpass import *
+from .smooth import *
diff --git a/analysis/mars/signal_processing/bandpass.py b/analysis/mars/signal_processing/bandpass.py
new file mode 100644
index 0000000..506aef2
--- /dev/null
+++ b/analysis/mars/signal_processing/bandpass.py
@@ -0,0 +1,23 @@
+# Taken from https://scipy-cookbook.readthedocs.io/items/ButterworthBandpass.html
+
+from __future__ import print_function
+
+from scipy.signal import butter, filtfilt, sosfilt, sosfiltfilt
+
+__all__ = ['butter_bandpass']
+
+
+
+def butter_bandpass(data, fs, lowcut, highcut, order=8, filter_fcn=sosfiltfilt):
+ nyq = 0.5 * fs
+ low = lowcut / nyq
+
+ if nyq > highcut:
+ high = highcut / nyq
+ sos = butter(order, [low, high], btype='bandpass', output='sos')
+ else:
+ print("WARNING: Requested filter abovve nyquist frequency")
+ sos = butter(order, [low], btype='highpass', output='sos')
+
+ y = filter_fcn(sos, data)
+ return y
\ No newline at end of file
diff --git a/analysis/mars/signal_processing/common_referencing.py b/analysis/mars/signal_processing/common_referencing.py
new file mode 100644
index 0000000..bf3bacd
--- /dev/null
+++ b/analysis/mars/signal_processing/common_referencing.py
@@ -0,0 +1,39 @@
+from __future__ import division
+import numpy as np
+
+
+__all__ = ['subtract_CAR',
+ 'subtract_common_median_reference']
+
+def subtract_CAR(X, mean_frac=0.95, round_fcn=np.ceil):
+ """
+ Compute and subtract common average reference
+ mean_frac - the average is taken over the middle fraction `mean_frac` of
+ channels (sorted per timepoint); the top and bottom
+ (1 - mean_frac)/2 of channels are excluded.
+ """
+ timepts, channels = X.shape
+ nchs_excl = int(round_fcn(channels*(1-mean_frac)/2.0))
+ avg = np.mean(np.sort(X)[:, nchs_excl:channels-nchs_excl], axis=1)
+
+ return X - np.tile(avg, (channels, 1)).T
+
+
+def subtract_common_median_reference(X, channel_axis=-2):
+ """
+ Compute and subtract common median reference
+ for the entire grid.
+
+ Parameters
+ ----------
+ X : ndarray (..., n_channels, n_time)
+ Data to common median reference.
+
+ Returns
+ -------
+ Xp : ndarray (..., n_channels, n_time)
+ Common median referenced data.
+ """
+
+ median = np.nanmedian(X, axis=channel_axis, keepdims=True)
+ Xp = X - median
+
+ return Xp
diff --git a/analysis/mars/signal_processing/fft.py b/analysis/mars/signal_processing/fft.py
new file mode 100644
index 0000000..8fc9d3d
--- /dev/null
+++ b/analysis/mars/signal_processing/fft.py
@@ -0,0 +1,14 @@
+import numpy as np
+
+from numpy.fft import rfftfreq, fftfreq
+
+try:
+ from mkl_fft._numpy_fft import rfft, irfft, fft, ifft
+except ImportError:
+ try:
+ from accelerate.mkl.fftpack import rfft, irfft, fft, ifft
+ except ImportError:
+ try:
+ from pyfftw.interfaces.numpy_fft import rfft, irfft, fft, ifft
+ except ImportError:
+ from numpy.fft import rfft, irfft, fft, ifft
diff --git a/analysis/mars/signal_processing/hilbert_transform.py b/analysis/mars/signal_processing/hilbert_transform.py
new file mode 100644
index 0000000..a2450b4
--- /dev/null
+++ b/analysis/mars/signal_processing/hilbert_transform.py
@@ -0,0 +1,84 @@
+from __future__ import division
+import numpy as np
+from numpy.fft import fftfreq
+
+from .fft import fft, ifft
+
+__authors__ = "Alex Bujan, Jesse Livezey"
+
+
+__all__ = ['gaussian', 'hamming', 'hilbert_transform']
+
+
+def gaussian(X, rate, center, sd):
+ n_channels, time = X.shape
+ freq = fftfreq(time, 1./rate)
+
+ k = np.exp((-(np.abs(freq) - center)**2)/(2 * (sd**2)))
+
+ return k / k.sum()
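+
+# The kernel lives in the frequency domain: a unit-sum Gaussian centered at
+# +/-`center` Hz over the FFT bin frequencies of a signal with X.shape[-1]
+# timepoints. Quick sketch (assumed shapes; X only sets the length):
+#
+#   X = np.zeros((1, 4000))                   # 1 channel, 10 s at 400 Hz
+#   k = gaussian(X, 400., center=40., sd=5.)
+#   assert np.isclose(k.sum(), 1.0)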
+
+
+def hamming(X, rate, min_freq, max_freq):
+ n_channels, time = X.shape
+ freq = fftfreq(time, 1./rate)
+
+ pos_in_window = np.logical_and(freq >= min_freq, freq <= max_freq)
+ neg_in_window = np.logical_and(freq <= -min_freq, freq >= -max_freq)
+
+ k = np.zeros(len(freq))
+ window_size = np.count_nonzero(pos_in_window)
+ window = np.hamming(window_size)
+ k[pos_in_window] = window
+ window_size = np.count_nonzero(neg_in_window)
+ window = np.hamming(window_size)
+ k[neg_in_window] = window
+
+ return k / k.sum()
+
+
+def hilbert_transform(X, rate, filters=None, normalize_filters=True):
+ """
+ Apply bandpass filtering with Hilbert transform using
+ a prespecified set of filters.
+
+ Parameters
+ ----------
+ X : ndarray (n_channels, n_time)
+ Input data, dimensions (n_channels, n_time)
+ rate : float
+ Number of samples per second.
+ filters : filter or list of filters (optional)
+ One or more bandpass filters
+ normalize_filters : bool
+ If true, normalize each filter so that its entries sum to 1
+
+ Returns
+ -------
+ Xc : array
+ Bandpassed analytical signal (dtype: complex)
+ """
+ if not isinstance(filters, list):
+ filters = [filters]
+ time = X.shape[-1]
+ freq = fftfreq(time, 1. / rate)
+
+ # Heaviside filter (analytic signal: zero out negative frequencies)
+ h = np.zeros(len(freq))
+ h[freq > 0] = 2.
+ h[0] = 1.
+ h = h[np.newaxis, :]
+
+ Xh = np.zeros((len(filters),) + X.shape, dtype=complex)
+ X_fft_h = fft(X) * h
+ for ii, f in enumerate(filters):
+ if f is None:
+ Xh[ii] = ifft(X_fft_h)
+ else:
+ if normalize_filters:
+ f = f / f.sum()
+ Xh[ii] = ifft(X_fft_h * f)
+ if Xh.shape[0] == 1:
+ return Xh[0]
+
+ return Xh
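+
+# Usage sketch (single Gaussian band; uses gaussian() above):
+#
+#   rate = 400.
+#   X = np.random.randn(4, 4000)                 # (n_channels, n_time)
+#   k = gaussian(X, rate, center=40., sd=5.)
+#   Xc = hilbert_transform(X, rate, filters=k)   # complex analytic signal
+#   amplitude = np.abs(Xc)                       # band amplitude envelope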
diff --git a/analysis/mars/signal_processing/linenoise_notch.py b/analysis/mars/signal_processing/linenoise_notch.py
new file mode 100644
index 0000000..d811084
--- /dev/null
+++ b/analysis/mars/signal_processing/linenoise_notch.py
@@ -0,0 +1,68 @@
+from __future__ import division
+import numpy as np
+from scipy.signal import firwin2, filtfilt
+from numpy.fft import rfftfreq
+
+try:
+ from accelerate.mkl.fftpack import rfft, irfft
+except ImportError:
+ try:
+ from pyfftw.interfaces.numpy_fft import rfft, irfft
+ except ImportError:
+ from numpy.fft import rfft, irfft
+
+
+__all__ = ['linenoise_notch']
+
+
+__authors__ = "Alex Bujan"
+
+
+def apply_notches(X, notches, rate, fft=True):
+ if fft:
+ fs = rfftfreq(X.shape[-1], 1./rate)
+ delta = 1.
+ fd = rfft(X)
+ else:
+ nyquist = rate/2.
+ n_taps = 1001
+ gain = [1, 1, 0, 0, 1, 1]
+ for notch in notches:
+ if fft:
+ window_mask = np.logical_and(fs > notch-delta, fs < notch+delta)
+ window_size = window_mask.sum()
+ window = np.hamming(window_size)
+ fd[:, window_mask] = (fd[:, window_mask] *
+ (1.-window)[np.newaxis, :])
+ else:
+ freq = np.array([0, notch-1, notch-.5,
+ notch+.5, notch+1, nyquist]) / nyquist
+ filt = firwin2(n_taps, freq, gain)
+ X = filtfilt(filt, np.array([1]), X)
+ if fft:
+ X = irfft(fd)
+ return X
+
+
+def linenoise_notch(X, rate):
+ """
+ Apply Notch filter at 60 Hz and its harmonics
+
+ Parameters
+ ----------
+ X : array
+ Input data, dimensions (n_channels, n_timePoints)
+ rate : float
+ Number of samples per second
+
+ Returns
+ -------
+ X : array
+ Denoised data, dimensions (n_channels, n_timePoints)
+ """
+
+ nyquist = rate / 2
+ noise_hz = 60.
+ notches = np.arange(noise_hz, nyquist, noise_hz)
+
+ return apply_notches(X, notches, rate)
diff --git a/analysis/mars/signal_processing/resample.py b/analysis/mars/signal_processing/resample.py
new file mode 100644
index 0000000..4bfa799
--- /dev/null
+++ b/analysis/mars/signal_processing/resample.py
@@ -0,0 +1,156 @@
+# Clone of scipy resample w/ various fft bindings
+"""
+Copyright (c) 2001, 2002 Enthought, Inc.
+All rights reserved.
+
+Copyright (c) 2003-2016 SciPy Developers.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ a. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ b. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ c. Neither the name of Enthought nor the names of the SciPy Developers
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
+OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+THE POSSIBILITY OF SUCH DAMAGE.
+"""
+import numpy as np
+from scipy.fftpack import ifftshift, fftfreq
+from scipy.signal import get_window
+
+from .fft import fft, ifft
+
+__all__ = ['resample']
+
+
+def _resample(x, num, t=None, axis=0, window=None):
+ """
+ Resample `x` to `num` samples using Fourier method along the given axis.
+ The resampled signal starts at the same value as `x` but is sampled
+ with a spacing of ``len(x) / num * (spacing of x)``. Because a
+ Fourier method is used, the signal is assumed to be periodic.
+ Parameters
+ ----------
+ x : array_like
+ The data to be resampled.
+ num : int
+ The number of samples in the resampled signal.
+ t : array_like, optional
+ If `t` is given, it is assumed to be the sample positions
+ associated with the signal data in `x`.
+ axis : int, optional
+ The axis of `x` that is resampled. Default is 0.
+ window : array_like, callable, string, float, or tuple, optional
+ Specifies the window applied to the signal in the Fourier
+ domain. See below for details.
+ Returns
+ -------
+ resampled_x or (resampled_x, resampled_t)
+ Either the resampled array, or, if `t` was given, a tuple
+ containing the resampled array and the corresponding resampled
+ positions.
+ Notes
+ -----
+ The argument `window` controls a Fourier-domain window that tapers
+ the Fourier spectrum before zero-padding to alleviate ringing in
+ the resampled values for sampled signals you didn't intend to be
+ interpreted as band-limited.
+ If `window` is a function, then it is called with a vector of inputs
+ indicating the frequency bins (i.e. fftfreq(x.shape[axis]) ).
+ If `window` is an array of the same length as `x.shape[axis]` it is
+ assumed to be the window to be applied directly in the Fourier
+ domain (with dc and low-frequency first).
+ For any other type of `window`, the function `scipy.signal.get_window`
+ is called to generate the window.
+ The first sample of the returned vector is the same as the first
+ sample of the input vector. The spacing between samples is changed
+ from ``dx`` to ``dx * len(x) / num``.
+ If `t` is not None, then it represents the old sample positions,
+ and the new sample positions will be returned as well as the new
+ samples.
+ As noted, `resample` uses FFT transformations, which can be very
+ slow if the number of input samples is large and prime, see
+ `scipy.fftpack.fft`.
+ """
+ x = np.asarray(x)
+ X = fft(x, axis=axis)
+ Nx = x.shape[axis]
+ if window is not None:
+ if callable(window):
+ W = window(fftfreq(Nx))
+ elif isinstance(window, np.ndarray):
+ if window.shape != (Nx,):
+ raise ValueError('window must have the same length as data')
+ W = window
+ else:
+ W = ifftshift(get_window(window, Nx))
+ newshape = [1] * x.ndim
+ newshape[axis] = len(W)
+ W.shape = newshape
+ X = X * W
+ sl = [slice(None)] * x.ndim
+ newshape = list(x.shape)
+ newshape[axis] = num
+ N = int(np.minimum(num, Nx))
+ Y = np.zeros(newshape, 'D')
+ sl[axis] = slice(0, (N + 1) // 2)
+ Y[tuple(sl)] = X[tuple(sl)]  # tuple indexing; list-of-slices indexing is removed in modern numpy
+ sl[axis] = slice(-(N - 1) // 2, None)
+ Y[tuple(sl)] = X[tuple(sl)]
+ y = ifft(Y, axis=axis) * (float(num) / float(Nx))
+
+ if x.dtype.char not in ['F', 'D']:
+ y = y.real
+
+ if t is None:
+ return y
+ else:
+ new_t = np.arange(0, num) * (t[1] - t[0]) * Nx / float(num) + t[0]
+ return y, new_t
+
+def resample(X, new_freq, old_freq, axis=-1):
+ """
+ Resamples the ECoG signal from the original
+ sampling frequency to a new frequency.
+
+ Parameters
+ ----------
+ X : array
+ Input data, dimensions (n_channels, ..., n_timePoints)
+ new_freq : float
+ New sampling frequency
+ old_freq : float
+ Original sampling frequency
+ axis : int (optional)
+ Axis along which to resample the data
+
+ Returns
+ -------
+ Xds : array
+ Downsampled data, dimensions (n_channels, ..., n_timePoints_new)
+ """
+ time = X.shape[axis]
+ new_time = int(np.ceil(time * new_freq / old_freq))
+
+ Xds = _resample(X, new_time, axis=axis)
+
+ return Xds
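+
+# e.g. downsampling a (n_channels, n_time) array from 3200 Hz to 400 Hz:
+#
+#   X = np.random.randn(8, 32000)
+#   Xds = resample(X, 400., 3200.)   # -> shape (8, 4000)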
+
diff --git a/analysis/mars/signal_processing/smooth.py b/analysis/mars/signal_processing/smooth.py
new file mode 100644
index 0000000..853d52c
--- /dev/null
+++ b/analysis/mars/signal_processing/smooth.py
@@ -0,0 +1,62 @@
+# Adapted from https://scipy-cookbook.readthedocs.io/items/SignalSmooth.html
+
+import scipy.signal
+import numpy
+
+__all__ = ['smooth']
+
+def smooth(x,window_len=11,window='hanning', mode='full'):
+ """smooth the data using a window with requested size.
+
+ This method is based on the convolution of a scaled window with the signal.
+ The signal is prepared by introducing reflected copies of the signal
+ (with the window size) in both ends so that transient parts are minimized
+ in the begining and end part of the output signal.
+
+ input:
+ x: the input signal
+ window_len: the dimension of the smoothing window; should be an odd integer
+ window: the type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
+ flat window will produce a moving average smoothing.
+ mode: see argument to fftconvolve
+ output:
+ the smoothed signal
+
+ example:
+ t=numpy.linspace(-2,2,41)
+ x=numpy.sin(t)+numpy.random.randn(len(t))*0.1
+ y=smooth(x)
+
+ see also:
+
+ numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, numpy.convolve
+ scipy.signal.lfilter
+
+ TODO: the window parameter could be the window itself if an array instead of a string
+ NOTE: length(output) != length(input), to correct this: return y[(window_len/2-1):-(window_len/2)] instead of just y.
+ """
+
+ if x.ndim != 1:
+ raise ValueError("smooth only accepts 1 dimension arrays.")
+
+ if x.size < window_len:
+ raise ValueError("Input vector needs to be bigger than window size.")
+
+
+ if window_len<3:
+ return x
+
+
+ if window not in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
+ raise ValueError("Window must be one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'")
+
+
+ s=numpy.r_[x[window_len-1:0:-1],x,x[-2:-window_len-1:-1]]
+ #print(len(s))
+ if window == 'flat': #moving average
+ w=numpy.ones(window_len,'d')
+ else:
+ w=getattr(numpy, window)(window_len)
+
+ y=scipy.signal.fftconvolve(w/w.sum(),s,mode=mode)
+ return y
diff --git a/analysis/mars/utils/bands.py b/analysis/mars/utils/bands.py
new file mode 100644
index 0000000..b96bb8f
--- /dev/null
+++ b/analysis/mars/utils/bands.py
@@ -0,0 +1,74 @@
+"""
+Frequency band information for different types of data processing.
+"""
+import os
+
+from scipy.io import loadmat
+import numpy as np
+
+class DataFormat(object):
+ def write_preprocessed(self):
+ raise NotImplementedError
+ def read_preprocessed(self):
+ raise NotImplementedError
+
+def log_spaced_cfs(fmin, fmax, nbin=6):
+ """
+ Center frequencies that are uniform in log space
+ """
+ noct = np.ceil(np.log2(fmax/fmin))
+ return fmin * 2**(np.arange(noct*nbin)/nbin)
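+
+# e.g. the wavelet filterbank below: log_spaced_cfs(2.6308, 1200.0) spans
+# ceil(log2(1200/2.6308)) = 9 octaves at 6 bins each -> 54 center frequencies,
+# the "54 bands" referenced throughout (e.g. 'Hilb_54bands').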
+
+def const_Q_sds(cfs, Q=8):
+ return cfs/Q
+
+def scaled_sqrt_sds(cfs, scale=0.39):
+ # equivalent to:
+ # return scale * np.sqrt(cfs) * np.sqrt(2.)
+ return 10 ** ( np.log10(scale) + .5 * (np.log10(cfs))) * np.sqrt(2.)
+
+
+# Chang lab frequencies
+fq_min = 4.0749286538265
+fq_max = 200.
+scale = 7.
+cfs = 2 ** (np.arange(np.log2(fq_min) * scale, np.log2(fq_max) * scale) / scale)
+sds = scaled_sqrt_sds(cfs)
+chang_lab = {'fq_min': fq_min,
+ 'fq_max': fq_max,
+ 'scale': scale,
+ 'cfs': cfs,
+ 'sds': sds,
+ 'block_path': '{}_Hilb.h5'}
+
+# Standard neuro bands
+bands = ['theta', 'alpha', 'beta', 'high beta', 'gamma', 'high gamma', 'ultra high gamma', 'multiunit activity range']
+abrev = ['T','A','B','HB','G','HG','UHG','MUAR']
+min_freqs = [4., 9., 15., 21., 30., 70., 180., 500.]
+max_freqs = [8., 14., 20., 29., 59., 170., 450., 1200.]
+HG_freq = 200.
+neuro = {'bands': bands,
+ 'abrev': abrev,
+ 'min_freqs': min_freqs,
+ 'max_freqs': max_freqs,
+ 'HG_freq': HG_freq,
+ 'block_path': '{}_neuro_Hilb.h5'}
+
+def frequency_range(abrev):
+ frq_ind = neuro['abrev'].index(abrev)
+ return [neuro['min_freqs'][frq_ind],neuro['max_freqs'][frq_ind]]
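+
+# e.g. frequency_range('HG') -> [70., 170.], the high gamma band above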
+
+# Wavelet 4-1200hz 54 bands
+# which actually start at 2.6308 hz
+wavelet_cfs = log_spaced_cfs(2.6308, 1200.0)
+wavelet_sds = const_Q_sds(wavelet_cfs)
+wavelet = {'cfs': wavelet_cfs, 'sds': wavelet_sds}
+
+if __name__ == '__main__':
+ # with open(os.path.join(os.path.dirname(__file__), 'cfs.4_1200.54Wvl.mat'), 'r') as matfile:
+ # mat = loadmat(matfile)
+ # cfs = np.squeeze(mat['cfs'])
+ # sds = 10 ** ( np.log10(.39) + .5 * (np.log10(cfs)))
+ # sds = np.array(sds)
+ # print cfs
+ pass
diff --git a/analysis/mars/wn/wn_tokenize.py b/analysis/mars/wn/wn_tokenize.py
new file mode 100644
index 0000000..7bffae2
--- /dev/null
+++ b/analysis/mars/wn/wn_tokenize.py
@@ -0,0 +1,83 @@
+"""
+Tokenize white noise stimulus data
+"""
+
+__author__ = 'Vyassa Baratham '
+
+import numpy as np
+from mars.io import NSENWB
+from mars.signal_processing import smooth
+
+def get_stim_onsets(nsenwb, mark_name):
+ if 'Simulation' in nsenwb.block_name:
+ raw_dset = nsenwb.read_raw('ECoG')
+ end_time = raw_dset.data.shape[0] / raw_dset.rate
+ return np.arange(0.5, end_time, 0.3)
+
+ mark_dset = nsenwb.read_mark(mark_name)
+ mark_fs = mark_dset.rate
+ mark_offset = nsenwb.stim['mark_offset']
+ stim_dur = nsenwb.stim['duration']
+ stim_dur_samp = stim_dur*mark_fs
+
+ mark_threshold = 0.25 if nsenwb.stim.get('mark_is_stim') else nsenwb.stim['mark_threshold']
+ thresh_crossings = np.diff( (mark_dset.data[:] > mark_threshold).astype('int'), axis=0 )
+ stim_onsets = np.where(thresh_crossings > 0.5)[0] + 1 # +1 b/c diff gets rid of 1st datapoint
+
+ real_stim_onsets = [stim_onsets[0]]
+ for stim_onset in stim_onsets[1:]:
+ # Check that each stim onset is more than 2x the stimulus duration since the previous
+ if stim_onset > real_stim_onsets[-1] + 2*stim_dur_samp:
+ real_stim_onsets.append(stim_onset)
+
+ if len(real_stim_onsets) != nsenwb.stim['nsamples']:
+ print("WARNING: found {} stim onsets in block {}, but supposed to have {} samples".format(
+ len(real_stim_onsets), nsenwb.block_name, nsenwb.stim['nsamples']))
+
+ return (np.array(real_stim_onsets) / mark_fs) + mark_offset
+
+def get_end_time(nsenwb, mark_name):
+ mark_dset = nsenwb.read_mark(mark_name)
+ end_time = mark_dset.num_samples/mark_dset.rate
+ return end_time
+
+def already_tokenized(nsenwb):
+ return nsenwb.nwb.trials and 'sb' in nsenwb.nwb.trials.colnames
+
+def tokenize(nsenwb, mark_name='recorded_mark'):
+ """
+ Required: mark track
+
+ Output: stim on/off as "wn"
+ baseline as "baseline"
+ """
+ if already_tokenized(nsenwb):
+ return
+
+ stim_onsets = get_stim_onsets(nsenwb, mark_name)
+ stim_dur = nsenwb.stim['duration']
+ bl_start = nsenwb.stim['baseline_start']
+ bl_end = nsenwb.stim['baseline_end']
+
+ nsenwb.add_trial_column('sb', 'Stimulus (s) or baseline (b) period')
+
+ # Add the pre-stimulus period to baseline
+ # nsenwb.add_trial(start_time=0.0, stop_time=stim_onsets[0]-stim_dur, sb='b')
+
+ for onset in stim_onsets:
+ nsenwb.add_trial(start_time=onset, stop_time=onset+stim_dur, sb='s')
+ if bl_start==bl_end:
+ continue
+ nsenwb.add_trial(start_time=onset+bl_start, stop_time=onset+bl_end, sb='b')
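+
+ # Resulting trial table (sketch): each onset t contributes a row
+ # (t, t+duration, sb='s') and, unless baseline_start == baseline_end, a row
+ # (t+baseline_start, t+baseline_end, sb='b'); downstream code selects on 'sb'.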
+
+ # Add the period after the last stimulus to baseline
+ # rec_end_time = get_end_time(nsenwb,mark_name)
+ # nsenwb.add_trial(start_time=stim_onsets[-1]+bl_end, stop_time=rec_end_time, sb='b')
+
+
+if __name__ == '__main__':
+ fn = '/data/ECoGData/R32_B6_tokenizetest.nwb'
+
+ nsenwb = NSENWB.from_existing_nwb('R32_B6', fn)
+
+ tokenize(nsenwb)
diff --git a/analysis/simulation_analysis/ampl_vary.py b/analysis/simulation_analysis/ampl_vary.py
new file mode 100644
index 0000000..467911d
--- /dev/null
+++ b/analysis/simulation_analysis/ampl_vary.py
@@ -0,0 +1,60 @@
+"""
+Plots of thalamic amplitude variations
+"""
+
+import numpy as np
+import matplotlib.pyplot as plt
+import matplotlib.colors as colors
+import matplotlib.cm as cmx
+
+from utils import find_layer_ei_ecp_file
+from power_spectrum import PowerSpectrum, PowerSpectrumRatio
+
+JOBNUMS = (
+ (20, '30103194'),
+ (23, '30103187'),
+ (26, '30034900'),
+ (29, '30034918'),
+ (32, '30034921'),
+ (35, '29812500'),
+ (38, '30034922'),
+ (44, '30034925'),
+)
+
+def get_colors(cmap='Reds', n=len(JOBNUMS)):
+ color_norm = colors.Normalize(vmin=0, vmax=n+1)
+ scalar_map = cmx.ScalarMappable(norm=color_norm, cmap=cmap)
+ cmap = [scalar_map.to_rgba(i+1) for i in range(n)]
+ return cmap
+
+def plot(ps_ax, peak_ax, PS_cls=PowerSpectrum):
+ for color, (thal_freq, jobnum) in zip(get_colors(), JOBNUMS):
+ nwbfile = find_layer_ei_ecp_file(jobnum)
+ plt.sca(ps_ax)
+ plotter = PS_cls(nwbfile, '', nosave=True, color=color)
+ f, spectrum, errs = plotter.plot_one(0)
+ max_f = f[np.argmax(spectrum)]
+ max_resp = np.max(spectrum)
+ alpha = (thal_freq-18.0)/26.0
+ peak_ax.scatter(max_resp, max_f, c=color, #alpha=alpha,
+ marker='s', s=48, edgecolors='none')
+ if thal_freq == 35:
+ peak_ax.plot(max_resp, max_f, 'ro', fillstyle='none', markersize=16, alpha=alpha)
+
+if __name__ == '__main__':
+ fig, axs = plt.subplots(2, 2, figsize=(6, 6))
+
+ axs[0, 0].set_xlabel('Neural freq (Hz)')
+ axs[0, 0].set_ylabel('Z-score')
+ axs[0, 1].set_xlabel('Resp. magnitude (Z-score)')
+ axs[0, 1].set_ylabel('Resp. peak freq (Hz)')
+ plot(axs[0, 0], axs[0, 1], PS_cls=PowerSpectrum)
+
+ axs[1, 0].set_xlabel('Neural freq (Hz)')
+ axs[1, 0].set_ylabel('Stim/bl ratio')
+ axs[1, 1].set_xlabel('Resp. magnitude (ratio)')
+ axs[1, 1].set_ylabel('Resp. peak freq (Hz)')
+ plot(axs[1, 0], axs[1, 1], PS_cls=PowerSpectrumRatio)
+
+ plt.tight_layout()
+ plt.savefig("ampl_vary.pdf")
diff --git a/analysis/simulation_analysis/analysis.py b/analysis/simulation_analysis/analysis.py
new file mode 100644
index 0000000..6430309
--- /dev/null
+++ b/analysis/simulation_analysis/analysis.py
@@ -0,0 +1,184 @@
+"""
+Base classes for analysis objects
+"""
+import os
+import h5py
+from pynwb import NWBHDF5IO
+from argparse import ArgumentParser
+
+import numpy as np
+import matplotlib.pyplot as plt
+
+class BasePlotter(object):
+ def __init__(
+ # TODO: remove outdir as an arg (saving should happen manually in the calling script)
+ self, nwbfile, outdir, mode='r', device='ECoG', auxfile=None,
+ raw_dset_name='Raw', proc_dset_name='Hilb_54bands', block=None,
+ filetype='pdf', identifier='', no_baseline_stats=False,
+ channel=None, stim_i=None, tstart=None, tstop=None,
+ figsize=None, ax=None,
+ color=None, linewidth=None, label=None, nosave=False, show=False,
+ is_expt=False, nwb=None,
+ ):
+ """
+ identifier - appended to filename
+ """
+ self.nwbfile = nwbfile
+ self.auxfile = auxfile
+ self.device = device
+ self.channel = channel
+ self.block = block
+ self.raw_dset_name = raw_dset_name
+ self.proc_dset_name = proc_dset_name
+ self.outdir = outdir
+ self.filetype = filetype
+ self.identifier = identifier
+ self.stim_i = stim_i if stim_i is not None else 'avg'
+ self.tstart = tstart
+ self.tstop = tstop
+ self.is_expt = is_expt
+
+ # TODO: remove?
+ self.nosave = nosave
+ self.show = show
+
+ if figsize and not ax:
+ plt.figure(figsize=figsize)
+
+ self.linewidth = linewidth
+ self.color = color
+ self.label = label
+
+ if nwb:
+ self.nwb = nwb
+ else:
+ self.io = NWBHDF5IO(self.nwbfile, mode)
+ self.nwb = self.io.read()
+
+
+ if not no_baseline_stats:
+ self.raw_bl_stats = self._compute_raw_baseline_stats()
+ self.proc_bl_stats = self._compute_proc_baseline_stats()
+
+ @property
+ def proc_dset(self):
+ try:
+ return self.nwb.modules[self.proc_dset_name].data_interfaces[self.device]
+ except KeyError:
+ raise KeyError("Unable to read processed data from {} (probably needs to be preprocessed)".format(self.nwbfile))
+
+ @property
+ def raw_dset(self):
+ return self.nwb.acquisition[self.raw_dset_name].electrical_series[self.device]
+
+ @property
+ def n_ch(self):
+ return self.raw_dset.data.shape[1]
+
+ def do_plots(self):
+ """subclasses override"""
+ dset = self.proc_dset
+ n_timepts, n_ch = dset.data.shape[:2]
+ for i in range(n_ch):
+ self.plot_one(i)
+ plt.clf()
+
+ def fix_len_off_by_one(self, x, y):
+ """
+ In case a timeseries doesn't line up with its t-axis, fix by cutting off one timepoint
+ from the end of whichever series is shorter
+ """
+ if len(x) == len(y) + 1:
+ x = x[:-1]
+ elif len(x) == len(y) - 1:
+ y = y[:-1]
+ return x, y
+
+ def get_stim_periods(self, rate=None, pre_dur=0.1, post_dur=0.1):
+ """
+ Return stim-on times in seconds, unless rate is passed, in which case
+ in samples at the given sampling rate
+ """
+ trials = self.nwb.trials
+ idxs = trials['sb'][:] == 's'
+ times = list(zip(trials['start_time'][idxs]-pre_dur, trials['stop_time'][idxs]+post_dur))
+
+ if rate:
+ return [(int(t[0]*rate), int(t[1]*rate)) for t in times]
+ else:
+ return times
+
+ def get_baseline_periods(self, rate=None):
+ """
+ Return baseline period times in seconds, unless rate is passed, in which case
+ in samples at the given sampling rate
+ """
+ trials = self.nwb.trials
+ bl_idxs = trials['sb'][:] == 'b'
+ times = list(zip(trials['start_time'][bl_idxs], trials['stop_time'][bl_idxs]))
+
+ if rate:
+ return [(int(t[0]*rate), int(t[1]*rate)) for t in times]
+ else:
+ return times
+
+ def _compute_raw_baseline_stats(self):
+ # TODO
+ return None, None
+
+ def _compute_proc_baseline_stats(self):
+ """
+ Compute baseline stats per frequency band for the given channel data
+ """
+ if self.auxfile and os.path.exists(self.auxfile):
+ print("Using saved baseline stats")
+ with h5py.File(self.auxfile, 'r') as infile:
+ return infile['/bl_mu'][:], infile['/bl_std'][:]
+ else:
+ print("Computing baseline stats")
+ full_data = self.proc_dset.data
+ rate = self.proc_dset.rate
+ bl_periods = self.get_baseline_periods(rate=rate)
+ idx = np.zeros(full_data.shape[0], dtype=bool)
+ for t1, t2 in bl_periods:
+ idx[t1:t2] = True
+ bl_data = full_data[idx, ...]
+ bl_mean = np.average(bl_data, axis=0)
+ bl_std = np.std(bl_data, axis=0)
+ return (bl_mean, bl_std)
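+
+ # The (bl_mean, bl_std) pair is per (channel, band); downstream code can
+ # z-score processed data as e.g.:  Z = (data - bl_mean) / bl_std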
+
+ def get_bfs(self):
+ with h5py.File(self.auxfile, 'r') as infile:
+ return infile['/bf'][:]
+
+
+ def run(self):
+ self.do_plots()
+
+
+class PlotterArgParser(ArgumentParser):
+ kwarg_fields = [
+ 'tstart', 'tstop', 'stim_i', 'identifier', 'filetype', 'nosave', 'show',
+ 'proc_dset_name', 'auxfile',
+ ]
+ def __init__(self, *args, **kwargs):
+ super(PlotterArgParser, self).__init__(*args, **kwargs)
+ self.add_argument('--nwbfile', '--nwb', type=str, required=True)
+ self.add_argument('--auxfile', '--aux', type=str, required=False, default=None)
+ self.add_argument('--outdir', type=str, required=False, default='.')
+ self.add_argument('--proc-dset-name', '--proc-dset', '--proc', type=str, required=False,
+ default='Hilb_54bands')
+ self.add_argument('--tstart', type=float, required=False, default=None)
+ self.add_argument('--tstop', type=float, required=False, default=None)
+ self.add_argument('--stim-i', type=int, required=False, default=None)
+ self.add_argument('--channel', type=int, required=False, default=None)
+ self.add_argument('--identifier', type=str, required=False, default='',
+ help='append this string to filename')
+ self.add_argument('--filetype', '--extension', '--ext', required=False, default='pdf')
+ self.add_argument('--nosave', default=False, action='store_true')
+ self.add_argument('--show', default=False, action='store_true')
+
+ @property
+ def kwargs(self):
+ args = self.parse_args()
+ return {f: getattr(args, f) for f in PlotterArgParser.kwarg_fields}
diff --git a/analysis/simulation_analysis/layer_reader.py b/analysis/simulation_analysis/layer_reader.py
new file mode 100644
index 0000000..26b2a80
--- /dev/null
+++ b/analysis/simulation_analysis/layer_reader.py
@@ -0,0 +1,97 @@
+from pynwb import NWBHDF5IO
+
+class LayerReader(object):
+ def __init__(self, nwb=None, nwbfile=None, device='ECoG',
+ raw_dset_name='Raw', proc_dset_name='Hilb_54bands'):
+ """
+ nwb = an ecp_layer_ei nwb file object
+ nwbfile = pointer to an nwb file
+ """
+
+ if nwb:
+ self.nwb = nwb
+ elif nwbfile:
+ self.nwbfile = nwbfile
+ self.io = NWBHDF5IO(nwbfile, 'r')
+ self.nwb = self.io.read()
+ else:
+ raise ValueError('Must specify either nwb or nwbfile')
+
+ self.device = device
+ self.raw_dset_name = raw_dset_name
+ self.proc_dset_name = proc_dset_name
+
+ self.raw_dset = self.nwb.acquisition[raw_dset_name].electrical_series[device]
+
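+    # Usage sketch (path is hypothetical):
+    #   reader = LayerReader(nwbfile='output/ecp_layer_ei.nwb')
+    #   full = reader.raw_full()
+    #   l5 = reader.raw_contrib(layer=5)   # summed L5 e+i contribution
+    #   les = reader.raw_lesion(layer=5)   # full signal minus L5
+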
+ def raw_rate(self):
+ return self.raw_dset.rate
+
+ def raw_full(self):
+ return self.raw_dset.data[:]
+
+ def raw_contrib_dset_name(self, layer, ei):
+ return '{}{}'.format(layer, ei)
+
+ def raw_contrib(self, layer=None, ei=None):
+ if layer and ei:
+ return self.nwb.acquisition[self.raw_contrib_dset_name(layer, ei)] \
+ .electrical_series[self.device].data[:]
+ elif layer and not ei:
+ if layer == 1:
+ return self.raw_contrib(1, 'i')
+ return self.raw_contrib(layer, 'e') + self.raw_contrib(layer, 'i')
+ elif ei and not layer:
+ raise NotImplementedError("Total e/i across all layers not implemented yet")
+ else:
+ raise ValueError("Must specify layer or ei")
+
+ def raw_lesion(self, layer=None, ei=None):
+ return self.raw_full() - self.raw_contrib(layer, ei)
+
+ def raw_slice(self, slice_i, thickness=100):
+ if thickness == 100:
+ return self.nwb.acquisition[str(slice_i)].electrical_series[self.device].data[:]
+ elif thickness == 200:
+ if slice_i == 10:
+ return self.raw_slice(2*slice_i) # slice_i = 21 does not exist
+ else:
+ return self.raw_slice(2*slice_i) + self.raw_slice(2*slice_i + 1)
+ else:
+ raise ValueError("Can only take 100 or 200um slices")
+
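+    # e.g. raw_slice(3, thickness=200) sums 100um slices 6 and 7, i.e. the
+    # 600-800um depth range; slice 10 uses slice 20 alone since 21 is absent
+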
+ def raw_slice_lesion(self, slice_i, thickness=100):
+ return self.raw_full() - self.raw_slice(slice_i, thickness=thickness)
+
+    ###############################################################
+    # Hilbert (processed) data, which must be preprocessed into
+    # contributions, not calculated on the fly. Most scripts that need
+    # this data will just grab it directly from the nwb, but here's a
+    # convenient way to do that
+    ###############################################################
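+
+    # Module naming convention assumed by proc_contrib/proc_lesion below:
+    # e.g. 'Hilb_54bands_5e' for the L5 excitatory contribution and
+    # 'Hilb_54bands_l5e' for the corresponding lesion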
+
+    @property
+    def proc_dset(self):
+        return self.get_proc_dset(self.proc_dset_name)
+
+    def get_proc_dset(self, name):
+        """Look up a processed dataset by its processing module name"""
+        return self.nwb.modules[name].data_interfaces[self.device]
+
+    def proc_rate(self):
+        return self.proc_dset.rate
+
+ def proc_contrib(self, layer=None, ei=None):
+ if layer and ei:
+ return self.get_proc_dset('{}_{}{}'.format(self.proc_dset_name, layer, ei))
+ elif layer and not ei:
+ return self.get_proc_dset('{}_{}'.format(self.proc_dset_name, layer))
+ elif ei and not layer:
+ raise NotImplementedError("Total e/i across all layers not implemented yet")
+ else:
+ raise ValueError("Must specify layer or ei")
+
+ def proc_lesion(self, layer=None, ei=None):
+ if layer and ei:
+ return self.get_proc_dset('{}_l{}{}'.format(self.proc_dset_name, layer, ei))
+ elif layer and not ei:
+ return self.get_proc_dset('{}_l{}'.format(self.proc_dset_name, layer))
+ elif ei and not layer:
+ raise NotImplementedError("Total e/i across all layers not implemented yet")
+ else:
+ raise ValueError("Must specify layer or ei")
diff --git a/analysis/simulation_analysis/power_spectrum.py b/analysis/simulation_analysis/power_spectrum.py
new file mode 100644
index 0000000..5da99e0
--- /dev/null
+++ b/analysis/simulation_analysis/power_spectrum.py
@@ -0,0 +1,316 @@
+"""
+z-scored power spectrum during peak Hg response
+"""
+
+import os
+import numpy as np
+import matplotlib.pyplot as plt
+
+from analysis import BasePlotter, PlotterArgParser
+
+def find_peak_signal(stim_data, rate, search_window=.005, avg_window=1, time_shift_samp=0):
+ """
+ Average the signal on each channel/band around the peak,
+ where peak is defined as the maximum within some time (search_window)
+ of the peak response (across ALL times) of the high gamma signal
+
+ stim_data: data during the stimulus period
+ shape (n_stims, n_timepts, n_channels, n_bands)
+ search_window: number of SECONDS to search around the high gamma peak
+ avg_window: number of SAMPLES around the high gamma peak to avg over
+ """
+ n_stims, n_timepts, n_ch, n_bands = stim_data.shape
+
+ # Average across stimulus presentations
+ stim_spectra = np.mean(stim_data, axis=0)
+
+ # Compute the max in the high gamma range across all timepoints
+ search = int(search_window * rate) # samples
+ hg_signal = np.mean(stim_spectra[search+avg_window:-search-avg_window, :, 30:37], axis=-1) # 29:36?
+ hg_maxes = np.argmax(hg_signal, axis=0) + search + avg_window # Max of the hg signal on each electrode
+ print("hg maxes", hg_maxes)
+
+ # Compute the average within the search window of the hg peaks
+ avg_ampl_during_peak = np.zeros(shape=stim_spectra.shape[1:])
+ for i in range(n_ch):
+ hg_max = hg_maxes[i] + time_shift_samp
+ for j in range(n_bands):
+ maxidx = np.argmax(stim_spectra[hg_max-search:hg_max+search+1, i, j]) + hg_max - search
+ x = np.mean(stim_spectra[maxidx-avg_window:maxidx+avg_window+1, i, j])
+ avg_ampl_during_peak[i, j] = x
+
+ return avg_ampl_during_peak
+
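+# Shape sketch (values illustrative): for stim_data of shape
+# (n_stims=60, n_timepts=120, n_channels=128, n_bands=54) at rate=400.,
+# find_peak_signal(stim_data, 400.) returns a (128, 54) array of peak
+# amplitudes, one spectrum per channel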
+
+class PowerSpectrum(BasePlotter):
+ ylabel = 'Z-score'
+
+ def __init__(self, nwbfile, outdir, **kwargs):
+ self.half_width = kwargs.pop('half_width', .0025)
+ self.normalize = kwargs.pop('normalize', False)
+ self.time_shift_samp = kwargs.pop('time_shift_samp', 0)
+ self.errors = kwargs.pop('errors', False)
+ self.elinewidth = kwargs.pop('elinewidth', 0.5)
+
+ super(PowerSpectrum, self).__init__(nwbfile, outdir, **kwargs)
+
+ def set_normalize(self, normalize):
+ self.normalize = normalize
+
+ def transform_data(self, data, bl_mean, bl_std):
+ """Z-score"""
+ return (data - bl_mean) / bl_std
+
+ def get_spectrum(self, channel):
+ """
+ Return the z-scored power spectrum
+ """
+ ch_data = self.proc_dset.data[:, channel, :]
+ n_timepts, n_bands = ch_data.shape
+ rate = self.proc_dset.rate
+
+ hw = int(self.half_width * rate)
+
+ # Rescale to baseline mean/stdev
+ bl_mean, bl_std = self.proc_bl_stats
+ bl_mean, bl_std = bl_mean[channel, :], bl_std[channel, :]
+ ch_data = self.transform_data(ch_data, bl_mean, bl_std)
+
+ # Grab center frequencies from bands table
+ # def log_spaced_cfs(fmin, fmax, nbin=6):
+ # noct = np.ceil(np.log2(fmax/fmin))
+ # return fmin * 2**(np.arange(noct*nbin)/nbin)
+ # band_means = log_spaced_cfs(2.6308, 1200.0)
+ band_means = self.proc_dset.bands['band_mean'][:]
+ hg_band_idx = np.logical_and(band_means > 65, band_means < 170)
+
+ # Grab stim-on data, trial average if requested
+ stim_periods = self.get_stim_periods(rate=rate, pre_dur=0, post_dur=0)
+ if self.stim_i == 'avg':
+ n_stim_timepts = stim_periods[0][1] - stim_periods[0][0]
+ stim_data = np.zeros(shape=(len(stim_periods), n_stim_timepts, n_bands))
+ for i, (t1, t2) in enumerate(stim_periods):
+ stim_data[i, :, :] = ch_data[t1:t1+n_stim_timepts, :]
+
+ # Calculate max of average high gamma response
+ # Average over stims, bands in hg range: time axis remains
+ hg_data = np.average(stim_data[:, :, hg_band_idx], axis=(0,2))
+ max_i = np.argmax(hg_data)
+ self.max_i = max_i
+ print(max_i)
+ max_i += self.time_shift_samp
+ assert max_i - hw > 0
+
+            # Average over stims, time: freq (bands) axis remains
+ spectrum = np.average(stim_data[:, max_i-hw:max_i+hw, :], axis=(0,1))
+ errors = np.std(stim_data[:, max_i-hw:max_i+hw, :], axis=(0,1))
+ else: # self.stim_i is an integer index
+ tstart, tstop = stim_periods[self.stim_i]
+ stim_data = ch_data[tstart:tstop, :]
+ hg_data = np.average(ch_data[tstart:tstop, hg_band_idx], axis=1)
+ max_i = np.argmax(hg_data)
+ self.max_i = max_i
+ spectrum = np.average(stim_data[max_i-hw:max_i+hw+1, :], axis=0)
+ errors = np.zeros(len(spectrum))
+
+ return band_means, spectrum, errors
+
+ def get_data(self):
+ """
+ Return all data during the stimulus period
+ shape (n_stims, n_timepts, n_channels, n_bands)
+ """
+ n_timepts, n_ch, n_bands = self.proc_dset.data.shape
+ rate = self.proc_dset.rate
+ stim_periods = self.get_stim_periods(rate=rate, pre_dur=0.0, post_dur=0.0)
+ n_stim_timepts = stim_periods[0][1] - stim_periods[0][0]
+ stim_data = np.zeros(shape=(len(stim_periods), n_stim_timepts, n_ch, n_bands))
+ for i, (t1, t2) in enumerate(stim_periods):
+ stim_data[i, ...] = self.proc_dset.data[t1:t1+n_stim_timepts, ...]
+ return stim_data
+
+    def save_spectra(self):
+        n_ch = self.proc_dset.data.shape[1]  # number of channels
+        all_ch_spectra = np.stack([self.get_spectrum(ch)[1] for ch in range(n_ch)])
+        self.nwb.add_scratch(all_ch_spectra, name='power_spectrum', notes='power spectrum')
+
+ def get_avg_spectrum(self):
+ """
+ Channel average z-score.
+ This should really only be used for experimental blocks.
+ Does not support errorbars
+ """
+ bl_mean, bl_std = self.proc_bl_stats
+ def log_spaced_cfs(fmin, fmax, nbin=6):
+ noct = np.ceil(np.log2(fmax/fmin))
+ return fmin * 2**(np.arange(noct*nbin)/nbin)
+ band_means = log_spaced_cfs(2.6308, 1200.0) if self.is_expt else self.proc_dset.bands['band_mean'][:]
+ stim_data = self.get_data()
+ n_stims, n_timepts, n_ch, n_bands = stim_data.shape
+ bl_mean = np.tile(bl_mean, (n_stims, n_timepts, 1, 1))
+ bl_std = np.tile(bl_std, (n_stims, n_timepts, 1, 1))
+ stim_data = self.transform_data(stim_data, bl_mean, bl_std)
+ stim_peak_spectra = find_peak_signal(stim_data, self.proc_dset.rate, time_shift_samp=self.time_shift_samp)
+ avg_spectrum = np.average(stim_peak_spectra, axis=0)
+ return band_means, avg_spectrum
+
+    def prepare_axes(self):
+        plt.gca().set_xscale('log')
+        plt.xlim([10, 1200])
+        plt.xlabel('Frequency (Hz)')
+        ylabel = self.ylabel + '/max' if self.normalize else self.ylabel
+        plt.ylabel(ylabel)
+
+ def plot_avg(self, **plot_args):
+ """channel average"""
+ f, avg_spectrum = self.get_avg_spectrum()
+ if self.normalize:
+ avg_spectrum = avg_spectrum / np.max(avg_spectrum)
+ self.prepare_axes()
+ plt.gca().plot(f, avg_spectrum, color=self.color, **plot_args)
+
+
+ def plot_one(self, channel, **plot_args):
+ """
+ Make one channel's power spectrum and save it to file
+ """
+
+ band_means, spectrum, errors = self.get_spectrum(channel)
+
+ if self.normalize:
+ errors = errors / max(spectrum)
+ spectrum = spectrum / max(spectrum)
+
+ final_plot_args = {
+ 'label': self.label,
+ 'color': self.color,
+ 'linewidth': self.linewidth,
+ 'elinewidth': self.elinewidth,
+ 'capsize': 1,
+ }
+ final_plot_args.update(plot_args)
+
+ self.prepare_axes()
+ plt.errorbar(
+ band_means, spectrum, yerr=(errors if self.errors else None),
+ **final_plot_args
+ )
+ self.label = None # Prevent the label from being applied multiple times
+ plt.tight_layout()
+
+ if not self.nosave:
+ fn = 'power_spectrum_{}_ch{:02d}_{}.{}'.format(
+ self.device, channel, self.identifier, self.filetype
+ )
+ full_fn = os.path.join(self.outdir, fn)
+ plt.savefig(full_fn)
+
+ if self.show:
+ plt.show()
+
+ return band_means, spectrum, errors
+
+
+ def plot_one_layer_ei(self, layer, ei, contrib_or_lesion, **plot_args):
+ """Only valid for simulation, so channel must be 0"""
+ old_proc_dset_name = self.proc_dset_name # "Hilb_54bands"
+ old_proc_bl_stats = self.proc_bl_stats
+ lesion = 'l' if contrib_or_lesion in ('lesion', 'removal') else ''
+ self.proc_dset_name = '{}_{}{}{}'.format(old_proc_dset_name, lesion, layer, ei)
+ self.proc_bl_stats = self._compute_proc_baseline_stats()
+
+ self.plot_one(0, **plot_args)
+
+ self.proc_dset_name = old_proc_dset_name
+ self.proc_bl_stats = old_proc_bl_stats
+
+ def plot_one_slice(self, slice_i, contrib_or_lesion, **plot_args):
+ old_proc_dset_name = self.proc_dset_name # "Hilb_54bands"
+ old_proc_bl_stats = self.proc_bl_stats
+ lesion = 'l' if contrib_or_lesion in ('lesion', 'removal') else ''
+ self.proc_dset_name = '{}_{}{}'.format(old_proc_dset_name, lesion, slice_i)
+ self.proc_bl_stats = self._compute_proc_baseline_stats()
+
+ self.plot_one(0, **plot_args)
+
+ self.proc_dset_name = old_proc_dset_name
+ self.proc_bl_stats = old_proc_bl_stats
+
+ def plot_diff_layer_ei(self, layer, ei, contrib_or_lesion, **plot_args):
+ """
+ Plot the difference between layer contribution/lesion and full power spectrum
+ """
+ f, full_spectrum, full_errors = self.get_spectrum(0)
+
+ old_proc_dset_name = self.proc_dset_name # "Hilb_54bands"
+ old_proc_bl_stats = self.proc_bl_stats
+ lesion = 'l' if contrib_or_lesion in ('lesion', 'removal') else ''
+ self.proc_dset_name = '{}_{}{}{}'.format(old_proc_dset_name, lesion, layer, ei)
+ self.proc_bl_stats = self._compute_proc_baseline_stats()
+
+ f, layer_spectrum, layer_errors = self.get_spectrum(0)
+
+ self.proc_dset_name = old_proc_dset_name
+ self.proc_bl_stats = old_proc_bl_stats
+
+ diff = full_spectrum - layer_spectrum
+
+ self.prepare_axes()
+ final_plot_args = {
+ 'label': self.label,
+ 'color': self.color,
+ 'linewidth': self.linewidth,
+ }
+ final_plot_args.update(plot_args)
+ plt.plot(f, diff, **final_plot_args)
+
+
+
+class PowerSpectrumRatio(PowerSpectrum):
+ ylabel = 'Stim/baseline ratio'
+
+ def transform_data(self, data, bl_mean, bl_std):
+ """Ratio"""
+ return data / bl_mean
+
+ def plot_one_layer_ei(self, layer, ei, contrib_or_lesion, **plot_args):
+ """Only valid for simulation, so channel must be 0"""
+ old_proc_dset_name = self.proc_dset_name # "Hilb_54bands"
+ lesion = 'l' if contrib_or_lesion in ('lesion', 'removal') else ''
+ self.proc_dset_name = '{}_{}{}{}'.format(old_proc_dset_name, lesion, layer, ei)
+ self.plot_one(0, **plot_args)
+ self.proc_dset_name = old_proc_dset_name
+
+ def plot_one_slice(self, slice_i, contrib_or_lesion, **plot_args):
+ old_proc_dset_name = self.proc_dset_name # "Hilb_54bands"
+ lesion = 'l' if contrib_or_lesion in ('lesion', 'removal') else ''
+ self.proc_dset_name = '{}_{}{}'.format(old_proc_dset_name, lesion, slice_i)
+ self.plot_one(0, **plot_args)
+ self.proc_dset_name = old_proc_dset_name
+
+ def plot_diff_layer_ei(self, layer, ei, contrib_or_lesion, **plot_args):
+ f, full_spectrum, full_errors = self.get_spectrum(0)
+
+ old_proc_dset_name = self.proc_dset_name # "Hilb_54bands"
+ lesion = 'l' if contrib_or_lesion in ('lesion', 'removal') else ''
+ self.proc_dset_name = '{}_{}{}{}'.format(old_proc_dset_name, lesion, layer, ei)
+ f, layer_spectrum, layer_errors = self.get_spectrum(0)
+ self.proc_dset_name = old_proc_dset_name
+
+ diff = full_spectrum - layer_spectrum
+
+ self.prepare_axes()
+ final_plot_args = {
+ 'label': self.label,
+ 'color': self.color,
+ 'linewidth': self.linewidth,
+ }
+ final_plot_args.update(plot_args)
+ plt.plot(f, diff, **final_plot_args)
+
+if __name__ == '__main__':
+ parser = PlotterArgParser()
+ args = parser.parse_args()
+
+ analysis = PowerSpectrumRatio(args.nwbfile, args.outdir, **parser.kwargs)
+ analysis.run()
diff --git a/analysis/simulation_analysis/power_spectrum_100um.py b/analysis/simulation_analysis/power_spectrum_100um.py
new file mode 100644
index 0000000..0aaafc3
--- /dev/null
+++ b/analysis/simulation_analysis/power_spectrum_100um.py
@@ -0,0 +1,141 @@
+"""Plot of power spectrum in each 100um slice, along with 6 graphs
+showing the # of segments in layer i in each slice
+
+"""
+import numpy as np
+import json
+import matplotlib.pyplot as plt
+from matplotlib.gridspec import GridSpec, GridSpecFromSubplotSpec
+import matplotlib.colors as colors
+import matplotlib.cm as cmx
+
+from layer_reader import LayerReader
+from power_spectrum import PowerSpectrum, PowerSpectrumRatio
+from utils import find_slice_ecp_file, get_layer_slice_counts, numerals
+
+TSTART, TSTOP, TSTIM = 2400, 2700, 2500
+JOBNUM = '29812500'
+TYPE = 'zscore'
+# COLOR = 'Greys'
+# COLOR = 'gist_rainbow'
+COLOR = 'gist_heat'
+THICKNESS = 200
+NSLICES = 11
+nwbfile = find_slice_ecp_file(JOBNUM, thickness=THICKNESS)
+
+if TYPE == 'zscore':
+ PS_cls = PowerSpectrum
+else:
+ PS_cls = PowerSpectrumRatio
+
+def plot_contribs(reader, ax):
+ rate = reader.raw_rate()
+ istart, istop = int(round(TSTART/1000.0*rate)), int(round(TSTOP/1000.0*rate))
+ offset = 0
+ for slice_i, color in zip(range(NSLICES), get_colors()):
+ offset += .1
+ ecp = reader.raw_slice(slice_i, thickness=THICKNESS)[istart:istop]
+ t = np.linspace(TSTART, TSTOP, len(ecp))
+ ax.plot(t-TSTIM, ecp-offset, linewidth=0.5, color=color)
+
+def get_colors(cmap=COLOR, n=NSLICES, skipfirst=4 if COLOR=='Greys' else 1):
+ color_norm = colors.Normalize(vmin=0, vmax=n+skipfirst)
+ scalar_map = cmx.ScalarMappable(norm=color_norm, cmap=cmap)
+ cmap = [scalar_map.to_rgba(n-(i+skipfirst)) for i in range(n)]
+ return cmap
+
+def plot_counts(ax, slice_counts, halfgap=8):
+ for (slice_i, count), color in zip(slice_counts.items(), get_colors()):
+ top = slice_i * THICKNESS + halfgap
+ bottom = (slice_i + 1) * THICKNESS - halfgap
+ mid = (top + bottom) / 2.0
+ ax.plot([count, count], [top, bottom], color=color)
+ # if count > 0:
+ # ax.text(count, mid, str(count), rotation=90, fontsize=4,
+ # horizontalalignment='right', verticalalignment='center')
+
+fig = plt.figure(figsize=(11, 4))
+gs = GridSpec(1, 20, figure=fig)
+
+# Raw traces
+ax = plt.subplot(gs[0, 0:3])
+ax.get_yaxis().set_visible(False)
+ax.set_xticks([-100, 0, 100, 200])
+ax.set_xlabel("Time (ms)")
+reader = LayerReader(nwbfile=nwbfile)
+plot_contribs(reader, ax)
+
+# Power spectra
+plotter = PS_cls(nwbfile, '', device='ECoG', nosave=True)
+ax = plt.subplot(gs[0, 4:10])
+plt.sca(ax)
+# plt.title("Power spectra, {}um slices".format(THICKNESS))
+for slice_i, color in zip(range(NSLICES), get_colors()):
+ plotter.plot_one_slice(slice_i, 'contrib', color=color)
+
+layer_slice_counts = get_layer_slice_counts(JOBNUM, thickness=THICKNESS)
+print(json.dumps(layer_slice_counts, indent=4, sort_keys=True))
+layers = [1, 2, 3, 4, 5, 6]
+
+# Num layer segments in each slice
+for layer, slice_counts in layer_slice_counts.items():
+ ax = plt.subplot(gs[0, 11+layer-1:11+layer])
+ ax.set_title(numerals[layer])
+ plt.sca(ax)
+ ax.set_ylim([2100, 0])
+ if layer in (1, 2):
+ xlim = 14000 if THICKNESS == 100 else 25000
+ ax.set_xlim([0, xlim])
+ plt.xticks([xlim], rotation='vertical')
+ else:
+ xlim = 135000 if THICKNESS == 100 else 265000
+ ax.set_xlim([0, xlim])
+ plt.xticks([xlim], rotation='vertical')
+
+ if layer == 1:
+ ax.set_ylabel("Depth (um)")
+ else:
+ ax.get_yaxis().set_visible(False)
+ plot_counts(ax, slice_counts)
+# depth axis on L1
+
+# Num total segments in each slice
+ax = plt.subplot(gs[0, 17])
+plt.sca(ax)
+ax.set_title("Total")
+ax.set_ylim([NSLICES-0.5, -0.5])
+ax.get_yaxis().set_visible(False)
+# ax.get_xaxis().set_visible(False)
+xlim = 200000 if THICKNESS == 100 else 350000
+ax.set_xlim([0, xlim])
+plt.xticks([xlim], rotation='vertical')
+# for spine in ['left', 'right', 'top', 'bottom']:
+# ax.spines[spine].set_visible(False)
+total_counts = [sum(layer_slice_counts[layer][slice_i] for layer in layers) for slice_i in range(NSLICES)]
+print(total_counts)
+ax.barh(
+ range(NSLICES),
+ total_counts,
+ color=get_colors(),
+ height=0.8
+)
+
+# Fraction of segments in each layer
+subgs = GridSpecFromSubplotSpec(NSLICES, 1, subplot_spec=gs[0, 18:20])
+for slice_i, color in zip(range(NSLICES), get_colors()):
+ ax = plt.subplot(subgs[slice_i, 0])
+ ax.axis('off')
+ plt.bar(layers, [layer_slice_counts[l][slice_i] for l in layers], width=0.7, color=color)
+# Label layers on bottom subplot only
+ax = plt.subplot(subgs[-1, 0])
+ax.axis('on')
+ax.get_yaxis().set_visible(False)
+for spine in ['left', 'right', 'top', 'bottom']:
+ ax.spines[spine].set_visible(False)
+ax.set_xticks(layers)
+ax.set_xticklabels([numerals[l] for l in layers])
+
+col = '' if COLOR == 'Greys' else ('_'+COLOR)
+plt.tight_layout()
+plt.savefig('power_spectrum_{}um_{}_{}{}.pdf'.format(THICKNESS, JOBNUM, TYPE, col))
+plt.savefig('power_spectrum_100um_latest.pdf')
diff --git a/analysis/simulation_analysis/power_spectrum_layers.py b/analysis/simulation_analysis/power_spectrum_layers.py
new file mode 100644
index 0000000..6ffff51
--- /dev/null
+++ b/analysis/simulation_analysis/power_spectrum_layers.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+
+import numpy as np
+import matplotlib.pyplot as plt
+from matplotlib.gridspec import GridSpec
+import matplotlib.colors as colors
+import matplotlib.cm as cmx
+
+from layer_reader import LayerReader
+from power_spectrum import PowerSpectrum, PowerSpectrumRatio
+from utils import find_layer_ei_ecp_file
+
+TSTART, TSTOP, TSTIM = 2400, 2700, 2500
+JOBNUM = '29812500'
+TYPE = 'ratio'
+nwbfile = find_layer_ei_ecp_file(JOBNUM)
+expt_nwbfile = "/Users/vbaratham/src/simulation_analysis/R32_B6_notch_filtered.nwb"
+
+def get_colors(n=6):
+ color_norm_e = colors.Normalize(vmin=0, vmax=n+1)
+ scalar_map_e = cmx.ScalarMappable(norm=color_norm_e, cmap='Greys')
+ cmap = [scalar_map_e.to_rgba(i+1) for i in range(0, n+1)][1:]
+
+ return cmap
+
+def plot_ecp(reader, ax, layer, offset=0, color='red'):
+ data = reader.raw_contrib(layer=layer)
+ rate = reader.raw_rate()
+ istart, istop = int(round(TSTART/1000.0*rate)), int(round(TSTOP/1000.0*rate))
+ ecp = data[istart:istop]
+ t = np.linspace(TSTART, TSTOP, len(ecp))
+ ax.plot(t-TSTIM, ecp-offset, linewidth=0.5, color=color)
+
+def plot_contribs(nwb, ax):
+ layers = [1, 2, 3, 4, 5, 6]
+ colors = get_colors()
+ offset_per_layer = .15
+ reader = LayerReader(nwb=nwb)
+ for layer, color in zip(layers, colors):
+ offset = layer * offset_per_layer
+ plot_ecp(reader, ax, layer, offset=offset, color=color)
+
+def plot_power_spectra_contrib(plotter, ax, _type='contrib'):
+ layers = [1, 2, 3, 4, 5, 6]
+ colors = get_colors()
+ plt.sca(ax)
+ y = .95
+ label = 'Contribution' if _type == 'contrib' else 'Lesion'
+ for layer, color in zip(layers, colors):
+ plotter.plot_one_layer_ei(layer, '', _type, color=color)
+ ax.text(.95, y, "L{} {}".format(layer, label), fontsize=7, color=color,
+ horizontalalignment='right', verticalalignment='top', transform=ax.transAxes)
+ y -= 0.07
+ print("done layer {}".format(layer))
+
+fig = plt.figure(figsize=(9, 3))
+gs = GridSpec(1, 3, figure=fig)
+
+PS_cls = PowerSpectrum if TYPE == 'zscore' else PowerSpectrumRatio
+sim_plotter = PS_cls(nwbfile, '', device='ECoG', stim_i='avg', nosave=True,
+ color='red', label='In silico')
+
+# Raw contribs
+ax = plt.subplot(gs[0, 0])
+plot_contribs(sim_plotter.nwb, ax)
+ax.get_yaxis().set_visible(False)
+ax.set_xlabel("Time (ms)")
+ax.set_title("a", loc='left')
+
+ax = plt.subplot(gs[0, 1])
+plot_power_spectra_contrib(sim_plotter, ax, _type='contrib')
+ax.set_title("b", loc='left')
+
+ax = plt.subplot(gs[0, 2])
+plot_power_spectra_contrib(sim_plotter, ax, _type='lesion')
+ax.set_title("c", loc='left')
+
+plt.tight_layout()
+plt.savefig('power_spectrum_layers_{}_{}.png'.format(TYPE, JOBNUM))
+plt.savefig('power_spectrum_layers_{}_latest.pdf'.format(TYPE))
diff --git a/analysis/simulation_analysis/raw_ecp.py b/analysis/simulation_analysis/raw_ecp.py
new file mode 100644
index 0000000..732a691
--- /dev/null
+++ b/analysis/simulation_analysis/raw_ecp.py
@@ -0,0 +1,77 @@
+import os
+import numpy as np
+import matplotlib.pyplot as plt
+
+from utils import bandpass
+from analysis import BasePlotter, PlotterArgParser
+
+class RawECP(BasePlotter):
+ def __init__(self, nwbfile, outdir, **kwargs):
+ self.bandpass = kwargs.pop('bandpass', None)
+
+ super(RawECP, self).__init__(nwbfile, outdir, **kwargs)
+
+ def add_stim_line(self, where='top'):
+ tr = self.nwb.trials
+ stim_idx = tr['sb'][:] == 's'
+ start_times = tr['start_time'][stim_idx]
+ stop_times = tr['stop_time'][stim_idx]
+ top = plt.ylim()[1 if where=='top' else 0]
+
+ for (start, stop) in zip(start_times, stop_times):
+ if start > self.tstop or stop < self.tstart:
+ continue
+ plt.plot([start, stop], [top, top], color='blue')
+
+
+ def plot_one(self, channel):
+        if self.tstart is None or self.tstop is None:
+            raise ValueError('Must specify --tstart and --tstop for RawECP')
+
+ fig = plt.figure(figsize=(8, 2))
+ rate = self.raw_dset.rate
+ istart, istop = int(self.tstart*rate), int(self.tstop*rate)
+ ch_data = self.raw_dset.data[istart:istop, channel]
+
+ # TODO: axis labels
+ t = np.arange(self.tstart, self.tstop, 1.0/rate)
+
+ # fix off-by-one time round errors
+ t, ch_data = self.fix_len_off_by_one(t, ch_data)
+
+ if self.bandpass:
+ print('bandpassing from {} to {}'.format(self.bandpass[0], self.bandpass[1]))
+ ch_data = bandpass(ch_data, rate, lowcut=self.bandpass[0], highcut=self.bandpass[1])
+
+ plt.plot(t, ch_data, linewidth=0.5, color='red')
+ self.add_stim_line()
+ plt.xlabel('Time (s)')
+ plt.tight_layout()
+
+ if not self.nosave:
+ fn = 'rawECP_{}_ch{:02d}_{}.{}'.format(
+ self.device, channel, self.identifier, self.filetype
+ )
+ full_fn = os.path.join(self.outdir, fn)
+ plt.savefig(full_fn)
+
+ if self.show:
+ plt.show()
+
+ fig.clear()
+
+ def do_plots(self):
+ dset = self.raw_dset
+ n_timepts, n_ch = dset.data.shape[:2]
+ for i in range(n_ch):
+ self.plot_one(i)
+
+
+if __name__ == '__main__':
+ parser = PlotterArgParser()
+ parser.add_argument('--bandpass', nargs=2, type=float, required=False, default=None,
+ help='low/high cutoffs to bandpass before running')
+ args = parser.parse_args()
+
+    analysis = RawECP(args.nwbfile, args.outdir, bandpass=args.bandpass, **parser.kwargs)
+ analysis.run()
diff --git a/analysis/simulation_analysis/tone_avg_ecp.py b/analysis/simulation_analysis/tone_avg_ecp.py
new file mode 100644
index 0000000..de701e0
--- /dev/null
+++ b/analysis/simulation_analysis/tone_avg_ecp.py
@@ -0,0 +1,86 @@
+"""
+Plot the avg raw response to the BF on a given electrode
+"""
+
+import numpy as np
+import matplotlib.pyplot as plt
+import h5py
+
+from analysis import BasePlotter, PlotterArgParser
+from utils import bandpass, highpass
+
+class ToneAvgECP(BasePlotter):
+ def plot(self, channel):
+ rate = self.raw_dset.rate
+ bf = self.get_bfs()[channel]
+ trials = self.nwb.trials
+        trial_idxs = np.logical_and(
+            np.logical_and(trials['sb'][:] == 's', trials['frq'][:] == str(bf)),
+            trials['amp'][:] == '7')
+ # trial_idxs = trials['sb'][:] == 's'
+ start_times = trials['start_time'][trial_idxs]-0.05
+ window_len_samp = int(0.15 * rate)
+ stim_periods = [(int(t*rate), int(t*rate) + window_len_samp) for t in start_times]
+        ch_data = self.raw_dset.data[:, channel] * 1000  # scale to mV for plotting
+ # ch_data = bandpass(ch_data, rate, 2, 3000)
+ # ch_data = highpass(ch_data, rate, 800)
+ all_stim_data = [ch_data[istart:istop] for istart, istop in stim_periods]
+ stim_data = np.stack(all_stim_data)
+ avg_waveform = np.average(stim_data, axis=0)
+ std_waveform = np.std(stim_data, axis=0)
+ t = np.linspace(-50, 100, len(avg_waveform))
+
+ # DEBUG
+ # for i in range(len(start_times)):
+ # plt.plot(t, stim_data[i, :], color='k', linewidth=0.3, alpha=0.3)
+ # END DEBUG
+ plt.fill_between(t, avg_waveform+std_waveform, avg_waveform-std_waveform, color='grey')
+ plt.plot(t, avg_waveform, color='black')
+
+ # Draw stim bars
+ ymin, ymax = plt.ylim()
+ plt.plot([0, 0], [ymin, ymax], linestyle='--', linewidth=0.5, color='k')
+ plt.plot([50, 50], [ymin, ymax], linestyle='--', linewidth=0.5, color='k')
+
+ # Draw peak bars
+ center_samp = 10
+ center_time = center_samp / self.proc_dset.rate
+ t1, t2 = (center_time - .005) * 1000, (center_time + .005) * 1000
+ plt.plot([t1, t1], [ymin, ymax], linewidth=0.3, color='red')
+ plt.plot([t2, t2], [ymin, ymax], linewidth=0.3, color='red')
+
+ plt.xlim([-50, 100])
+ plt.ylim([ymin, ymax])
+
+ plt.xlabel("Time (ms)")
+ plt.ylabel("Voltage (mV)")
+ plt.tight_layout()
+
+
+if __name__ == '__main__':
+ # TONE150 (not used)
+ # rat = 'R72'
+ # block = 'R72_B6'
+ # rat = 'R73'
+ # block = 'R73_B2'
+ rat = 'R70'
+ block = 'R70_B8'
+ # rat = 'R75'
+ # block = 'R75_B8'
+ my_preproc = ['R70', 'R67']
+
+ rat = 'R32'
+ block = 'R32_B7'
+
+ nwbfile = '/data/{}/{}.nwb'.format(rat, block)
+ auxfile = '/data/{}/{}_aux.h5'.format(rat, block)
+
+ # Not used - all tone blocks are preprocessed by me
+ # proc_dset_name = 'Hilb_54bands' if rat in my_preproc else 'Wvlt_4to1200_54band_CAR0'
+
+ for channel in range(128):
+ plotter = ToneAvgECP(nwbfile, '.', no_baseline_stats=True, auxfile=auxfile)
+ plt.figure(figsize=(4.2, 4))
+ plotter.plot(channel)
+ plt.savefig('plots/tone_raw_{}_ch{}.pdf'.format(block, channel))
+ plt.close()
+ print("done channel {}".format(channel))
+
diff --git a/analysis/simulation_analysis/tone_figure.py b/analysis/simulation_analysis/tone_figure.py
new file mode 100644
index 0000000..5b46e6e
--- /dev/null
+++ b/analysis/simulation_analysis/tone_figure.py
@@ -0,0 +1,205 @@
+# -*- coding: utf-8 -*-
+"""
+Figure 1 of the High gamma ECoG paper
+"""
+import os, sys
+import numpy as np
+import matplotlib.pyplot as plt
+from matplotlib.gridspec import GridSpec
+import matplotlib.patches as patches
+import matplotlib.cm as cmx
+import matplotlib.colors as colors
+
+from tone_avg_ecp import ToneAvgECP
+from tone_spectrogram import ToneSpectrogram
+from tone_power_spectrum import TonePowerSpectrum
+
+rat = 'R18'
+block = 'R18_B12'
+tstart, tstop = 40, 42.5
+# channel = 109 # for single channel plots
+# channels = [10, 20, 30, 40, 50, 60, 70, 80, 124, 100, 109] # for Hg plot
+if len(sys.argv) > 1:
+ channel = int(sys.argv[-1])
+ channels = []
+else:
+ channel = np.random.randint(128)
+ channels = list(np.random.randint(128, size=10))
+channels.append(channel)
+print("Channel = {}".format(channel))
+print("Channels = {}".format(channels))
+if os.path.exists("fig1_ch{}.pdf".format(channel)):
+ exit("Already done channel")
+
+nwbfile = '/data/{}/{}.nwb'.format(rat, block)
+auxfile = '/data/{}/{}_aux.h5'.format(rat, block)
+specfile = '/data/{}/{}_spectra.h5'.format(rat, block)
+
+fig = plt.figure(figsize=(7, 7))
+# gs = GridSpec(4, 3, height_ratios=(1, 2, 2, 3.8))
+
+########
+# AXES
+########
+
+# ECoG micrograph (not produced here)
+
+CBAR_WD = .015
+CBAR_GAP = .005
+LEN = .5
+
+# Stimulus
+# stim_ax = plt.subplot(gs[0, 1:])
+stim_ax = fig.add_axes([.4, 1-.08, LEN, .08])
+stim_ax.get_xaxis().set_visible(False)
+stim_ax.get_yaxis().set_visible(False)
+
+# Z-scored High gamma response
+# hg_ax = plt.subplot(gs[1, 1:], sharex=stim_ax)
+hg_ax = fig.add_axes([.4, 1-.08-.16, LEN, .16], sharex=stim_ax)
+hg_ax.get_xaxis().set_visible(False)
+hg_ax.set_ylabel("Hγ (Z-score)")
+
+freq_colorbar_ax = fig.add_axes([.4+LEN+CBAR_GAP, 1-.08-.16+.005, CBAR_WD, .23])
+
+# Spectrogram
+# spect_ax = plt.subplot(gs[2, 1:], sharex=stim_ax)
+bottom = 1-.08-.16-.16
+spect_ax = fig.add_axes([.4, bottom, LEN, .16], sharex=stim_ax)
+spect_colorbar_ax = fig.add_axes([.4+LEN+CBAR_GAP, bottom, CBAR_WD, .16])
+
+SQ = .25
+
+# Trial-avg raw trace
+# raw_ax = plt.subplot(gs[3, 0])
+raw_ax = fig.add_axes([.08, 1-.08-.16-.16-SQ, SQ, SQ])
+raw_ax.get_xaxis().set_visible(False)
+
+# Trial-avg spectrogram
+# avg_spect_ax = plt.subplot(gs[3, 1])
+bottom = 1-.08-.16-.16-SQ-.02-SQ
+avg_spect_ax = fig.add_axes([.08, bottom, SQ, SQ])
+avg_colorbar_ax = fig.add_axes([.08+SQ+CBAR_GAP, bottom, CBAR_WD, SQ])
+
+# Power spectrum
+# ps_ax = plt.subplot(gs[3, 2])
+ps_ax = fig.add_axes([.5, .08, .45, .45])
+
+
+########
+# PLOTS
+########
+
+
+# Trial-avg raw trace
+plt.sca(raw_ax)
+plotter = ToneAvgECP(nwbfile, '.', no_baseline_stats=True, auxfile=auxfile, nosave=True)
+plotter.plot(channel)
+
+# Trial-avg spectrogram
+plt.sca(avg_spect_ax)
+plotter = ToneSpectrogram(nwbfile, '.', auxfile=auxfile, nosave=True)
+im = plotter.plot_one(channel)
+cbar = fig.colorbar(im, cax=avg_colorbar_ax)
+avg_colorbar_ax.tick_params(labelsize=6)
+cbar.set_label("Z-score", size=8)
+
+# Power spectrum
+plt.sca(ps_ax)
+plotter = TonePowerSpectrum(nwbfile, '.', auxfile=auxfile, half_width=.005, nosave=True)
+plotter.prepare_axes()
+plotter.plot_all_and_avg(specfile=specfile)
+
+# Stimulus
+nwb = plotter.nwb
+bfs = plotter.get_bfs()
+trial_idxs = np.logical_and(nwb.trials['start_time'][:] > tstart,
+ nwb.trials['stop_time'][:] < tstop)
+trial_idxs = np.logical_and(trial_idxs, nwb.trials['sb'][:] == 's')
+start_times = nwb.trials['start_time'][trial_idxs]
+freqs = np.array([float(f) for f in nwb.trials['frq'][trial_idxs]])
+ampls = np.array([float(f) for f in nwb.trials['amp'][trial_idxs]])
+all_freqs = np.array([float(f) for f in nwb.trials['frq'][::2]])
+fmin, fmax = np.min(all_freqs), np.max(all_freqs)
+nfreqs = len(np.unique(all_freqs))
+print(bfs)
+print([bfs[ch] for ch in channels])
+print(freqs)
+print(ampls)
+
+color_norm = colors.LogNorm(vmin=fmin, vmax=fmax)
+cmap = cmx.ScalarMappable(norm=color_norm, cmap='jet')
+cmap.set_array([])  # a bare ScalarMappable needs an array before colorbar()
+# use cmap.to_rgba(freq) for each freq's color
+cbar = fig.colorbar(cmap, cax=freq_colorbar_ax)
+cmin, cmax = freq_colorbar_ax.get_xlim()
+cbar_mid = np.exp((np.log(cmin) + np.log(cmax)) / 2.0)
+freq_colorbar_ax.plot(cbar_mid, bfs[channel], marker='.', color='k')
+freq_colorbar_ax.tick_params(labelsize=6)
+cbar.set_label("Freq (Hz)", size=8)
+
+for start_time, freq, ampl in zip(start_times, freqs, ampls):
+ stim_ax.add_patch(patches.Rectangle((start_time, 0), .05, ampl, color=cmap.to_rgba(freq)))
+
+stim_ax.set_xlim([tstart, tstop])
+stim_ax.set_ylim([0, 8])
+
+
+# Hg
+bands = plotter.proc_dset.bands['band_mean'][:]
+f_idx = np.logical_and(bands > 65, bands < 170)
+rate = plotter.proc_dset.rate
+istart, istop = int(tstart*rate), int(tstop*rate)
+t = np.linspace(tstart, tstop, istop-istart)
+bl_mu, bl_std = plotter.proc_bl_stats
+for ch in channels:
+ mu = np.average(bl_mu[ch, f_idx])
+ std = np.average(bl_std[ch, f_idx])
+ ch_hg = plotter.proc_dset.data[istart:istop, ch, f_idx]
+ ch_hg = np.average(ch_hg, axis=-1)
+ ch_hg = (ch_hg - mu) / std
+ hg_ax.plot(t, ch_hg, linewidth=0.5, color=cmap.to_rgba(bfs[ch]))
+
+ymin, ymax = hg_ax.get_ylim()
+for start_time in start_times:
+ hg_ax.plot([start_time, start_time], (ymin, ymax),
+ linestyle='--', color='grey', linewidth=0.5)
+ hg_ax.plot([start_time+.05, start_time+.05], (ymin, ymax),
+ linestyle='--', color='grey', linewidth=0.5)
+hg_ax.set_ylim([ymin, ymax])
+hg_ax.set_xlim([tstart, tstop])
+
+
+# Spectrogram
+class ToneSpectrogramLong(ToneSpectrogram):
+ def get_t_extent(self):
+ t = np.arange(tstart, tstop, 1.0/self.proc_dset.rate)
+ extent = [tstart, tstop, 0, 1]
+ return t, extent
+
+ def draw_stim_bars(self):
+ pass
+ def draw_peak_bars(self):
+ pass
+
+plt.sca(spect_ax)
+plotter = ToneSpectrogramLong(nwbfile, '.', tstart=tstart, tstop=tstop, stim_i='', auxfile=auxfile)
+im = plotter.plot_one(channel, vmin=0, vmax=7)
+cbar = fig.colorbar(im, cax=spect_colorbar_ax)
+spect_colorbar_ax.tick_params(labelsize=6)
+cbar.set_label("Z-score", size=8)
+
+ymin, ymax = spect_ax.get_ylim()
+for start_time in start_times:
+ spect_ax.plot([start_time, start_time], (ymin, ymax),
+ linestyle='--', color='grey', linewidth=0.5)
+ spect_ax.plot([start_time+.05, start_time+.05], (ymin, ymax),
+ linestyle='--', color='grey', linewidth=0.5)
+spect_ax.set_ylim([ymin, ymax])
+spect_ax.set_xlim([tstart, tstop])
+
+
+
+
+# plt.show()
+plt.savefig("fig1_ch{}.pdf".format(channel))
diff --git a/analysis/simulation_analysis/tone_power_spectrum.py b/analysis/simulation_analysis/tone_power_spectrum.py
new file mode 100644
index 0000000..3f79e2b
--- /dev/null
+++ b/analysis/simulation_analysis/tone_power_spectrum.py
@@ -0,0 +1,169 @@
+"""Generate power spectrum from tone150 experimental block. Overlays
+channels. Uses best frequency at each channel only. """
+import os
+
+import numpy as np
+import h5py
+import matplotlib.pyplot as plt
+
+from power_spectrum import PowerSpectrum
+from utils import wavelet_cfs
+
+def bf_tone(plotter, auxfile, hg_min=65, hg_max=170):
+ """
+ Compute and store the baseline stats and best frequencies on each channel
+ """
+ nwb = plotter.nwb
+ bl_mu, bl_std = plotter.proc_bl_stats
+
+ # Grab list of all frequencies presented
+ all_stim_freq = [int(x) for x in np.unique(nwb.trials['frq'][:])]
+ n_stim_freq = len(all_stim_freq)
+
+ # Grab Z-scored high gamma data for each stim freq individually,
+ # compute max within each trial, and average across trials
+ proc_dset = plotter.proc_dset
+ _, n_ch, _ = proc_dset.data.shape
+ trials = plotter.nwb.trials
+ f_idx = np.logical_and(wavelet_cfs > hg_min, wavelet_cfs < hg_max)
+
+ freq_maxes = np.empty(shape=(n_stim_freq, n_ch)) # trial-avg of max Hg amplitude per ch
+ for stim_freq_i, stim_freq in enumerate(all_stim_freq):
+ trial_idxs = np.logical_and(trials['sb'][:] == 's', trials['frq'][:] == str(stim_freq))
+ times = zip(trials['start_time'][trial_idxs], trials['stop_time'][trial_idxs])
+ time_idxs = [(int(t[0]*proc_dset.rate), int(t[1]*proc_dset.rate)) for t in times]
+ ch_maxes = np.empty(shape=(len(time_idxs), n_ch))
+ for trial_i, (istart, istop) in enumerate(time_idxs):
+ trial_data = proc_dset.data[istart:istop, :, f_idx]
+ trial_data = (trial_data - bl_mu[:, f_idx]) / bl_std[:, f_idx]
+ trial_data = np.average(trial_data, axis=-1)
+ ch_maxes[trial_i, :] = np.max(trial_data, axis=0)
+ freq_maxes[stim_freq_i, :] = np.average(ch_maxes, axis=0)
+ bf_idxs = np.argmax(freq_maxes, axis=0)
+ bf = np.array([all_stim_freq[bf_i] for bf_i in bf_idxs])
+
+    with h5py.File(auxfile, 'a') as h5file:
+ h5file.create_dataset('/bl_mu', data=bl_mu)
+ h5file.create_dataset('/bl_std', data=bl_std)
+ h5file.create_dataset('/freq_maxes', data=freq_maxes)
+ h5file.create_dataset('/bf', data=bf)
+
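+# bf_tone is meant to run once per block (see __main__ below); it writes the
+# aux file that get_bfs() and the cached baseline stats are read from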
+
+class TonePowerSpectrum(PowerSpectrum):
+ def get_bfs(self):
+        with h5py.File(self.auxfile, 'r') as infile:
+ return infile['/bf'][:]
+
+
+ def get_spectrum(self, channel):
+ ch_data = self.proc_dset.data[:, channel, :]
+ n_timepts, n_bands = ch_data.shape
+ rate = self.proc_dset.rate
+
+ hw = int(self.half_width * rate)
+
+ # Rescale to baseline mean/stdev
+ bl_mean, bl_std = self.proc_bl_stats
+ bl_mean, bl_std = bl_mean[channel, :], bl_std[channel, :]
+ ch_data = (ch_data - bl_mean) / bl_std
+
+ # Grab center frequencies from bands table
+ # def log_spaced_cfs(fmin, fmax, nbin=6):
+ # noct = np.ceil(np.log2(fmax/fmin))
+ # return fmin * 2**(np.arange(noct*nbin)/nbin)
+ # band_means = log_spaced_cfs(2.6308, 1200.0)
+ # band_means = self.proc_dset.bands['band_mean'][:]
+ band_means = wavelet_cfs
+ hg_band_idx = np.logical_and(band_means > 65, band_means < 170)
+
+ # Grab stim-on data for best freq
+ bf = self.get_bfs()[channel]
+ trials = self.nwb.trials
+        trial_idxs = np.logical_and(
+            np.logical_and(trials['sb'][:] == 's', trials['frq'][:] == str(bf)),
+            trials['amp'][:] == '7')
+ times = zip(trials['start_time'][trial_idxs], trials['stop_time'][trial_idxs])
+ stim_periods = [(int(t[0]*self.proc_dset.rate), int(t[1]*self.proc_dset.rate)) for t in times]
+
+ n_stim_timepts = stim_periods[0][1] - stim_periods[0][0]
+ stim_data = np.zeros(shape=(len(stim_periods), n_stim_timepts, n_bands))
+ for i, (t1, t2) in enumerate(stim_periods):
+ stim_data[i, :, :] = ch_data[t1:t1+n_stim_timepts, :]
+
+ # Calculate max of average high gamma response
+ # Average over stims, bands in hg range: time axis remains
+ hg_data = np.average(stim_data[:, :, hg_band_idx], axis=(0,2))
+ max_i = np.argmax(hg_data)
+ self.max_i = max_i
+ print(channel, max_i)
+ max_i += self.time_shift_samp
+ if max_i - hw <= 0:
+ spectrum = np.zeros(shape=(54,))
+ errors = np.zeros(shape=(54,))
+ else:
+ # Average over stims, time: freq (bands) axis remainds
+ spectrum = np.average(stim_data[:, max_i-hw:max_i+hw, :], axis=(0,1))
+ errors = np.std(stim_data[:, max_i-hw:max_i+hw, :], axis=(0,1))
+
+ return band_means, spectrum, errors
+
+ def plot_all_and_avg(self, specfile=None):
+ if specfile and os.path.exists(specfile):
+ print("using saved spectra")
+            with h5py.File(specfile, 'r') as infile:
+ all_spectra = infile['power_spectra'][:]
+ else:
+ print("Computing spectra")
+ all_spectra = [self.get_spectrum(ch)[1] for ch in range(self.n_ch)]
+
+ # if specfile and os.path.exists(specfile):
+ # os.remove(specfile)
+        if specfile and not os.path.exists(specfile):
+            with h5py.File(specfile, 'w') as outfile:
+ outfile.create_dataset('f', data=wavelet_cfs)
+ outfile.create_dataset('power_spectra', data=np.stack(all_spectra))
+
+ ch_spectra = []
+ for ch in range(self.n_ch):
+ # f, spectrum, errors = self.get_spectrum(ch)
+ spectrum = all_spectra[ch]
+ if np.any(spectrum > 3.0):
+ ch_spectra.append(spectrum)
+ plt.plot(wavelet_cfs, spectrum, color='k', alpha=0.3, linewidth=0.3)
+ avg_spectrum = np.average(np.stack(ch_spectra), axis=0)
+ plt.plot(wavelet_cfs, avg_spectrum, color='red', alpha=1, linewidth=2)
+ print("plotted {} spectra".format(len(ch_spectra)))
+
+
+
+if __name__ == '__main__':
+ # TONE150:
+ # rat = 'R72'
+ # block = 'R72_B6'
+ # rat = 'R73'
+ # block = 'R73_B2'
+ # rat = 'R75'
+ # block = 'R75_B8'
+ # rat = 'R70'
+ # block = 'R70_B8'
+
+ # rat = 'R32'
+ # block = 'R32_B7'
+ rat = 'R18'
+ block = 'R18_B12'
+
+ nwbfile = '/data/{}/{}.nwb'.format(rat, block)
+ auxfile = '/data/{}/{}_aux.h5'.format(rat, block)
+ specfile = '/data/{}/{}_spectra.h5'.format(rat, block)
+
+ plotter = TonePowerSpectrum(nwbfile, '.',
+ # proc_dset_name='Wvlt_4to1200_54band_CAR0',
+ auxfile=auxfile, half_width=0.005)
+
+ if not os.path.exists(auxfile):
+ bf_tone(plotter, auxfile)
+
+ plt.figure(figsize=(4, 4))
+ plotter.prepare_axes()
+ plotter.plot_all_and_avg(specfile=specfile)
+
+ plt.savefig("plots/tone_ps_{}.pdf".format(block))
+
diff --git a/analysis/simulation_analysis/tone_spectrogram.py b/analysis/simulation_analysis/tone_spectrogram.py
new file mode 100644
index 0000000..16c410d
--- /dev/null
+++ b/analysis/simulation_analysis/tone_spectrogram.py
@@ -0,0 +1,133 @@
+import os
+import numpy as np
+import matplotlib.pyplot as plt
+
+from analysis import BasePlotter, PlotterArgParser
+
+class ToneSpectrogram(BasePlotter):
+
+    def get_t_extent(self):
+        # t is in ms, so the sample spacing is 1000/rate (rate is in Hz)
+        t = np.arange(-100, 150, 1000.0/self.proc_dset.rate)
+        extent = [-100, 150, 0, 1]
+        return t, extent
+
+    def draw_stim_bars(self):
+        ymin, ymax = plt.ylim()
+        plt.plot([0, 0], [ymin, ymax], linestyle='--', linewidth=0.5, color='k')
+        plt.plot([50, 50], [ymin, ymax], linestyle='--', linewidth=0.5, color='k')
+        plt.ylim([ymin, ymax])
+
+    def draw_peak_bars(self):
+        ymin, ymax = plt.ylim()
+        center_samp = 10
+        center_time = center_samp / self.proc_dset.rate
+        t1, t2 = (center_time - .005) * 1000, (center_time + .005) * 1000
+        plt.plot([t1, t1], [ymin, ymax], linewidth=0.3, color='red')
+        plt.plot([t2, t2], [ymin, ymax], linewidth=0.3, color='red')
+        plt.ylim([ymin, ymax])
+
+ def plot_one(self, channel, **plot_kwargs):
+ """
+ Make one spectrogram and save it to file
+ """
+ ch_data = self.proc_dset.data[:, channel, :]
+ rate = self.proc_dset.rate
+
+ # Grab stim-on data, trial average if requested
+ bf = self.get_bfs()[channel]
+ trials = self.nwb.trials
+        trial_idxs = np.logical_and(
+            np.logical_and(trials['sb'][:] == 's', trials['frq'][:] == str(bf)),
+            trials['amp'][:] == '7')
+ times = zip(trials['start_time'][trial_idxs]-.1, trials['stop_time'][trial_idxs]+.1)
+ stim_periods = [(int(t[0]*self.proc_dset.rate), int(t[1]*self.proc_dset.rate)) for t in times]
+ if self.stim_i == 'avg':
+ print("doing stim avg")
+ n_stim_timepts = stim_periods[0][1] - stim_periods[0][0]
+ stim_data = np.average(
+ np.stack([ch_data[t[0]:t[0]+n_stim_timepts] for t in stim_periods]),
+ axis=0
+ )
+ elif self.tstart is not None and self.tstop is not None:
+ print("using tstart, tstop")
+ istart, istop = int(self.tstart*rate), int(self.tstop*rate)
+ stim_data = ch_data[istart:istop, :]
+ else: # self.stim_i is an integer index
+ print("doing stim {}".format(self.stim_i))
+ tstart, tstop = stim_periods[self.stim_i]
+ stim_data = ch_data[tstart:tstop, :]
+
+ # Rescale to baseline mean/stdev
+ bl_mean, bl_std = self.proc_bl_stats
+ bl_mean, bl_std = bl_mean[channel, :], bl_std[channel, :]
+ stim_data = (stim_data - bl_mean) / bl_std
+ # stim_data = stim_data / bl_mean
+
+ # Get band info for axis labels
+ bands = self.proc_dset.bands['band_mean'][:]
+
+ # Make plot
+ t, extent = self.get_t_extent()
+ ax = plt.gca()
+ im = ax.imshow(stim_data.T, origin='lower', cmap='Greys', aspect='auto',
+ extent=extent, **plot_kwargs) # , vmin=0, vmax=5)
+
+
+ plt.xlabel('Time (ms)')
+ plt.ylabel("Frequency (Hz)")
+ ticks, ticklabels = [], []
+ for i in range(0, len(bands), 8):
+ ticks.append(float(i)/len(bands))
+ ticklabels.append(int(bands[i]))
+ ax.set_yticks(ticks)
+ ax.set_yticklabels(ticklabels)
+ # plt.colorbar(label="Stim/baseline ratio")
+ # plt.colorbar().set_label(label="Z-score Amplitude", size=8)
+ plt.tight_layout()
+
+ # Draw stim bars
+ self.draw_stim_bars()
+
+ # Draw peak bars
+ self.draw_peak_bars()
+
+ if not self.nosave:
+ fn = 'spectrogram_{}_ch{:02d}_{}.{}'.format(
+ self.device, channel, self.identifier, self.filetype
+ )
+ full_fn = os.path.join(self.outdir, fn)
+ plt.savefig(full_fn)
+
+ if self.show:
+ plt.show()
+
+ return im
+
+
+if __name__ == '__main__':
+ # TONE150 (not used)
+ # rat = 'R72'
+ # block = 'R72_B6'
+ # rat = 'R73'
+ # block = 'R73_B2'
+ # rat = 'R70'
+ # block = 'R70_B8'
+ # rat = 'R75'
+ # block = 'R75_B8'
+ my_preproc = ['R70', 'R67']
+
+ rat = 'R32'
+ block = 'R32_B7'
+
+ nwbfile = '/data/{}/{}.nwb'.format(rat, block)
+ auxfile = '/data/{}/{}_aux.h5'.format(rat, block)
+
+ # Not used - all tone blocks are preprocessed by me
+ # proc_dset_name = 'Hilb_54bands' if rat in my_preproc else 'Wvlt_4to1200_54band_CAR0'
+
+ plotter = ToneSpectrogram(nwbfile, '.', auxfile=auxfile)
+ for channel in range(128):
+ plt.figure(figsize=(5, 4))
+ plotter.plot_one(channel)
+ plt.savefig('plots/tone_spect_{}_ch{}.pdf'.format(block, channel))
+ plt.close()
+ print("done channel {}".format(channel))
+
diff --git a/analysis/simulation_analysis/utils.py b/analysis/simulation_analysis/utils.py
new file mode 100644
index 0000000..945df77
--- /dev/null
+++ b/analysis/simulation_analysis/utils.py
@@ -0,0 +1,91 @@
+import glob
+import os
+import json
+import logging
+
+import numpy as np
+from scipy.signal import butter, lfilter
+
+log = logging.getLogger(__name__)
+
+def butter_bandpass(lowcut, highcut, fs, order=5):
+ nyq = 0.5 * fs
+ low = lowcut / nyq
+ high = highcut / nyq
+ b, a = butter(order, [low, high], btype='band')
+ return b, a
+
+
+def bandpass(data, fs, lowcut=20, highcut=5000, order=5):
+ b, a = butter_bandpass(lowcut, highcut, fs, order=order)
+ y = lfilter(b, a, data)
+ return y
+
+def butter_highpass(lowcut, fs, order=5):
+ nyq = 0.5 * fs
+ low = lowcut / nyq
+ b, a = butter(order, low, btype='highpass')
+ return b, a
+
+def highpass(data, fs, lowcut, order=5):
+ b, a = butter_highpass(lowcut, fs, order=order)
+ y = lfilter(b, a, data)
+ return y
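+
+# Filter usage sketch (values illustrative): isolate the high gamma band with
+#   y = bandpass(x, fs=3200., lowcut=65, highcut=170)
+# Note lfilter is a causal single-pass filter, so it introduces phase delay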
+
+def log_spaced_cfs(fmin, fmax, nbin=6):
+ """
+ Center frequencies that are uniform in log space
+ """
+ noct = np.ceil(np.log2(fmax/fmin))
+ return fmin * 2**(np.arange(noct*nbin)/nbin)
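+
+# e.g. log_spaced_cfs(4., 32., nbin=1) -> array([4., 8., 16.]): one center
+# frequency per octave; the default nbin=6 gives six per octave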
+
+wavelet_cfs = log_spaced_cfs(2.6308, 1200.0)
+
+CCROOT = '/Users/vbaratham/src/cortical-column'
+
+def find_layer_ei_ecp_file(jobnum):
+ output_dir = os.path.join(CCROOT, 'runs', jobnum, '1', 'output')
+ ecp_files = glob.glob(os.path.join(output_dir, 'ecp*layer_ei*.nwb'))
+ if len(ecp_files) == 0:
+ raise ValueError('No layer_ei ECP file found')
+ elif len(ecp_files) == 1:
+ return ecp_files[0]
+ else:
+ log.info(
+ 'Found multiple layer_ei ECP files: \n{}\n'.format('\n'.join(ecp_files)) +
+ '\nUsing {}\n'.format(ecp_files[-1])
+ )
+ return ecp_files[-1]
+
+def find_slice_ecp_file(jobnum, thickness=100):
+    output_dir = os.path.join(CCROOT, 'runs', jobnum, '1', 'output')
+    ecp_files = glob.glob(os.path.join(output_dir, 'ecp*{}um*.nwb'.format(thickness)))
+    if len(ecp_files) == 0:
+        raise ValueError('No {}um slice ECP file found'.format(thickness))
+    elif len(ecp_files) == 1:
+        return ecp_files[0]
+    else:
+        log.info(
+            'Found multiple {}um slice ECP files: \n{}\n'.format(thickness, '\n'.join(ecp_files)) +
+            '\nUsing {}\n'.format(ecp_files[-1])
+        )
+        return ecp_files[-1]
+
+def get_layer_slice_counts(jobnum, thickness=100):
+ fn = os.path.join(CCROOT, 'runs', jobnum, '1', 'output', 'layer_slice_counts.json')
+ with open(fn, 'r') as infile:
+ orig = json.load(infile)
+ counts = {
+ int(layer): {int(slice_i): count for slice_i, count in slice_counts.items()}
+ for layer, slice_counts in orig.items()
+ }
+ if thickness == 100:
+ return counts
+ elif thickness == 200:
+ def convert(layercounts):
+ return {slice_i: layercounts[slice_i*2] + layercounts.get(slice_i*2 + 1, 0)
+ for slice_i in range(11)}
+ for layer in counts.keys():
+ counts[layer] = convert(counts[layer])
+ return counts
+ else:
+ raise ValueError("Can only do 100 or 200um slices")
+
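+# e.g. with thickness=200, 100um slices (0, 1) are summed into 200um slice 0,
+# (2, 3) into slice 1, and so on; 100um slice 21 does not exist, hence the
+# .get() fallback in convert() above
+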
+numerals = {1: 'I', 2: 'II', 3: 'III', 4: 'IV', 5: 'V', 6: 'VI'}
diff --git a/bmtk-vb/CONTRIBUTING.md b/bmtk-vb/CONTRIBUTING.md
new file mode 100644
index 0000000..e55867f
--- /dev/null
+++ b/bmtk-vb/CONTRIBUTING.md
@@ -0,0 +1,26 @@
+# Allen Institute Contribution Agreement
+
+This document describes the terms under which you may make “Contributions” —
+which may include without limitation, software additions, revisions, bug fixes, configuration changes,
+documentation, or any other materials — to any of the projects owned or managed by the Allen Institute.
+If you have questions about these terms, please contact us at terms@alleninstitute.org.
+
+You certify that:
+
+• Your Contributions are either:
+
+1. Created in whole or in part by you and you have the right to submit them under the designated license
+(described below); or
+2. Based upon previous work that, to the best of your knowledge, is covered under an appropriate
+open source license and you have the right under that license to submit that work with modifications,
+whether created in whole or in part by you, under the designated license; or
+
+3. Provided directly to you by some other person who certified (1) or (2) and you have not modified them.
+
+• You are granting your Contributions to the Allen Institute under the terms of the [2-Clause BSD license](https://opensource.org/licenses/BSD-2-Clause)
+(the “designated license”).
+
+• You understand and agree that the Allen Institute projects and your Contributions are public and that
+a record of the Contributions (including all metadata and personal information you submit with them) is
+maintained indefinitely and may be redistributed consistent with the Allen Institute’s mission and the
+2-Clause BSD license.
diff --git a/bmtk-vb/LICENSE.txt b/bmtk-vb/LICENSE.txt
new file mode 100644
index 0000000..280f59d
--- /dev/null
+++ b/bmtk-vb/LICENSE.txt
@@ -0,0 +1,21 @@
+Copyright 2017. Allen Institute. All rights reserved
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+disclaimer in the documentation and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/bmtk-vb/README.md b/bmtk-vb/README.md
new file mode 100644
index 0000000..29d5b8b
--- /dev/null
+++ b/bmtk-vb/README.md
@@ -0,0 +1,32 @@
+# The Brain Modeling Toolkit
+
+A software development package for building, simulating and analyzing large-scale networks of different levels of resolution.
+
+## Level of Support
+We are releasing this code to the public as a tool we expect others to use. Questions concerning bugs and related issues are welcomed. We expect to address them promptly; pull requests will be vetted by our staff before inclusion.
+
+
+## Quickstart
+bmtk requires Python 2.7 plus [additional python dependencies](https://alleninstitute.github.io/bmtk/index.html#base-installation). To install with
+base requirements from a command-line:
+
+```bash
+ $ git clone https://github.com/AllenInstitute/bmtk.git
+ $ cd bmtk
+ $ python setup.py install
+```
+
+There are examples of building models and running simulations located in docs/examples/. Some of the simulation engines may require additional requirements to run.
+
+
+## Documentation
+
+[User Guide](https://alleninstitute.github.io/bmtk/)
+* [Building network models](https://alleninstitute.github.io/bmtk/builder.html)
+* [Running biophysical simulations](https://alleninstitute.github.io/bmtk/bionet.html)
+* [Running point-neuron simulations](https://alleninstitute.github.io/bmtk/pointnet.html)
+* [Running population-level simulations](https://alleninstitute.github.io/bmtk/popnet.html)
+
+
+
+Copyright 2017 Allen Institute
diff --git a/bmtk-vb/bmtk.egg-info/PKG-INFO b/bmtk-vb/bmtk.egg-info/PKG-INFO
new file mode 100644
index 0000000..ad0b5ba
--- /dev/null
+++ b/bmtk-vb/bmtk.egg-info/PKG-INFO
@@ -0,0 +1,57 @@
+Metadata-Version: 2.1
+Name: bmtk
+Version: 0.0.6
+Summary: Brain Modeling Toolkit
+Home-page: https://github.com/AllenInstitute/bmtk
+Author: Kael Dai
+Author-email: kaeld@alleninstitute.org
+License: UNKNOWN
+Description: # The Brain Modeling Toolkit
+
+ A software development package for building, simulating and analyzing large-scale networks of different levels of resolution.
+
+ ## Level of Support
+    We are releasing this code to the public as a tool we expect others to use. Questions concerning bugs and related issues are welcomed. We expect to address them promptly; pull requests will be vetted by our staff before inclusion.
+
+
+ ## Quickstart
+    bmtk requires Python 2.7 plus [additional python dependencies](https://alleninstitute.github.io/bmtk/index.html#base-installation). To install with
+ base requirements from a command-line:
+
+ ```bash
+ $ git clone https://github.com/AllenInstitute/bmtk.git
+ $ cd bmtk
+ $ python setup.py install
+ ```
+
+    There are examples of building models and running simulations located in docs/examples/. Some of the simulation engines may require additional requirements to run.
+
+
+ ## Documentation
+
+ [User Guide](https://alleninstitute.github.io/bmtk/)
+ * [Building network models](https://alleninstitute.github.io/bmtk/builder.html)
+ * [Running biophysical simulations](https://alleninstitute.github.io/bmtk/bionet.html)
+ * [Running point-neuron simulations](https://alleninstitute.github.io/bmtk/pointnet.html)
+ * [Running population-level simulations](https://alleninstitute.github.io/bmtk/popnet.html)
+
+
+
+ Copyright 2017 Allen Institute
+
+Keywords: neuroscience,scientific,modeling,simulation
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Scientific/Engineering :: Bio-Informatics
+Description-Content-Type: text/markdown
+Provides-Extra: mintnet
+Provides-Extra: popnet
+Provides-Extra: pointnet
+Provides-Extra: bionet
diff --git a/bmtk-vb/bmtk.egg-info/SOURCES.txt b/bmtk-vb/bmtk.egg-info/SOURCES.txt
new file mode 100644
index 0000000..9b37b58
--- /dev/null
+++ b/bmtk-vb/bmtk.egg-info/SOURCES.txt
@@ -0,0 +1,215 @@
+README.md
+setup.py
+bmtk/__init__.py
+bmtk.egg-info/PKG-INFO
+bmtk.egg-info/SOURCES.txt
+bmtk.egg-info/dependency_links.txt
+bmtk.egg-info/requires.txt
+bmtk.egg-info/top_level.txt
+bmtk/analyzer/__init__.py
+bmtk/analyzer/cell_vars.py
+bmtk/analyzer/firing_rates.py
+bmtk/analyzer/io_tools.py
+bmtk/analyzer/spike_trains.py
+bmtk/analyzer/spikes_analyzer.py
+bmtk/analyzer/spikes_loader.py
+bmtk/analyzer/utils.py
+bmtk/analyzer/visualization/__init__.py
+bmtk/analyzer/visualization/rasters.py
+bmtk/analyzer/visualization/spikes.py
+bmtk/analyzer/visualization/widgets.py
+bmtk/builder/__init__.py
+bmtk/builder/connection_map.py
+bmtk/builder/connector.py
+bmtk/builder/edge.py
+bmtk/builder/functor_cache.py
+bmtk/builder/id_generator.py
+bmtk/builder/iterator.py
+bmtk/builder/network.py
+bmtk/builder/node.py
+bmtk/builder/node_pool.py
+bmtk/builder/node_set.py
+bmtk/builder/aux/__init__.py
+bmtk/builder/aux/edge_connectors.py
+bmtk/builder/aux/node_params.py
+bmtk/builder/bionet/__init__.py
+bmtk/builder/bionet/swc_reader.py
+bmtk/builder/formats/__init__.py
+bmtk/builder/formats/hdf5_format.py
+bmtk/builder/formats/iformats.py
+bmtk/builder/io/__init__.py
+bmtk/builder/networks/__init__.py
+bmtk/builder/networks/dm_network.py
+bmtk/builder/networks/input_network.py
+bmtk/builder/networks/mpi_network.py
+bmtk/builder/networks/nxnetwork.py
+bmtk/builder/networks/sparse_network.py
+bmtk/simulator/__init__.py
+bmtk/simulator/bionet/__init__.py
+bmtk/simulator/bionet/biocell.py
+bmtk/simulator/bionet/bionetwork.py
+bmtk/simulator/bionet/biosimulator.py
+bmtk/simulator/bionet/cell.py
+bmtk/simulator/bionet/config.py
+bmtk/simulator/bionet/iclamp.py
+bmtk/simulator/bionet/io_tools.py
+bmtk/simulator/bionet/morphology.py
+bmtk/simulator/bionet/nml_reader.py
+bmtk/simulator/bionet/nrn.py
+bmtk/simulator/bionet/pointprocesscell.py
+bmtk/simulator/bionet/pointsomacell.py
+bmtk/simulator/bionet/pyfunction_cache.py
+bmtk/simulator/bionet/sonata_adaptors.py
+bmtk/simulator/bionet/utils.py
+bmtk/simulator/bionet/virtualcell.py
+bmtk/simulator/bionet/default_setters/__init__.py
+bmtk/simulator/bionet/default_setters/cell_models.py
+bmtk/simulator/bionet/default_setters/synapse_models.py
+bmtk/simulator/bionet/default_setters/synaptic_weights.py
+bmtk/simulator/bionet/modules/__init__.py
+bmtk/simulator/bionet/modules/ecp.py
+bmtk/simulator/bionet/modules/record_cellvars.py
+bmtk/simulator/bionet/modules/record_spikes.py
+bmtk/simulator/bionet/modules/save_synapses.py
+bmtk/simulator/bionet/modules/sim_module.py
+bmtk/simulator/bionet/modules/xstim.py
+bmtk/simulator/bionet/modules/xstim_waveforms.py
+bmtk/simulator/core/__init__.py
+bmtk/simulator/core/config.py
+bmtk/simulator/core/edge_population.py
+bmtk/simulator/core/graph.py
+bmtk/simulator/core/io_tools.py
+bmtk/simulator/core/network_reader.py
+bmtk/simulator/core/node_population.py
+bmtk/simulator/core/node_sets.py
+bmtk/simulator/core/simulator.py
+bmtk/simulator/core/simulator_network.py
+bmtk/simulator/core/sonata_reader/__init__.py
+bmtk/simulator/core/sonata_reader/edge_adaptor.py
+bmtk/simulator/core/sonata_reader/network_reader.py
+bmtk/simulator/core/sonata_reader/node_adaptor.py
+bmtk/simulator/filternet/__init__.py
+bmtk/simulator/filternet/cell.py
+bmtk/simulator/filternet/cell_models.py
+bmtk/simulator/filternet/config.py
+bmtk/simulator/filternet/filternetwork.py
+bmtk/simulator/filternet/filters.py
+bmtk/simulator/filternet/filtersimulator.py
+bmtk/simulator/filternet/io_tools.py
+bmtk/simulator/filternet/pyfunction_cache.py
+bmtk/simulator/filternet/transfer_functions.py
+bmtk/simulator/filternet/utils.py
+bmtk/simulator/filternet/default_setters/__init__.py
+bmtk/simulator/filternet/default_setters/cell_loaders.py
+bmtk/simulator/filternet/lgnmodel/__init__.py
+bmtk/simulator/filternet/lgnmodel/cellmodel.py
+bmtk/simulator/filternet/lgnmodel/cursor.py
+bmtk/simulator/filternet/lgnmodel/fitfuns.py
+bmtk/simulator/filternet/lgnmodel/kernel.py
+bmtk/simulator/filternet/lgnmodel/lattice_unit_constructor.py
+bmtk/simulator/filternet/lgnmodel/lgnmodel1.py
+bmtk/simulator/filternet/lgnmodel/linearfilter.py
+bmtk/simulator/filternet/lgnmodel/lnunit.py
+bmtk/simulator/filternet/lgnmodel/make_cell_list.py
+bmtk/simulator/filternet/lgnmodel/movie.py
+bmtk/simulator/filternet/lgnmodel/poissongeneration.py
+bmtk/simulator/filternet/lgnmodel/singleunitcell.py
+bmtk/simulator/filternet/lgnmodel/spatialfilter.py
+bmtk/simulator/filternet/lgnmodel/temporalfilter.py
+bmtk/simulator/filternet/lgnmodel/transferfunction.py
+bmtk/simulator/filternet/lgnmodel/util_fns.py
+bmtk/simulator/filternet/lgnmodel/utilities.py
+bmtk/simulator/filternet/modules/__init__.py
+bmtk/simulator/filternet/modules/base.py
+bmtk/simulator/filternet/modules/create_spikes.py
+bmtk/simulator/filternet/modules/record_rates.py
+bmtk/simulator/mintnet/Image_Library.py
+bmtk/simulator/mintnet/Image_Library_Supervised.py
+bmtk/simulator/mintnet/__init__.py
+bmtk/simulator/mintnet/analysis/LocallySparseNoise.py
+bmtk/simulator/mintnet/analysis/StaticGratings.py
+bmtk/simulator/mintnet/analysis/__init__.py
+bmtk/simulator/mintnet/hmax/C_Layer.py
+bmtk/simulator/mintnet/hmax/Readout_Layer.py
+bmtk/simulator/mintnet/hmax/S1_Layer.py
+bmtk/simulator/mintnet/hmax/S_Layer.py
+bmtk/simulator/mintnet/hmax/Sb_Layer.py
+bmtk/simulator/mintnet/hmax/ViewTunedLayer.py
+bmtk/simulator/mintnet/hmax/__init__.py
+bmtk/simulator/mintnet/hmax/hmax.py
+bmtk/simulator/pointnet/__init__.py
+bmtk/simulator/pointnet/config.py
+bmtk/simulator/pointnet/io_tools.py
+bmtk/simulator/pointnet/pointnetwork.py
+bmtk/simulator/pointnet/pointsimulator.py
+bmtk/simulator/pointnet/property_map.py
+bmtk/simulator/pointnet/pyfunction_cache.py
+bmtk/simulator/pointnet/sonata_adaptors.py
+bmtk/simulator/pointnet/utils.py
+bmtk/simulator/pointnet/default_setters/__init__.py
+bmtk/simulator/pointnet/default_setters/synapse_models.py
+bmtk/simulator/pointnet/default_setters/synaptic_weights.py
+bmtk/simulator/pointnet/modules/__init__.py
+bmtk/simulator/pointnet/modules/multimeter_reporter.py
+bmtk/simulator/pointnet/modules/record_spikes.py
+bmtk/simulator/popnet/__init__.py
+bmtk/simulator/popnet/config.py
+bmtk/simulator/popnet/popedge.py
+bmtk/simulator/popnet/popnetwork.py
+bmtk/simulator/popnet/popnetwork_OLD.py
+bmtk/simulator/popnet/popnode.py
+bmtk/simulator/popnet/popsimulator.py
+bmtk/simulator/popnet/sonata_adaptors.py
+bmtk/simulator/popnet/utils.py
+bmtk/simulator/popnet/property_schemas/__init__.py
+bmtk/simulator/popnet/property_schemas/base_schema.py
+bmtk/simulator/popnet/property_schemas/property_schema_ver0.py
+bmtk/simulator/popnet/property_schemas/property_schema_ver1.py
+bmtk/simulator/utils/__init__.py
+bmtk/simulator/utils/config.py
+bmtk/simulator/utils/graph.py
+bmtk/simulator/utils/io.py
+bmtk/simulator/utils/load_spikes.py
+bmtk/simulator/utils/nwb.py
+bmtk/simulator/utils/property_maps.py
+bmtk/simulator/utils/sim_validator.py
+bmtk/simulator/utils/simulation_inputs.py
+bmtk/simulator/utils/simulation_reports.py
+bmtk/simulator/utils/stimulus/LocallySparseNoise.py
+bmtk/simulator/utils/stimulus/NaturalScenes.py
+bmtk/simulator/utils/stimulus/StaticGratings.py
+bmtk/simulator/utils/stimulus/__init__.py
+bmtk/simulator/utils/tools/__init__.py
+bmtk/simulator/utils/tools/process_spikes.py
+bmtk/simulator/utils/tools/spatial.py
+bmtk/utils/__init__.py
+bmtk/utils/property_schema.py
+bmtk/utils/sim_setup.py
+bmtk/utils/cell_vars/__init__.py
+bmtk/utils/cell_vars/var_reader.py
+bmtk/utils/converters/__init__.py
+bmtk/utils/converters/hoc_converter.py
+bmtk/utils/converters/sonata/__init__.py
+bmtk/utils/converters/sonata/edge_converters.py
+bmtk/utils/converters/sonata/node_converters.py
+bmtk/utils/io/__init__.py
+bmtk/utils/io/cell_vars.py
+bmtk/utils/io/firing_rates.py
+bmtk/utils/io/spike_trains.py
+bmtk/utils/io/tabular_network.py
+bmtk/utils/io/tabular_network_v0.py
+bmtk/utils/io/tabular_network_v1.py
+bmtk/utils/sonata/__init__.py
+bmtk/utils/sonata/column_property.py
+bmtk/utils/sonata/config.py
+bmtk/utils/sonata/edge.py
+bmtk/utils/sonata/file.py
+bmtk/utils/sonata/file_root.py
+bmtk/utils/sonata/group.py
+bmtk/utils/sonata/node.py
+bmtk/utils/sonata/population.py
+bmtk/utils/sonata/types_table.py
+bmtk/utils/sonata/utils.py
+bmtk/utils/spike_trains/__init__.py
+bmtk/utils/spike_trains/spikes_csv.py
+bmtk/utils/spike_trains/spikes_file.py
\ No newline at end of file
diff --git a/bmtk-vb/bmtk.egg-info/dependency_links.txt b/bmtk-vb/bmtk.egg-info/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/bmtk-vb/bmtk.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/bmtk-vb/bmtk.egg-info/requires.txt b/bmtk-vb/bmtk.egg-info/requires.txt
new file mode 100644
index 0000000..582dcee
--- /dev/null
+++ b/bmtk-vb/bmtk.egg-info/requires.txt
@@ -0,0 +1,18 @@
+jsonschema
+pandas
+numpy
+six
+h5py
+matplotlib
+
+[bionet]
+NEURON
+
+[mintnet]
+tensorflow
+
+[pointnet]
+NEST
+
+[popnet]
+DiPDE
diff --git a/bmtk-vb/bmtk.egg-info/top_level.txt b/bmtk-vb/bmtk.egg-info/top_level.txt
new file mode 100644
index 0000000..8ea5840
--- /dev/null
+++ b/bmtk-vb/bmtk.egg-info/top_level.txt
@@ -0,0 +1 @@
+bmtk
diff --git a/bmtk-vb/bmtk/__init__.py b/bmtk-vb/bmtk/__init__.py
new file mode 100644
index 0000000..f4f772b
--- /dev/null
+++ b/bmtk-vb/bmtk/__init__.py
@@ -0,0 +1,23 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+__version__ = '0.0.6'
diff --git a/bmtk-vb/bmtk/__init__.pyc b/bmtk-vb/bmtk/__init__.pyc
new file mode 100644
index 0000000..98acade
Binary files /dev/null and b/bmtk-vb/bmtk/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/__pycache__/__init__.cpython-35.pyc b/bmtk-vb/bmtk/__pycache__/__init__.cpython-35.pyc
new file mode 100644
index 0000000..cf5af18
Binary files /dev/null and b/bmtk-vb/bmtk/__pycache__/__init__.cpython-35.pyc differ
diff --git a/bmtk-vb/bmtk/__pycache__/__init__.cpython-36.pyc b/bmtk-vb/bmtk/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000..ce229e6
Binary files /dev/null and b/bmtk-vb/bmtk/__pycache__/__init__.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..d3311ea
Binary files /dev/null and b/bmtk-vb/bmtk/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/analyzer/__init__.py b/bmtk-vb/bmtk/analyzer/__init__.py
new file mode 100644
index 0000000..7b04c40
--- /dev/null
+++ b/bmtk-vb/bmtk/analyzer/__init__.py
@@ -0,0 +1,189 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+from six import string_types
+import h5py
+import pandas as pd
+import matplotlib.pyplot as plt
+import numpy as np
+
+import bmtk.simulator.utils.config as cfg
+
+
+def _get_config(config):
+ if isinstance(config, string_types):
+ return cfg.from_json(config)
+ elif isinstance(config, dict):
+ return config
+ else:
+ raise Exception('Could not convert {} (type "{}") to json.'.format(config, type(config)))
+
+def plot_potential(cell_vars_h5=None, config_file=None, gids=None, show_plot=True, save=False):
+    if cell_vars_h5 is None and config_file is None:
+ raise Exception('Please specify a cell_vars hdf5 file or a simulation config.')
+
+ if cell_vars_h5 is not None:
+ plot_potential_hdf5(cell_vars_h5, gids=gids, show_plot=show_plot,
+ save_as='sim_potential.jpg' if save else None)
+
+ else:
+        # load the json file or dict
+        config = _get_config(config_file)
+
+ gid_list = gids or config['node_id_selections']['save_cell_vars']
+ for gid in gid_list:
+ save_as = '{}_v.jpg'.format(gid) if save else None
+ title = 'cell gid {}'.format(gid)
+ var_h5 = os.path.join(config['output']['cell_vars_dir'], '{}.h5'.format(gid))
+            plot_potential_hdf5(var_h5, [gid], title=title, show_plot=show_plot, save_as=save_as)
+
+
+def plot_potential_hdf5(cell_vars_h5, gids, title='membrane potential', show_plot=True, save_as=None):
+ data_h5 = h5py.File(cell_vars_h5, 'r')
+ membrane_trace = data_h5['data']
+
+ time_ds = data_h5['/mapping/time']
+ tstart = time_ds[0]
+ tstop = time_ds[1]
+ x_axis = np.linspace(tstart, tstop, len(membrane_trace), endpoint=True)
+
+ gids_ds = data_h5['/mapping/gids']
+ index_ds = data_h5['/mapping/index_pointer']
+ index_lookup = {gids_ds[i]: (index_ds[i], index_ds[i+1]) for i in range(len(gids_ds))}
+    gids = list(gids_ds) if gids is None else gids
+ for gid in gids:
+ var_indx = index_lookup[gid][0]
+ plt.plot(x_axis, membrane_trace[:, var_indx], label=gid)
+
+ plt.xlabel('time (ms)')
+ plt.ylabel('membrane (mV)')
+ plt.title(title)
+ plt.legend(markerscale=2, scatterpoints=1)
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show_plot:
+ plt.show()
+
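+# Example usage (a sketch; the file path is an assumption, but the HDF5 layout
+# (/data, /mapping/time, /mapping/gids, /mapping/index_pointer) is the one read above):
+#
+#   plot_potential_hdf5('output/cell_vars.h5', gids=[0, 1],
+#                       title='membrane potential', save_as='sim_potential.jpg')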
+
+def plot_calcium(cell_vars_h5=None, config_file=None, gids=None, show_plot=True, save=False):
+    if cell_vars_h5 is None and config_file is None:
+ raise Exception('Please specify a cell_vars hdf5 file or a simulation config.')
+
+ if cell_vars_h5 is not None:
+ plot_calcium_hdf5(cell_vars_h5, gids, show_plot=show_plot, save_as='sim_ca.jpg' if save else None)
+
+ else:
+        # load the json file or dict
+        config = _get_config(config_file)
+
+ gid_list = gids or config['node_id_selections']['save_cell_vars']
+ for gid in gid_list:
+            save_as = '{}_ca.jpg'.format(gid) if save else None
+            title = 'cell gid {}'.format(gid)
+            var_h5 = os.path.join(config['output']['cell_vars_dir'], '{}.h5'.format(gid))
+            plot_calcium_hdf5(var_h5, [gid], title=title, show_plot=show_plot, save_as=save_as)
+
+
+def plot_calcium_hdf5(cell_vars_h5, gids, title='Ca2+ influx', show_plot=True, save_as=None):
+ data_h5 = h5py.File(cell_vars_h5, 'r')
+ cai_trace = data_h5['cai/data']
+
+ time_ds = data_h5['/mapping/time']
+ tstart = time_ds[0]
+ tstop = time_ds[1]
+ x_axis = np.linspace(tstart, tstop, len(cai_trace), endpoint=True)
+
+ gids_ds = data_h5['/mapping/gids']
+ index_ds = data_h5['/mapping/index_pointer']
+ index_lookup = {gids_ds[i]: (index_ds[i], index_ds[i+1]) for i in range(len(gids_ds))}
+    gids = list(gids_ds) if gids is None else gids
+ for gid in gids:
+ var_indx = index_lookup[gid][0]
+ plt.plot(x_axis, cai_trace[:, var_indx], label=gid)
+
+ #plt.plot(x_axis, cai_trace)
+ plt.xlabel('time (ms)')
+ plt.ylabel('calcium [Ca2+]')
+ plt.title(title)
+ plt.legend(markerscale=2, scatterpoints=1)
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show_plot:
+ plt.show()
+
+
+def spikes_table(config_file, spikes_file=None):
+ config = _get_config(config_file)
+    spikes_file = spikes_file or config['output']['spikes_file']
+ spikes_h5 = h5py.File(spikes_file, 'r')
+ gids = np.array(spikes_h5['/spikes/gids'], dtype=np.uint)
+ times = np.array(spikes_h5['/spikes/timestamps'], dtype=np.float)
+ return pd.DataFrame(data={'gid': gids, 'spike time (ms)': times})
+ #return pd.read_csv(spikes_ascii, names=['time (ms)', 'cell gid'], sep=' ')
+
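+# Example usage (sketch; the config path is an assumption). Returns one row per
+# spike with columns 'gid' and 'spike time (ms)':
+#
+#   df = spikes_table('simulation_config.json')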
+
+def nodes_table(nodes_file, population):
+ # TODO: Integrate into sonata api
+ nodes_h5 = h5py.File(nodes_file, 'r')
+ nodes_pop = nodes_h5['/nodes'][population]
+ root_df = pd.DataFrame(data={'node_id': nodes_pop['node_id'], 'node_type_id': nodes_pop['node_type_id'],
+ 'node_group_id': nodes_pop['node_group_id'],
+ 'node_group_index': nodes_pop['node_group_index']}) #,
+ #index=[nodes_pop['node_group_id'], nodes_pop['node_group_index']])
+ root_df = root_df.set_index(['node_group_id', 'node_group_index'])
+
+ node_grps = np.unique(nodes_pop['node_group_id'])
+ for grp_id in node_grps:
+ sub_group = nodes_pop[str(grp_id)]
+ grp_df = pd.DataFrame()
+ for hf_key in sub_group:
+ hf_obj = sub_group[hf_key]
+ if isinstance(hf_obj, h5py.Dataset):
+ grp_df[hf_key] = hf_obj
+
+ subgrp_len = len(grp_df)
+ if subgrp_len > 0:
+ grp_df['node_group_id'] = [grp_id]*subgrp_len
+ grp_df['node_group_index'] = range(subgrp_len)
+ grp_df = grp_df.set_index(['node_group_id', 'node_group_index'])
+ root_df = root_df.join(other=grp_df, how='left')
+
+ return root_df.reset_index(drop=True)
+
+
+def node_types_table(node_types_file, population):
+    # TODO: filter by population; for now the full node_types table is returned
+    return pd.read_csv(node_types_file, sep=' ')
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/analyzer/__init__.pyc b/bmtk-vb/bmtk/analyzer/__init__.pyc
new file mode 100644
index 0000000..bf540e6
Binary files /dev/null and b/bmtk-vb/bmtk/analyzer/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/analyzer/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/analyzer/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..e5e45c9
Binary files /dev/null and b/bmtk-vb/bmtk/analyzer/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/analyzer/cell_vars.py b/bmtk-vb/bmtk/analyzer/cell_vars.py
new file mode 100644
index 0000000..da2e719
--- /dev/null
+++ b/bmtk-vb/bmtk/analyzer/cell_vars.py
@@ -0,0 +1,95 @@
+import os
+import matplotlib.pyplot as plt
+
+from .io_tools import load_config
+from .utils import listify
+from bmtk.utils.cell_vars import CellVarsFile
+
+# If a report is missing units, try to guess them based on the variable name
+missing_units = {
+ 'V_m': 'mV',
+ 'cai': 'mM',
+ 'v': 'mV'
+}
+
+
+def _get_cell_report(config_file, report_name):
+ cfg = load_config(config_file)
+    if report_name is not None:
+        report = cfg.reports[report_name]
+        report_fname = report['file_name'] if 'file_name' in report else '{}.h5'.format(report_name)
+        return report_name, os.path.join(cfg.output_dir, report_fname)
+
+ else:
+ cell_var_reports = [(r_name, r_dict) for r_name, r_dict in cfg.reports.items()
+ if r_dict['module'] == 'membrane_report']
+ if len(cell_var_reports) == 0:
+ raise Exception('Could not find any membrane_reports in {}'.format(config_file))
+
+ elif len(cell_var_reports) > 1:
+ raise Exception('Found more than one membrane_report, please specify report_name')
+
+ else:
+ report_name = cell_var_reports[0][0]
+ report = cell_var_reports[0][1]
+ report_fname = report['file_name'] if 'file_name' in report else '{}.h5'.format(report_name)
+ return report_name, os.path.join(cfg.output_dir, report_fname)
+
+
+def plot_report(config_file=None, report_file=None, report_name=None, variables=None, gids=None):
+ if report_file is None:
+ report_name, report_file = _get_cell_report(config_file, report_name)
+
+ var_report = CellVarsFile(report_file)
+ variables = listify(variables) if variables is not None else var_report.variables
+ gids = listify(gids) if gids is not None else var_report.gids
+ time_steps = var_report.time_trace
+
+ def __units_str(var):
+ units = var_report.units(var)
+ if units == CellVarsFile.UNITS_UNKNOWN:
+ units = missing_units.get(var, '')
+ return '({})'.format(units) if units else ''
+
+ n_plots = len(variables)
+ if n_plots > 1:
+        # If more than one variable is plotted, put each in its own subplot
+ f, axarr = plt.subplots(n_plots, 1)
+ for i, var in enumerate(variables):
+ for gid in gids:
+ axarr[i].plot(time_steps, var_report.data(gid=gid, var_name=var), label='gid {}'.format(gid))
+
+ axarr[i].legend()
+ axarr[i].set_ylabel('{} {}'.format(var, __units_str(var)))
+ if i < n_plots - 1:
+ axarr[i].set_xticklabels([])
+
+ axarr[i].set_xlabel('time (ms)')
+
+ elif n_plots == 1:
+ # For plotting a single variable
+ plt.figure()
+ for gid in gids:
+            plt.plot(time_steps, var_report.data(gid=gid, var_name=variables[0]), label='gid {}'.format(gid))
+ plt.ylabel('{} {}'.format(variables[0], __units_str(variables[0])))
+ plt.xlabel('time (ms)')
+
+ else:
+ return
+
+ plt.show()
+
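+# Example usage (a sketch; the config/report file names are assumptions):
+#
+#   plot_report(config_file='simulation_config.json', variables=['v'], gids=[0, 5])
+#   plot_report(report_file='output/membrane_report.h5')  # bypass the config lookup
+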
diff --git a/bmtk-vb/bmtk/analyzer/firing_rates.py b/bmtk-vb/bmtk/analyzer/firing_rates.py
new file mode 100644
index 0000000..bca785c
--- /dev/null
+++ b/bmtk-vb/bmtk/analyzer/firing_rates.py
@@ -0,0 +1,55 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import numpy as np
+
+def convert_rates(rates_file):
+ rates_df = pd.read_csv(rates_file, sep=' ', names=['gid', 'time', 'rate'])
+ rates_sorted_df = rates_df.sort_values(['gid', 'time'])
+ rates_dict = {}
+ for gid, rates in rates_sorted_df.groupby('gid'):
+ start = rates['time'].iloc[0]
+ #start = rates['rate'][0]
+ end = rates['time'].iloc[-1]
+ dt = float(end - start)/len(rates)
+ rates_dict[gid] = {'start': start, 'end': end, 'dt': dt, 'rates': np.array(rates['rate'])}
+
+ return rates_dict
+
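+# Example (sketch; the file name is an assumption): for a space-separated file
+# with columns gid, time, rate,
+#   convert_rates('output/rates.txt')
+# returns {gid: {'start': t0, 'end': t1, 'dt': step, 'rates': np.array([...])}}.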
+
+def firing_rates_equal(rates_file1, rates_file2, err=0.0001):
+ trial_1 = convert_rates(rates_file1)
+ trial_2 = convert_rates(rates_file2)
+ if set(trial_1.keys()) != set(trial_2.keys()):
+ return False
+
+ for gid, rates_data1 in trial_1.items():
+ rates_data2 = trial_2[gid]
+ if rates_data1['dt'] != rates_data2['dt'] or rates_data1['start'] != rates_data2['start'] or rates_data1['end'] != rates_data2['end']:
+ return False
+
+ for r1, r2 in zip(rates_data1['rates'], rates_data2['rates']):
+ if abs(r1 - r2) > err:
+ return False
+
+ return True
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/analyzer/io_tools.py b/bmtk-vb/bmtk/analyzer/io_tools.py
new file mode 100644
index 0000000..326389b
--- /dev/null
+++ b/bmtk-vb/bmtk/analyzer/io_tools.py
@@ -0,0 +1,11 @@
+from six import string_types
+from bmtk.simulator.utils.config import ConfigDict
+
+
+def load_config(config):
+ if isinstance(config, string_types):
+ return ConfigDict.from_json(config)
+ elif isinstance(config, dict):
+ return ConfigDict.from_dict(config)
+ else:
+ raise Exception('Could not convert {} (type "{}") to json.'.format(config, type(config)))
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/analyzer/spike_trains.py b/bmtk-vb/bmtk/analyzer/spike_trains.py
new file mode 100644
index 0000000..a7f6c8d
--- /dev/null
+++ b/bmtk-vb/bmtk/analyzer/spike_trains.py
@@ -0,0 +1,16 @@
+import numpy as np
+import pandas as pd
+import h5py
+
+
+from bmtk.analyzer.visualization.spikes import plot_spikes as raster_plot
+from bmtk.analyzer.visualization.spikes import plot_rates as rates_plot
+from .io_tools import load_config
+from bmtk.utils.spike_trains import SpikesFile
+
+
+def to_dataframe(config_file, spikes_file=None):
+ config = load_config(config_file)
+ spikes_file = SpikesFile(config.spikes_file)
+ return spikes_file.to_dataframe()
+
diff --git a/bmtk-vb/bmtk/analyzer/spikes_analyzer.py b/bmtk-vb/bmtk/analyzer/spikes_analyzer.py
new file mode 100644
index 0000000..af77187
--- /dev/null
+++ b/bmtk-vb/bmtk/analyzer/spikes_analyzer.py
@@ -0,0 +1,127 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import numpy as np
+
+try:
+ from distutils.version import LooseVersion
+ use_sort_values = LooseVersion(pd.__version__) >= LooseVersion('0.19.0')
+
+except:
+ use_sort_values = False
+
+
+def spikes2dict(spikes_file):
+ spikes_df = pd.read_csv(spikes_file, sep=' ', names=['time', 'gid'])
+
+ if use_sort_values:
+ spikes_sorted = spikes_df.sort_values(['gid', 'time'])
+ else:
+ spikes_sorted = spikes_df.sort(['gid', 'time'])
+
+ spike_dict = {}
+ for gid, spike_train in spikes_sorted.groupby('gid'):
+ spike_dict[gid] = np.array(spike_train['time'])
+ return spike_dict
+
+
+def spike_files_equal(spikes_txt_1, spikes_txt_2, err=0.0001):
+ trial_1 = spikes2dict(spikes_txt_1)
+ trial_2 = spikes2dict(spikes_txt_2)
+ if set(trial_1.keys()) != set(trial_2.keys()):
+ return False
+
+ for gid, spike_train1 in trial_1.items():
+ spike_train2 = trial_2[gid]
+ if len(spike_train1) != len(spike_train2):
+ return False
+
+ for s1, s2 in zip(spike_train1, spike_train2):
+ if abs(s1 - s2) > err:
+ return False
+
+ return True
+
+
+def get_mean_firing_rates(spike_gids, node_ids, tstop_msec):
+
+ """
+ Compute mean firing rate over the duration of the simulation
+
+ :param spike_gids: gids of cells which spiked
+    :param node_ids: np.array of node_ids
+    :param tstop_msec: duration of the simulation in milliseconds
+
+ :return mean_firing_rate: np.array mean firing rates
+
+ """
+
+ min_gid = np.min(node_ids)
+ max_gid = np.max(node_ids)
+
+    gid_bins = np.arange(min_gid - 0.5, max_gid + 1.5, 1)
+    hist, bins = np.histogram(spike_gids, bins=gid_bins)
+
+ tstop_sec = tstop_msec*1E-3
+ mean_firing_rates = hist/tstop_sec
+
+ return mean_firing_rates
+
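+# Example (sketch): mean rates for nodes 0..99 over a 3000 ms simulation
+#
+#   rates = get_mean_firing_rates(spike_gids, np.arange(100), tstop_msec=3000.0)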
+
+
+def spikes_equal_in_window(spikes1,spikes2,twindow):
+ """
+ Compare spikes within a time window
+ :param spikes1: dict with "time" and "gid" arrays for raster 1
+ :param spikes2: dict with "time" and "gid" arrays for raster 2
+ :param twindow: [tstart,tend] time window
+
+ :return boolean: True if equal, False if different
+ """
+
+    ix1_window0 = np.where(spikes1["time"] > twindow[0])
+    ix1_window1 = np.where(spikes1["time"] < twindow[1])
+    ix2_window0 = np.where(spikes2["time"] > twindow[0])
+    ix2_window1 = np.where(spikes2["time"] < twindow[1])
+
+    ix_t = np.where((spikes[0] > tstart) & (spikes[0] < tend))
+
+ spike_times = spikes[0][ix_t]
+ spike_gids = spikes[1][ix_t]
+
+ for query, col in cmap.items():
+ query_df = nodes_df.query(query)
+ gids_query = query_df.index
+ print("{} ncells: {} {}".format(query, len(gids_query), col))
+
+ ix_g = np.in1d(spike_gids, gids_query)
+ ax.scatter(spike_times[ix_g], spike_gids[ix_g],
+ marker=marker,
+ # facecolors='none',
+ facecolors=col,
+ # edgecolors=col,
+ s=s,
+ label=query,
+ lw=lw)
diff --git a/bmtk-vb/bmtk/analyzer/visualization/spikes.py b/bmtk-vb/bmtk/analyzer/visualization/spikes.py
new file mode 100644
index 0000000..e7b34e9
--- /dev/null
+++ b/bmtk-vb/bmtk/analyzer/visualization/spikes.py
@@ -0,0 +1,499 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import csv
+import h5py
+from six import string_types
+import pandas as pd
+import numpy as np
+import matplotlib.pyplot as plt
+import matplotlib.cm as cmx
+import matplotlib.colors as colors
+import matplotlib.gridspec as gridspec
+
+import bmtk.simulator.utils.config as config
+
+from mpl_toolkits.axes_grid1 import make_axes_locatable
+
+def _create_node_table(node_file, node_type_file, group_key=None, exclude=[]):
+ """Creates a merged nodes.csv and node_types.csv dataframe with excluded items removed. Returns a dataframe."""
+ node_types_df = pd.read_csv(node_type_file, sep=' ', index_col='node_type_id')
+    nodes_h5 = h5py.File(node_file, 'r')
+    # TODO: Use utils.spikesReader
+    node_pop_name = list(nodes_h5['/nodes'].keys())[0]
+
+ nodes_grp = nodes_h5['/nodes'][node_pop_name]
+ # TODO: Need to be able to handle gid or node_id
+ nodes_df = pd.DataFrame({'node_id': nodes_grp['node_id'], 'node_type_id': nodes_grp['node_type_id']})
+ #nodes_df = pd.DataFrame({'node_id': nodes_h5['/nodes/node_gid'], 'node_type_id': nodes_h5['/nodes/node_type_id']})
+ nodes_df.set_index('node_id', inplace=True)
+
+ # nodes_df = pd.read_csv(node_file, sep=' ', index_col='node_id')
+ full_df = pd.merge(left=nodes_df, right=node_types_df, how='left', left_on='node_type_id', right_index=True)
+
+ if group_key is not None and len(exclude) > 0:
+        # Make sure the group_key exists as a column
+ if group_key not in full_df:
+ raise Exception('Could not find column {}'.format(group_key))
+
+ # remove any rows with matching column value
+ for cond in exclude:
+ full_df = full_df[full_df[group_key] != cond]
+
+ return full_df
+
+def _count_spikes(spikes_file, max_gid, interval=None):
+ def parse_line(line):
+ ts, gid = line.strip().split(' ')
+ return float(ts), int(gid)
+
+ if interval is None:
+ t_max = t_bounds_low = -1.0
+ t_min = t_bounds_high = 1e16
+ elif hasattr(interval, "__getitem__") and len(interval) == 2:
+ t_min = t_bounds_low = interval[0]
+ t_max = t_bounds_high = interval[1]
+    elif isinstance(interval, float):
+        t_max = t_min = t_bounds_low = interval
+        t_bounds_high = 1e16
+ else:
+ raise Exception("Unable to determine interval.")
+
+ max_gid = int(max_gid) # strange bug where max_gid was being returned as a float.
+    spikes = [[] for _ in range(max_gid+1)]
+ spike_sums = np.zeros(max_gid+1)
+ # TODO: Use utils.spikesReader
+ spikes_h5 = h5py.File(spikes_file, 'r')
+ #print spikes_h5['/spikes'].keys()
+ gid_ds = spikes_h5['/spikes/gids']
+ ts_ds = spikes_h5['/spikes/timestamps']
+
+ for i in range(len(gid_ds)):
+ ts = ts_ds[i]
+ gid = gid_ds[i]
+
+ if gid <= max_gid and t_bounds_low <= ts <= t_bounds_high:
+ spikes[gid].append(ts)
+ spike_sums[gid] += 1
+ t_min = ts if ts < t_min else t_min
+ t_max = ts if ts > t_max else t_max
+
+ """
+ with open(spikes_file, 'r') as fspikes:
+ for line in fspikes:
+ ts, gid = parse_line(line)
+ if gid <= max_gid and t_bounds_low <= ts <= t_bounds_high:
+ spikes[gid].append(ts)
+ spike_sums[gid] += 1
+ t_min = ts if ts < t_min else t_min
+ t_max = ts if ts > t_max else t_max
+ """
+ return spikes, spike_sums/(float(t_max-t_min)*1e-3)
+
+
+
+def plot_spikes_config(configure, group_key=None, exclude=[], save_as=None, show_plot=True):
+ if isinstance(configure, string_types):
+ conf = config.from_json(configure)
+ elif isinstance(configure, dict):
+ conf = configure
+ else:
+ raise Exception("configure variable must be either a json dictionary or json file name.")
+
+ cells_file_name = conf['internal']['nodes']
+ cell_models_file_name = conf['internal']['node_types']
+ spikes_file = conf['output']['spikes_ascii']
+
+ plot_spikes(cells_file_name, cell_models_file_name, spikes_file, group_key, exclude, save_as, show_plot)
+
+
+def plot_spikes(cells_file, cell_models_file, spikes_file, population=None, group_key=None, exclude=[], save_as=None,
+ show=True, title=None):
+ # check if can be shown and/or saved
+ #if save_as is not None:
+ # if os.path.exists(save_as):
+ # raise Exception('file {} already exists. Cannot save.'.format(save_as))
+
+ cm_df = pd.read_csv(cell_models_file, sep=' ')
+ cm_df.set_index('node_type_id', inplace=True)
+
+ cells_h5 = h5py.File(cells_file, 'r')
+ # TODO: Use sonata api
+ if population is None:
+ if len(cells_h5['/nodes']) > 1:
+ raise Exception('Multiple populations in nodes file. Please specify one to plot using population param')
+ else:
+            population = list(cells_h5['/nodes'].keys())[0]
+
+ nodes_grp = cells_h5['/nodes'][population]
+ c_df = pd.DataFrame({'node_id': nodes_grp['node_id'], 'node_type_id': nodes_grp['node_type_id']})
+ # c_df = pd.read_csv(cells_file, sep=' ')
+ c_df.set_index('node_id', inplace=True)
+ nodes_df = pd.merge(left=c_df,
+ right=cm_df,
+ how='left',
+ left_on='node_type_id',
+ right_index=True) # use 'model_id' key to merge, for right table the "model_id" is an index
+
+ # TODO: Uses utils.SpikesReader to open
+ spikes_h5 = h5py.File(spikes_file, 'r')
+ spike_gids = np.array(spikes_h5['/spikes/gids'], dtype=np.uint)
+ spike_times = np.array(spikes_h5['/spikes/timestamps'], dtype=np.float)
+ # spike_times, spike_gids = np.loadtxt(spikes_file, dtype='float32,int', unpack=True)
+ # spike_gids, spike_times = np.loadtxt(spikes_file, dtype='int,float32', unpack=True)
+
+ spike_times = spike_times * 1.0e-3
+
+ if group_key is not None:
+ if group_key not in nodes_df:
+ raise Exception('Could not find column {}'.format(group_key))
+ groupings = nodes_df.groupby(group_key)
+
+ n_colors = nodes_df[group_key].nunique()
+ color_norm = colors.Normalize(vmin=0, vmax=(n_colors-1))
+ scalar_map = cmx.ScalarMappable(norm=color_norm, cmap='hsv')
+ color_map = [scalar_map.to_rgba(i) for i in range(0, n_colors)]
+ else:
+ groupings = [(None, nodes_df)]
+ color_map = ['blue']
+
+ #marker = '.' if len(nodes_df) > 1000 else 'o'
+ marker = 'o'
+
+ # Create plot
+ gs = gridspec.GridSpec(2, 1, height_ratios=[7, 1])
+ ax1 = plt.subplot(gs[0])
+ gid_min = 10**10
+ gid_max = -1
+ for color, (group_name, group_df) in zip(color_map, groupings):
+ if group_name in exclude:
+ continue
+ group_min_gid = min(group_df.index.tolist())
+ group_max_gid = max(group_df.index.tolist())
+ gid_min = group_min_gid if group_min_gid <= gid_min else gid_min
+ gid_max = group_max_gid if group_max_gid > gid_max else gid_max
+
+ gids_group = group_df.index
+ indexes = np.in1d(spike_gids, gids_group)
+ ax1.scatter(spike_times[indexes], spike_gids[indexes], marker=marker, facecolors=color, label=group_name, lw=0, s=5)
+
+ #ax1.set_xlabel('time (s)')
+ ax1.axes.get_xaxis().set_visible(False)
+ ax1.set_ylabel('cell_id')
+ ax1.set_xlim([0, max(spike_times)])
+ ax1.set_ylim([gid_min, gid_max])
+ plt.legend(markerscale=2, scatterpoints=1)
+
+ ax2 = plt.subplot(gs[1])
+ plt.hist(spike_times, 100)
+ ax2.set_xlabel('time (s)')
+ ax2.set_xlim([0, max(spike_times)])
+ ax2.axes.get_yaxis().set_visible(False)
+ if title is not None:
+ ax1.set_title(title)
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show:
+ plt.show()
+
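+# Example usage (a sketch; the SONATA file names are assumptions):
+#
+#   plot_spikes('network/v1_nodes.h5', 'network/v1_node_types.csv',
+#               'output/spikes.h5', group_key='pop_name')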
+
+def plot_ratess(cells_file, cell_models_file, spikes_file, group_key='pop_name', exclude=['LIF_inh', 'LIF_exc'], save_as=None, show_plot=True):
+ #if save_as is not None:
+ # if os.path.exists(save_as):
+ # raise Exception('file {} already exists. Cannot save.'.format(save_as))
+
+ cm_df = pd.read_csv(cell_models_file, sep=' ')
+ cm_df.set_index('node_type_id', inplace=True)
+
+ c_df = pd.read_csv(cells_file, sep=' ')
+ c_df.set_index('node_id', inplace=True)
+ nodes_df = pd.merge(left=c_df,
+ right=cm_df,
+ how='left',
+ left_on='node_type_id',
+ right_index=True) # use 'model_id' key to merge, for right table the "model_id" is an index
+
+ for cond in exclude:
+ nodes_df = nodes_df[nodes_df[group_key] != cond]
+
+ groupings = nodes_df.groupby(group_key)
+ n_colors = nodes_df[group_key].nunique()
+ color_norm = colors.Normalize(vmin=0, vmax=(n_colors - 1))
+ scalar_map = cmx.ScalarMappable(norm=color_norm, cmap='hsv')
+ color_map = [scalar_map.to_rgba(i) for i in range(0, n_colors)]
+
+
+ spike_times, spike_gids = np.loadtxt(spikes_file, dtype='float32,int', unpack=True)
+ rates = np.zeros(max(spike_gids) + 1)
+ for ts, gid in zip(spike_times, spike_gids):
+ if ts < 500.0:
+ continue
+ rates[gid] += 1
+
+ for color, (group_name, group_df) in zip(color_map, groupings):
+ print(group_name)
+ print(group_df.index)
+ print(rates[group_df.index])
+ plt.plot(group_df.index, rates[group_df.index], '.', color=color)
+
+ plt.show()
+
+
+def plot_rates(cells_file, cell_models_file, spikes_file, group_key=None, exclude=[], interval=None, show=True,
+ title=None, save_as=None, smoothed=False):
+ def smooth(data, window=100):
+ h = int(window/2)
+ x_max = len(data)
+        return [np.mean(data[max(0, x-h):min(x_max, x+h)]) for x in range(0, x_max)]
+
+ nodes_df = _create_node_table(cells_file, cell_models_file, group_key, exclude)
+ _, spike_rates = _count_spikes(spikes_file, max(nodes_df.index), interval)
+
+ if group_key is not None:
+ groupings = nodes_df.groupby(group_key)
+ group_order = {k: i for i, k in enumerate(nodes_df[group_key].unique())}
+
+ n_colors = len(group_order)
+ color_norm = colors.Normalize(vmin=0, vmax=(n_colors-1))
+ scalar_map = cmx.ScalarMappable(norm=color_norm, cmap='hsv')
+ color_map = [scalar_map.to_rgba(i) for i in range(0, n_colors)]
+ ordered_groupings = [(group_order[name], c, name, df) for c, (name, df) in zip(color_map, groupings)]
+
+ else:
+ ordered_groupings = [(0, 'blue', None, nodes_df)]
+
+    keys = ['' for _ in range(len(ordered_groupings))]
+    means = [0 for _ in range(len(ordered_groupings))]
+    stds = [0 for _ in range(len(ordered_groupings))]
+ fig = plt.figure()
+ ax1 = fig.add_subplot(111)
+ for indx, color, group_name, group_df in ordered_groupings:
+ keys[indx] = group_name
+ means[indx] = np.mean(spike_rates[group_df.index])
+ stds[indx] = np.std(spike_rates[group_df.index])
+ y = smooth(spike_rates[group_df.index]) if smoothed else spike_rates[group_df.index]
+ ax1.plot(group_df.index, y, '.', color=color, label=group_name)
+
+ max_rate = np.max(spike_rates)
+    ax1.set_ylim(0, 50)  # alternatively: max_rate*1.3
+ ax1.set_ylabel('Hz')
+ ax1.set_xlabel('gid')
+ ax1.legend(fontsize='x-small')
+ if title is not None:
+ ax1.set_title(title)
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ plt.figure()
+    plt.errorbar(range(len(means)), means, stds, linestyle='None', marker='o')
+    plt.xlim(-0.5, len(means) - 0.5)
+    plt.ylim(0, 50.0)  # alternatively: max_rate*1.3
+    plt.xticks(range(len(means)), keys)
+ if title is not None:
+ plt.title(title)
+ if save_as is not None:
+ if save_as.endswith('.jpg'):
+ base = save_as[0:-4]
+ elif save_as.endswith('.jpeg'):
+ base = save_as[0:-5]
+ else:
+ base = save_as
+
+ plt.savefig('{}.summary.jpg'.format(base))
+ with open('{}.summary.csv'.format(base), 'w') as f:
+ f.write('population mean stddev\n')
+ for i, key in enumerate(keys):
+ f.write('{} {} {}\n'.format(key, means[i], stds[i]))
+
+ if show:
+ plt.show()
+
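+# Example usage (a sketch; the file names are assumptions):
+#
+#   plot_rates('network/v1_nodes.h5', 'network/v1_node_types.csv',
+#              'output/spikes.h5', group_key='pop_name', smoothed=True)
+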
+def plot_rates_popnet(cell_models_file, rates_file, model_keys=None, save_as=None, show_plot=True):
+ """Initial method for plotting popnet output
+
+ :param cell_models_file:
+ :param rates_file:
+ :param model_keys:
+ :param save_as:
+ :param show_plot:
+ :return:
+ """
+
+ pops_df = pd.read_csv(cell_models_file, sep=' ')
+ lookup_col = model_keys if model_keys is not None else 'node_type_id'
+ pop_keys = {str(r['node_type_id']): r[lookup_col] for _, r in pops_df.iterrows()}
+
+ # organize the rates file by population
+ # rates = {pop_name: ([], []) for pop_name in pop_keys.keys()}
+ rates_df = pd.read_csv(rates_file, sep=' ', names=['id', 'times', 'rates'])
+ for grp_key, grp_df in rates_df.groupby('id'):
+ grp_label = pop_keys[str(grp_key)]
+ plt.plot(grp_df['times'], grp_df['rates'], label=grp_label)
+
+ plt.legend(fontsize='x-small')
+ plt.xlabel('time (s)')
+ plt.ylabel('firing rates (Hz)')
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show_plot:
+ plt.show()
+
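+# Example usage (a sketch; the file names are assumptions):
+#
+#   plot_rates_popnet('network/pop_node_types.csv', 'output/firing_rates.csv',
+#                     model_keys='pop_name')
+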
+def plot_avg_rates(cell_models_file, rates_file, model_keys=None, save_as=None, show_plot=True):
+ pops_df = pd.read_csv(cell_models_file, sep=' ')
+ lookup_col = model_keys if model_keys is not None else 'node_type_id'
+ pop_keys = {str(r['node_type_id']): r[lookup_col] for _, r in pops_df.iterrows()}
+
+ # organize the rates file by population
+ rates = {pop_name: [] for pop_name in pop_keys.keys()}
+ with open(rates_file, 'r') as f:
+ reader = csv.reader(f, delimiter=' ')
+ for row in reader:
+ if row[0] in rates:
+ #rates[row[0]][0].append(row[1])
+ rates[row[0]].append(float(row[2]))
+
+ labels = []
+ means = []
+ stds = []
+ #print rates
+ for pop_name in pops_df['node_type_id'].unique():
+ r = rates[str(pop_name)]
+ if len(r) == 0:
+ continue
+
+ labels.append(pop_keys.get(str(pop_name), str(pop_name)))
+ means.append(np.mean(r))
+ stds.append(np.std(r))
+
+ plt.figure()
+    plt.errorbar(range(len(means)), means, stds, linestyle='None', marker='o')
+    plt.xlim(-0.5, len(means) - 0.5)
+    plt.xticks(range(len(means)), labels)
+ plt.ylabel('firing rates (Hz)')
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show_plot:
+ plt.show()
+
+
+def plot_tuning(sg_analysis, node, band, Freq=0, show=True, save_as=None):
+ def index_for_node(node, band):
+ if node == 's4':
+ mask = sg_analysis.node_table.node == node
+ else:
+ mask = (sg_analysis.node_table.node == node) & (sg_analysis.node_table.band == band)
+ return str(sg_analysis.node_table[mask].index[0])
+
+ index = index_for_node(node, band)
+
+ key = index + '/sg/tuning'
+ analysis_file = sg_analysis.get_tunings_file()
+
+    tuning_matrix = analysis_file[key][:, :, :, Freq]
+
+ n_or, n_sf, n_ph = tuning_matrix.shape
+
+ vmax = np.max(tuning_matrix[:, :, :])
+ vmin = np.min(tuning_matrix[:, :, :])
+
+ #fig, ax = plt.subplots(1, n_ph, figsize=(12, 16), sharex=True, sharey=True)
+ fig, ax = plt.subplots(1, n_ph, figsize=(13.9, 4.3), sharex=False, sharey=True)
+
+ print(sg_analysis.orientations)
+ for phase in range(n_ph):
+ tuning_to_plot = tuning_matrix[:, :, phase]
+
+ im = ax[phase].imshow(tuning_to_plot, interpolation='nearest', vmax=vmax, vmin=vmin)
+ ax[phase].set_xticklabels([0] + list(sg_analysis.spatial_frequencies))
+ ax[phase].set_yticklabels([0] + list(sg_analysis.orientations))
+
+ ax[phase].set_title('phase = {}'.format(sg_analysis.phases[phase]))
+ ax[phase].set_xlabel('spatial_frequency')
+ if phase == 0:
+ ax[phase].set_ylabel('orientation')
+
+ fig.subplots_adjust(right=0.90)
+ cbar_ax = fig.add_axes([0.92, 0.10, 0.02, 0.75])
+ cbar = fig.colorbar(im, cax=cbar_ax, ticks=[vmin, 0.0, vmax])
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show:
+ plt.show()
+
+
+ #config_file =
+# plot_spikes('../../examples/pointnet/example2/config.json', 'pop_name')
diff --git a/bmtk-vb/bmtk/analyzer/visualization/spikes.pyc b/bmtk-vb/bmtk/analyzer/visualization/spikes.pyc
new file mode 100644
index 0000000..d07cf0c
Binary files /dev/null and b/bmtk-vb/bmtk/analyzer/visualization/spikes.pyc differ
diff --git a/bmtk-vb/bmtk/analyzer/visualization/widgets.py b/bmtk-vb/bmtk/analyzer/visualization/widgets.py
new file mode 100644
index 0000000..bb9c909
--- /dev/null
+++ b/bmtk-vb/bmtk/analyzer/visualization/widgets.py
@@ -0,0 +1,114 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import matplotlib.pyplot as plt
+import scipy.interpolate as spinterp
+import numpy as np
+
+class PlotWidget(object):
+
+ def __init__(self, t_range, y_range, rate_ax=None, position_ax=None, metadata={}, location_markersize=5):
+
+ if rate_ax is None:
+ self.fig = plt.figure()
+ self.ax = self.fig.add_subplot(111)
+ else:
+ self.ax = rate_ax
+ self.position_ax = position_ax
+
+ self.t_range = t_range
+ self.y_range = y_range
+ self.interp_fcn = spinterp.interp1d(self.t_range, self.y_range)
+ self._t = None
+ self.metadata=metadata
+ self.artist_list = []
+ self.location_markersize = location_markersize
+
+ @property
+ def y(self):
+ return self.interp_fcn(self._t)
+
+ def initialize(self, t0, **kwargs):
+
+ self._t = t0
+ self.plot_data, = self.ax.plot(self.t_range,self.y_range,**kwargs)
+ self.vertical_rule_data, = self.ax.plot([self._t, self._t],self.ax.get_ylim(),'--r')
+ self.point_data, = self.ax.plot([self._t],[self.y],'*r')
+
+ self.artist_list = [self.plot_data, self.vertical_rule_data, self.point_data]
+
+        if self.position_ax is not None and 'position' in self.metadata:
+ x = self.metadata['position'][0]
+ y = self.metadata['position'][1]
+ self.location_point_data, = self.position_ax.plot([x],[y],'*r', markersize=self.location_markersize)
+ self.artist_list.append(self.location_point_data)
+
+
+ def update(self, t):
+
+ self._t = t
+        self.point_data.set_xdata([self._t])
+ self.vertical_rule_data.set_xdata([self._t, self._t])
+ self.vertical_rule_data.set_ydata(self.ax.get_ylim())
+
+ for data in self.artist_list:
+ self.ax.figure.canvas.blit(data)
+
+    def set_visible(self, visible_or_not):
+        for data in self.artist_list:
+ data.set_visible(visible_or_not)
+ self.ax.figure.canvas.blit(data)
+
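+# Example usage (a sketch): animate a rate trace over one second.
+#
+#   t_range = np.linspace(0.0, 1.0, 200)
+#   widget = PlotWidget(t_range, np.sin(2 * np.pi * t_range))
+#   widget.initialize(t0=0.0)
+#   for t in np.linspace(0.0, 1.0, 50):
+#       widget.update(t)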
+
+class MovieWidget(object):
+
+ def __init__(self, t_range, data, ax=None, metadata={}):
+
+ if ax is None:
+ self.fig = plt.figure()
+ self.ax = self.fig.add_subplot(111)
+ else:
+ self.ax = ax
+
+ self.t_range = t_range
+ self.frame_rate = 1./np.mean(np.diff(t_range))
+ self.data = data
+ self.ax.get_xaxis().set_visible(False)
+ self.ax.get_yaxis().set_visible(False)
+ self.metadata=metadata
+
+ def initialize(self, t0, vmin=-1, vmax=1, cmap=plt.cm.gray):
+
+ data = self.data[self.ti(t0),:,:]
+ self.im = self.ax.imshow(data, vmin=vmin, vmax=vmax, cmap=cmap)
+
+ def update(self, t):
+
+ data = self.data[self.ti(t),:,:]
+ self.im.set_data(data)
+ self.ax.figure.canvas.draw()
+
+ def ti(self, t):
+ return int(t*self.frame_rate) - int(self.t_range[0]*self.frame_rate)
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/builder/__init__.py b/bmtk-vb/bmtk/builder/__init__.py
new file mode 100644
index 0000000..1f7a3ed
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/__init__.py
@@ -0,0 +1,23 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .networks import DenseNetwork, NetworkBuilder
diff --git a/bmtk-vb/bmtk/builder/__init__.pyc b/bmtk-vb/bmtk/builder/__init__.pyc
new file mode 100644
index 0000000..a7e2b0b
Binary files /dev/null and b/bmtk-vb/bmtk/builder/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/builder/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/builder/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..e3807c1
Binary files /dev/null and b/bmtk-vb/bmtk/builder/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/__pycache__/connection_map.cpython-37.pyc b/bmtk-vb/bmtk/builder/__pycache__/connection_map.cpython-37.pyc
new file mode 100644
index 0000000..b21f72c
Binary files /dev/null and b/bmtk-vb/bmtk/builder/__pycache__/connection_map.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/__pycache__/connector.cpython-37.pyc b/bmtk-vb/bmtk/builder/__pycache__/connector.cpython-37.pyc
new file mode 100644
index 0000000..2234a88
Binary files /dev/null and b/bmtk-vb/bmtk/builder/__pycache__/connector.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/__pycache__/edge.cpython-37.pyc b/bmtk-vb/bmtk/builder/__pycache__/edge.cpython-37.pyc
new file mode 100644
index 0000000..3c5edfc
Binary files /dev/null and b/bmtk-vb/bmtk/builder/__pycache__/edge.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/__pycache__/functor_cache.cpython-37.pyc b/bmtk-vb/bmtk/builder/__pycache__/functor_cache.cpython-37.pyc
new file mode 100644
index 0000000..0068c8a
Binary files /dev/null and b/bmtk-vb/bmtk/builder/__pycache__/functor_cache.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/__pycache__/id_generator.cpython-37.pyc b/bmtk-vb/bmtk/builder/__pycache__/id_generator.cpython-37.pyc
new file mode 100644
index 0000000..2162f3c
Binary files /dev/null and b/bmtk-vb/bmtk/builder/__pycache__/id_generator.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/__pycache__/iterator.cpython-37.pyc b/bmtk-vb/bmtk/builder/__pycache__/iterator.cpython-37.pyc
new file mode 100644
index 0000000..48cf404
Binary files /dev/null and b/bmtk-vb/bmtk/builder/__pycache__/iterator.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/__pycache__/network.cpython-37.pyc b/bmtk-vb/bmtk/builder/__pycache__/network.cpython-37.pyc
new file mode 100644
index 0000000..25dbaff
Binary files /dev/null and b/bmtk-vb/bmtk/builder/__pycache__/network.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/__pycache__/node.cpython-37.pyc b/bmtk-vb/bmtk/builder/__pycache__/node.cpython-37.pyc
new file mode 100644
index 0000000..b2b4559
Binary files /dev/null and b/bmtk-vb/bmtk/builder/__pycache__/node.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/__pycache__/node_pool.cpython-37.pyc b/bmtk-vb/bmtk/builder/__pycache__/node_pool.cpython-37.pyc
new file mode 100644
index 0000000..1a7e2dc
Binary files /dev/null and b/bmtk-vb/bmtk/builder/__pycache__/node_pool.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/__pycache__/node_set.cpython-37.pyc b/bmtk-vb/bmtk/builder/__pycache__/node_set.cpython-37.pyc
new file mode 100644
index 0000000..fb80359
Binary files /dev/null and b/bmtk-vb/bmtk/builder/__pycache__/node_set.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/aux/__init__.py b/bmtk-vb/bmtk/builder/aux/__init__.py
new file mode 100644
index 0000000..2d56a26
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/aux/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/builder/aux/__init__.pyc b/bmtk-vb/bmtk/builder/aux/__init__.pyc
new file mode 100644
index 0000000..581d2e2
Binary files /dev/null and b/bmtk-vb/bmtk/builder/aux/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/builder/aux/edge_connectors.py b/bmtk-vb/bmtk/builder/aux/edge_connectors.py
new file mode 100644
index 0000000..7abba26
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/aux/edge_connectors.py
@@ -0,0 +1,56 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import random
+
+
+def distance_connector(source, target, d_weight_min, d_weight_max, d_max, nsyn_min, nsyn_max):
+ # Avoid self-connections.
+ sid = source.node_id
+ tid = target.node_id
+ if sid == tid:
+ return None
+
+ # first create weights by euclidean distance between cells
+ r = np.linalg.norm(np.array(source['positions']) - np.array(target['positions']))
+ if r > d_max:
+ dw = 0.0
+ else:
+ t = r / d_max
+ dw = d_weight_max * (1.0 - t) + d_weight_min * t
+
+ # drop the connection if the weight is too low
+ if dw <= 0:
+ return None
+
+ # filter out nodes by treating the weight as a probability of connection
+ if random.random() > dw:
+ return None
+
+ # Add the number of synapses for every connection.
+ tmp_nsyn = random.randint(nsyn_min, nsyn_max)
+ return tmp_nsyn
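+
+# A minimal usage sketch (illustrative, not part of this module): in bmtk, functions
+# like distance_connector are typically passed to Network.add_edges as the
+# connection_rule, with the remaining arguments bound via connection_params, e.g.:
+#
+#     net.add_edges(source={'ei': 'e'}, target={'ei': 'i'},
+#                   connection_rule=distance_connector,
+#                   connection_params={'d_weight_min': 0.0, 'd_weight_max': 0.34,
+#                                      'd_max': 300.0, 'nsyn_min': 3, 'nsyn_max': 7})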
+
+
+def connect_random(source, target, nsyn_min=0, nsyn_max=10, distribution=None):
+    # NOTE: np.random.randint samples from [nsyn_min, nsyn_max); the `distribution`
+    # argument is accepted but currently unused.
+    return np.random.randint(nsyn_min, nsyn_max)
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/builder/aux/edge_connectors.pyc b/bmtk-vb/bmtk/builder/aux/edge_connectors.pyc
new file mode 100644
index 0000000..71f9849
Binary files /dev/null and b/bmtk-vb/bmtk/builder/aux/edge_connectors.pyc differ
diff --git a/bmtk-vb/bmtk/builder/aux/node_params.py b/bmtk-vb/bmtk/builder/aux/node_params.py
new file mode 100644
index 0000000..0ce1f4f
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/aux/node_params.py
@@ -0,0 +1,38 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import math
+
+
+def positions_columinar(N=1, center=[0.0, 50.0, 0.0], height=100.0, min_radius=0.0, max_radius=1.0, distribution='uniform'):
+ phi = 2.0 * math.pi * np.random.random([N])
+ r = np.sqrt((min_radius**2 - max_radius**2) * np.random.random([N]) + max_radius**2)
+ x = center[0] + r * np.cos(phi)
+ z = center[2] + r * np.sin(phi)
+ y = center[1] + height * (np.random.random([N]) - 0.5)
+
+ return np.column_stack((x, y, z))
+
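+# A hedged usage sketch (names are illustrative): the returned (N, 3) array is
+# typically handed to Network.add_nodes as the nodes' positions, e.g.:
+#
+#     pos = positions_columinar(N=100, center=[0.0, 50.0, 0.0], height=100.0, max_radius=30.0)
+#     net.add_nodes(N=100, positions=pos, model_type='biophysical')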
+
+def xiter_random(N=1, min_x=0.0, max_x=1.0):
+ return np.random.uniform(low=min_x, high=max_x, size=(N,))
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/builder/aux/node_params.pyc b/bmtk-vb/bmtk/builder/aux/node_params.pyc
new file mode 100644
index 0000000..428a4ba
Binary files /dev/null and b/bmtk-vb/bmtk/builder/aux/node_params.pyc differ
diff --git a/bmtk-vb/bmtk/builder/bionet/__init__.py b/bmtk-vb/bmtk/builder/bionet/__init__.py
new file mode 100644
index 0000000..324aace
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/bionet/__init__.py
@@ -0,0 +1 @@
+from .swc_reader import SWCReader  # relative import (required under Python 3)
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/builder/bionet/swc_reader.py b/bmtk-vb/bmtk/builder/bionet/swc_reader.py
new file mode 100644
index 0000000..4833a1d
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/bionet/swc_reader.py
@@ -0,0 +1,81 @@
+import numpy as np
+from neuron import h
+
+from bmtk.simulator.bionet import nrn
+from bmtk.simulator.bionet.morphology import Morphology
+
+
+class SWCReader(object):
+ def __init__(self, swc_file, random_seed=10, fix_axon=True):
+ nrn.load_neuron_modules(None, None)
+ self._swc_file = swc_file
+ self._hobj = h.Biophys1(swc_file)
+ if fix_axon:
+ self._fix_axon()
+
+ self._morphology = Morphology(self._hobj)
+ self._morphology.set_seg_props()
+ self._morphology.calc_seg_coords()
+ self._prng = np.random.RandomState(random_seed)
+
+ self._secs = []
+ self._save_sections()
+
+ def _save_sections(self):
+ for sec in self._hobj.all:
+ for _ in sec:
+ self._secs.append(sec)
+
+ def _fix_axon(self):
+ """Removes and refixes axon"""
+ axon_diams = [self._hobj.axon[0].diam, self._hobj.axon[0].diam]
+ for sec in self._hobj.all:
+ section_name = sec.name().split(".")[1][:4]
+ if section_name == 'axon':
+ axon_diams[1] = sec.diam
+
+ for sec in self._hobj.axon:
+ h.delete_section(sec=sec)
+
+        h.execute('create axon[2]', self._hobj)
+        for index, sec in enumerate(self._hobj.axon):
+            sec.L = 30
+            sec.diam = 1  # NOTE: the axon_diams measured above are collected but not applied here
+
+            self._hobj.axonal.append(sec=sec)
+            self._hobj.all.append(sec=sec)
+
+ self._hobj.axon[0].connect(self._hobj.soma[0], 1.0, 0)
+ self._hobj.axon[1].connect(self._hobj.axon[0], 1.0, 0)
+
+ h.define_shape()
+
+ def find_sections(self, section_names, distance_range):
+ return self._morphology.find_sections(section_names, distance_range)
+
+ def choose_sections(self, section_names, distance_range, n_sections=1):
+ secs, probs = self.find_sections(section_names, distance_range)
+ secs_ix = self._prng.choice(secs, n_sections, p=probs)
+ return secs_ix, self._morphology.seg_prop['x'][secs_ix]
+
+ def get_coord(self, sec_ids, sec_xs, soma_center=(0.0, 0.0, 0.0), rotations=None):
+ adjusted = self._morphology.get_soma_pos() - np.array(soma_center)
+ absolute_coords = []
+ for sec_id, sec_x in zip(sec_ids, sec_xs):
+ sec = self._secs[sec_id]
+ n_coords = int(h.n3d(sec=sec))
+ coord_indx = int(sec_x*(n_coords - 1))
+            swc_coords = np.array([h.x3d(coord_indx, sec=sec), h.y3d(coord_indx, sec=sec), h.z3d(coord_indx, sec=sec)])
+ absolute_coords.append(swc_coords - adjusted)
+
+ if rotations is not None:
+ raise NotImplementedError
+
+ return absolute_coords
+
+ def get_dist(self, sec_ids):
+ return [self._morphology.seg_prop['dist'][sec_id] for sec_id in sec_ids]
+
+ def get_type(self, sec_ids):
+ return [self._morphology.seg_prop['type'][sec_id] for sec_id in sec_ids]
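+
+# A minimal usage sketch (assumes NEURON is installed and 'cell.swc' is a valid
+# morphology file; the names and values are illustrative):
+#
+#     reader = SWCReader('cell.swc')
+#     sec_ids, sec_xs = reader.choose_sections(['dend', 'apic'], (50.0, 150.0), n_sections=5)
+#     coords = reader.get_coord(sec_ids, sec_xs, soma_center=(0.0, 0.0, 0.0))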
+
diff --git a/bmtk-vb/bmtk/builder/connection_map.py b/bmtk-vb/bmtk/builder/connection_map.py
new file mode 100644
index 0000000..863cf26
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/connection_map.py
@@ -0,0 +1,153 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from . import connector
+from . import iterator
+
+
+class ConnectionMap(object):
+ """Class for keeping track of connection rules.
+
+ For every connection from source --> target this keeps track of rules (functions, literals, lists) for
+ 1. the number of synapses between source and target
+    2. User-defined parameters (syn_weight, synaptic location) for every synapse.
+
+    The number-of-synapses rule (1) is stored as a connector. Individual synaptic parameters, if they exist, are
+    stored as ParamsRules.
+ """
+
+ class ParamsRules(object):
+ """A subclass to store indvidiual synpatic parameter rules"""
+ def __init__(self, names, rule, rule_params, dtypes):
+ self._names = names
+ self._rule = rule
+ self._rule_params = rule_params
+ self._dtypes = self.__create_dtype_dict(names, dtypes)
+
+ def __create_dtype_dict(self, names, dtypes):
+ if isinstance(names, list):
+ # TODO: compare size of names and dtypes
+ return {n: dt for n, dt in zip(names, dtypes)}
+ else:
+ return {names: dtypes}
+
+ @property
+ def names(self):
+ return self._names
+
+ @property
+ def rule(self):
+ return connector.create(self._rule, **(self._rule_params or {}))
+
+ @property
+ def dtypes(self):
+ return self._dtypes
+
+ def get_prop_dtype(self, prop_name):
+ return self._dtypes[prop_name]
+
+ def __init__(self, sources=None, targets=None, connector=None, connector_params=None, iterator='one_to_one',
+ edge_type_properties=None):
+ self._source_nodes = sources # source nodes
+ self._target_nodes = targets # target nodes
+ self._connector = connector # function, list or value that determines connection between sources and targets
+ self._connector_params = connector_params # parameters passed into connector
+ self._iterator = iterator # rule for iterating between sources and targets
+ self._edge_type_properties = edge_type_properties
+
+ self._params = []
+ self._param_keys = []
+
+ @property
+ def params(self):
+ return self._params
+
+ @property
+ def source_nodes(self):
+ return self._source_nodes
+
+ @property
+ def source_network_name(self):
+ return self._source_nodes.network_name
+
+ @property
+ def target_nodes(self):
+ return self._target_nodes
+
+ @property
+ def target_network_name(self):
+ return self._target_nodes.network_name
+
+ @property
+ def connector(self):
+ return self._connector
+
+ @property
+ def connector_params(self):
+ return self._connector_params
+
+ @property
+ def iterator(self):
+ return self._iterator
+
+ @property
+ def edge_type_properties(self):
+ return self._edge_type_properties or {}
+
+ @property
+ def edge_type_id(self):
+ # TODO: properly implement edge_type
+ return self._edge_type_properties['edge_type_id']
+
+ @property
+ def property_names(self):
+ if len(self._param_keys) == 0:
+ return ['nsyns']
+ else:
+ return self._param_keys
+
+ def properties_keys(self):
+ ordered_keys = sorted(self.property_names)
+ return str(ordered_keys)
+
+ def max_connections(self):
+ return len(self._source_nodes) * len(self._target_nodes)
+
+ def add_properties(self, names, rule, rule_params=None, dtypes=None):
+ """A a synaptic property
+
+ :param names: list, or single string, of the property
+ :param rule: function, list or value of property
+ :param rule_params: when rule is a function, rule_params will be passed into function when called.
+ :param dtypes: expected property type
+ """
+ self._params.append(self.ParamsRules(names, rule, rule_params, dtypes))
+        self._param_keys += names if isinstance(names, list) else [names]
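+
+        # A hedged example (illustrative names): attach a per-synapse weight drawn by a
+        # user-supplied function my_weight_fn(source, target, mu):
+        #
+        #     cm.add_properties(names=['syn_weight'], rule=my_weight_fn,
+        #                       rule_params={'mu': 1.0e-4}, dtypes=[float])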
+
+ def connection_itr(self):
+ """Returns a generator that will iterate through the source/target pairs (as specified by the iterator function,
+ and create a connection rule based on the connector.
+ """
+ conr = connector.create(self.connector, **(self.connector_params or {}))
+ itr = iterator.create(self.iterator, conr, **({}))
+ return itr(self.source_nodes, self.target_nodes, conr)
diff --git a/bmtk-vb/bmtk/builder/connection_map.pyc b/bmtk-vb/bmtk/builder/connection_map.pyc
new file mode 100644
index 0000000..222a2ff
Binary files /dev/null and b/bmtk-vb/bmtk/builder/connection_map.pyc differ
diff --git a/bmtk-vb/bmtk/builder/connector.py b/bmtk-vb/bmtk/builder/connector.py
new file mode 100644
index 0000000..0d2cfd6
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/connector.py
@@ -0,0 +1,35 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from . import functor_cache
+
+
+def create(connector, **params):
+ return CONNECTOR_CACHE.create(connector, **params)
+
+
+def register(name, func):
+ CONNECTOR_CACHE.register(name, func)
+
+
+CONNECTOR_CACHE = functor_cache.FunctorCache()
+register('passthrough', lambda *_: {})
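+
+
+# A minimal sketch of the registry in action ('fixed_nsyns' is a made-up name used
+# only for illustration):
+if __name__ == '__main__':
+    register('fixed_nsyns', lambda src, trg, n: n)
+    fixed = create('fixed_nsyns', n=5)  # functools.partial with n bound
+    assert fixed(None, None) == 5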
diff --git a/bmtk-vb/bmtk/builder/connector.pyc b/bmtk-vb/bmtk/builder/connector.pyc
new file mode 100644
index 0000000..28b9932
Binary files /dev/null and b/bmtk-vb/bmtk/builder/connector.pyc differ
diff --git a/bmtk-vb/bmtk/builder/edge.py b/bmtk-vb/bmtk/builder/edge.py
new file mode 100644
index 0000000..31265a9
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/edge.py
@@ -0,0 +1,66 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+
+class Edge(object):
+ def __init__(self, src_gid, trg_gid, edge_type_props, syn_props):
+ self.__src_gid = src_gid
+ self.__trg_gid = trg_gid
+ self.__edge_type_props = edge_type_props
+ self.__syn_props = syn_props
+
+ @property
+ def source_gid(self):
+ return self.__src_gid
+
+ @property
+ def target_gid(self):
+ return self.__trg_gid
+
+ @property
+ def edge_type_properties(self):
+ return self.__edge_type_props
+
+ @property
+ def edge_type_id(self):
+ return self.edge_type_properties['edge_type_id']
+
+ @property
+ def synaptic_properties(self):
+ return self.__syn_props
+
+ def __contains__(self, item):
+ return item in self.edge_type_properties or item in self.synaptic_properties
+
+ def __getitem__(self, item):
+ if item in self.edge_type_properties:
+ return self.edge_type_properties[item]
+ elif item in self.synaptic_properties:
+ return self.synaptic_properties[item]
+ else:
+ return None
+
+ def __repr__(self):
+ rstr = "{} --> {} ('edge_type_id': {}, ".format(self.source_gid, self.target_gid, self.edge_type_id)
+ rstr += "{}: {}" ', '.join("'{}': {}".format(k, v) for k, v in self.synaptic_properties.items())
+ return rstr + ")"
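+
+
+# A short, hedged demonstration of the Edge container (hypothetical values):
+if __name__ == '__main__':
+    e = Edge(src_gid=0, trg_gid=1, edge_type_props={'edge_type_id': 100}, syn_props={'nsyns': 4})
+    assert 'nsyns' in e and e['nsyns'] == 4
+    print(e)  # 0 --> 1 ('edge_type_id': 100, 'nsyns': 4)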
diff --git a/bmtk-vb/bmtk/builder/edge.pyc b/bmtk-vb/bmtk/builder/edge.pyc
new file mode 100644
index 0000000..860eab3
Binary files /dev/null and b/bmtk-vb/bmtk/builder/edge.pyc differ
diff --git a/bmtk-vb/bmtk/builder/formats/__init__.py b/bmtk-vb/bmtk/builder/formats/__init__.py
new file mode 100644
index 0000000..6480e34
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/formats/__init__.py
@@ -0,0 +1,246 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+""" network2.format
+
+The XFormat classes are implemented within the Network class to allow network objects to handle different data types.
+Each class should be able to control both the input and output file formats (json, csv, h5, etc.) and the expected
+parameters, including their corresponding order.
+
+Example:
+ net = Network(format=ISeeFormat)
+ ...
+ net.save(cells="cells.csv", models="cell_models.csv", connections="connections.h5")
+
+Todo:
+ * change network.load(cls) to be format specific.
+"""
+import csv
+import h5py
+import numpy as np
+import json
+import pandas as pd
+
+from ..node import Node
+
+from .iformats import IFormat  # relative import (required under Python 3)
+
+
+class DefaultFormat(IFormat):
+ def save_nodes(self, file_name):
+ raise NotImplementedError()
+
+ def save_edges(self, file_name):
+ raise NotImplementedError()
+
+ def save(self, file_name):
+ raise NotImplementedError()
+
+
+class ISeeFormat(IFormat):
+ """Controls the output of networks that will be used in the isee_engine simulator.
+
+    The nodes are saved in cells and cell_models csv files with a predefined format. The edges/connections are
+    saved in a connections h5 file.
+ """
+ def save_cells(self, filename, columns, position_labels=None):
+ """Saves nodes/cell information and their model type metadata.
+
+        :param filename: name of csv file where cell information will be saved.
+        :param columns: node properties to write; a 'position' column is expanded using position_labels.
+        :param position_labels: labels for the position components (e.g. ['x', 'y', 'z']).
+ """
+ # TODO: add checks and warnings if parameters are missing.
+ with open(filename, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=' ')
+ header = []
+ for col in columns:
+ if col == 'position':
+ for label in position_labels:
+ if label:
+ header.append(label)
+ else:
+ header.append(col)
+ csvw.writerow(header)
+ for nid, params in self._network.nodes():
+ row_array = []
+ for col in columns:
+ if col == 'position':
+ for i, label in enumerate(position_labels):
+ if label:
+ row_array.append(params['position'][i])
+ else:
+ row_array.append(params[col])
+
+ csvw.writerow(row_array)
+
+ def save_types(self, filename, columns, key=None):
+ seen_types = set()
+
+ with open(filename, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=' ')
+ csvw.writerow(columns)
+ #csvw.writerow(['model_id', 'electrophysiology' 'level_of_detail', 'morphology', 'rotation_angle_zaxis'])
+ for node_set in self._network._node_sets:
+            props = node_set.properties
+
+ if key is not None:
+ key_val = props.get(key, None)
+ if key_val is not None and key_val in seen_types:
+ continue
+ else:
+ seen_types.add(key_val)
+
+ row_array = []
+ for col in columns:
+ row_array.append(props.get(col, 'NA'))
+ csvw.writerow(row_array)
+
+ def save_edges(self, filename, include_nsyns=True):
+ """Saves connection information into h5 format
+
+ :param filename: Name of h5 file where connection information will be stored.
+ :param include_nsyns: setting to false will omit the nsyns table in the h5 file, default
+ true (nsyn table included).
+ """
+ print("save_edges")
+
+ n_nodes = self._network.nnodes
+ n_edges = self._network.nedges
+
+ # TODO: check the order of the node list
+
+ print("> building tables with %d nodes and %d edges" % (self._network.nnodes, self._network.nedges))
+ indptr_table = [0]
+ nsyns_table = []
+ src_gids_table = []
+ edge_types_table = []
+ for trg in self._network.nodes():
+ tid = trg[1]['id']
+ for edges in self._network.edges([tid], rank=1):
+ src_gids_table.append(edges[0])
+ nsyns_table.append(edges[2])
+ edge_types_table.append(edges[3])
+
+ #if len(src_gids_table) == indptr_table[-1]:
+ # print "node %d doesn't have any edges" % (tid)
+ indptr_table.append(len(src_gids_table))
+
+
+ print("> saving tables to %s" % (filename))
+
+ with h5py.File(filename, 'w') as hf:
+ hf.create_dataset('edge_ptr', data=indptr_table)
+ if include_nsyns:
+ hf.create_dataset('num_syns', data=nsyns_table)
+ hf.create_dataset('src_gids', data=src_gids_table)
+ hf.create_dataset('edge_types', data=edge_types_table)
+ hf.attrs["shape"] = (n_nodes, n_nodes)
+
+
+ """
+ temp = np.empty([n_edges, 3])
+ for i, edge in enumerate(self._network.edges()):
+ temp[i, 0] = edge[0]
+ temp[i, 1] = edge[1]
+ temp[i, 2] = edge[2]
+
+ src_gids_new = np.array([])
+ nsyns_new = np.array([])
+ indptr_new = []
+ counter = 0
+ indptr_new.append(counter)
+ print "Building database"
+ for i in range(n_nodes):
+ indicies = np.where(temp[:, 1] == i)
+
+ src_gids_new = np.concatenate([src_gids_new, np.array(temp[indicies[0], 0])])
+ nsyns_new = np.concatenate([nsyns_new, np.array(temp[indicies[0], 2])])
+
+ counter += np.size(indicies[0])
+ indptr_new.append(counter)
+
+ print "Writing to h5"
+
+ indptr_new = np.array(indptr_new)
+
+ src_gids_new = src_gids_new.astype(int)
+ print src_gids_new
+ exit()
+
+ nsyns_new = nsyns_new.astype(int)
+ indptr_new = indptr_new.astype(int)
+
+ with h5py.File(filename, 'w') as hf:
+ hf.create_dataset('indptr', data=indptr_new)
+ if include_nsyns:
+ hf.create_dataset('nsyns', data=nsyns_new)
+ hf.create_dataset('src_gids', data=src_gids_new)
+ hf.attrs["shape"] = (n_nodes, n_nodes)
+ """
+
+ def save(self, cells_fname, cell_models_fname, connections_fname, include_nsyns=True):
+ """Saves node (cells) and connection information to files.
+
+ :param cells_fname: name of csv file where cell information will be saved.
+ :param cell_models_fname: name of csv file where cell model information will be saved.
+ :param connections_fname: Name of h5 file where connection information will be stored.
+ :param include_nsyns: set to False to build h5 without nsyn table.
+ """
+ #self.save_nodes(cells_fname, cell_models_fname)
+ self.save_edges(connections_fname, include_nsyns)
+
+ def load(self, nodes, edge_types=None, node_types=None, edges=None, positions=None):
+ # TODO: check imported ids
+
+ df = pd.read_csv(nodes, sep=' ')
+ if node_types is not None:
+ types_df = pd.read_csv(node_types, sep=' ', index_col='node_type_id')
+ df = pd.merge(left=df, right=types_df, how='left', left_on='node_type_id', right_index=True)
+
+ gids_df = df['node_id'] if 'node_id' in df.columns else df['id']
+ #df = df.drop(['id'], axis=1)
+
+ positions_df = None
+ if positions:
+ positions_df = df[positions]
+ df = df.drop(positions, axis=1)
+
+ node_params = df.to_dict(orient='records')
+        node_tuples = [Node(gids_df[i], gids_df[i], None, array_params=node_params[i])
+                       for i in range(df.shape[0])]
+
+        if positions:
+            # NOTE: assumes a PositionSet implementation is available as position_set.PositionSet;
+            # the original module does not import it here.
+            self._network.positions = position_set.PositionSet()
+            self._network.positions.add(positions_df.values, gids_df.tolist())
+
+            for i in range(df.shape[0]):
+                node_tuples[i]['position'] = np.array(positions_df.loc[i])
+
+ self._network.positions.finalize()
+
+ self._network._initialize()
+ self._network._add_nodes(node_tuples)
+ self._network.nodes_built = True
+
diff --git a/bmtk-vb/bmtk/builder/formats/hdf5_format.py b/bmtk-vb/bmtk/builder/formats/hdf5_format.py
new file mode 100644
index 0000000..a0227ca
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/formats/hdf5_format.py
@@ -0,0 +1,423 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import csv
+import json
+import math
+import h5py
+import pandas as pd
+from ast import literal_eval
+from six import string_types
+
+import bmtk
+from .iformats import IFormat
+from bmtk.builder.node_pool import NodePool
+from time import gmtime, strftime
+
+
+class HDF5Format(IFormat):
+ """
+ Format prior to Blue-brain project collaboration.
+ Saves as:
+ nodes (csv)
+ node_types (csv)
+ edge_types (csv)
+ edges (h5)
+ """
+
+ CSV_DELIMITER = ' '
+ COL_NODE_TYPE_ID = 'node_type_id'
+ COL_EDGE_TYPE_ID = 'edge_type_id'
+ COL_TARGET_QUERY = 'target_query'
+ COL_SOURCE_QUERY = 'source_query'
+ COL_NODE_ID = 'node_id'
+ BASE_DIR = 'network'
+
+ @property
+ def format(self):
+ return 'msdk.HDF5Format'
+
+ def save(self, directory, **kwargs):
+ """ saves nodes.csv, node_types.csv, edges.h5, edge_types.csv and .metadata.json. Will overwrite existing files.
+
+        :param directory: Directory where all the files will be saved; created if it doesn't exist.
+ :param kwargs:
+ """
+ if directory is None:
+ base_path = os.path.join(self.BASE_DIR, self._network.name)
+ else:
+ base_path = directory
+
+ metadata = {
+ 'version': bmtk.__version__,
+ 'name': self._network.name,
+ 'date_created': strftime("%Y-%m-%d %H:%M:%S", gmtime()),
+ 'file_format': self.format,
+ 'network_class': self._network.__class__.__name__
+ }
+
+ # save node-types.
+ node_types_path = os.path.join(base_path, 'node_types.csv')
+ self.save_node_types(node_types_path, **kwargs)
+ metadata['node_types_file'] = 'node_types.csv'
+
+ # save individual nodes.
+ if self._network.nodes_built:
+ # make sure nodes have been built
+ nodes_path = os.path.join(base_path, 'nodes.csv')
+ self.save_nodes(nodes_path, **kwargs)
+ metadata['nodes_file'] = 'nodes.csv'
+ else:
+ print('Nodes not built. Unable to save to nodes.csv.')
+
+ # save edge-types.
+ edge_types_path = os.path.join(base_path, 'edge_types.csv')
+ self.save_edge_types(edge_types_path, **kwargs)
+ metadata['edge_types_file'] = 'edge_types.csv'
+
+ # save edges if they have been built
+ if self._network.edges_built:
+ edges_path = os.path.join(base_path, 'edges.h5')
+ self.save_edges(edges_path, **kwargs)
+ metadata['edges_file'] = 'edges.h5'
+ else:
+ print('Edges not built. Unable to save to edges.h5.')
+
+ # save the metadata file
+ metadata_path = os.path.join(base_path, '.metadata.json')
+ with open(metadata_path, 'w') as mdfile:
+ json.dump(metadata, mdfile, indent=2)
+
+ def save_node_types(self, file_name, columns=None, **kwargs):
+ """Write node_types to csv.
+
+ :param file_name: path to csv file. Will be overwritten if it exists
+        :param columns: optional columns (not incl. mandatory ones). If None, all node-type properties are used.
+ :param kwargs: optional
+ """
+ self.__checkpath(file_name, **kwargs)
+
+ # csv should always start with node_type_id
+ manditory_cols = [self.COL_NODE_TYPE_ID]
+
+ # Determine which columns are in the node_types file and their order
+ nt_properties = self._network.node_type_properties
+ opt_cols = []
+ if columns is None:
+ # use all node type properties
+ opt_cols = list(nt_properties)
+ else:
+ # check that columns specified by user exists
+ for col_name in columns:
+ if col_name not in nt_properties:
+ raise Exception('No node property {} found in network, cannot save {}.'.format(col_name, file_name))
+ else:
+ opt_cols.append(col_name)
+
+ # write to csv iteratively
+ cols = manditory_cols + opt_cols
+ with open(file_name, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=self.CSV_DELIMITER)
+ csvw.writerow(cols)
+ for node_set in self._network._node_sets:
+ props = node_set.properties
+ row = []
+ for cname in cols:
+                    # TODO: determine dtype of parameters so we can use the appropriate none value
+                    row.append(props.get(cname, 'NA'))  # get column value, or NA if it doesn't exist for this node
+ csvw.writerow(row)
+
+ def save_nodes(self, file_name, columns=None, **kwargs):
+ """Write nodes to csv.
+
+ :param file_name: path to csv file. Will be overwritten if it exists
+        :param columns: optional columns (not incl. mandatory ones). If None, all node parameters are used.
+ :param kwargs: optional
+ """
+ self.__checkpath(file_name, **kwargs)
+
+ # csv will start with node_id and node_type_id
+ manditory_columns = [self.COL_NODE_ID, self.COL_NODE_TYPE_ID]
+
+ # optional columns from either node params or node-type properties
+ opt_columns = []
+ if columns is None:
+ opt_columns = list(self._network.node_params)
+ else:
+ all_cols = self._network.node_params | self._network.node_type_properties
+ for col_name in columns:
+ if col_name not in all_cols:
+ # verify params/properties exist
+                    raise Exception('No node property {} found in network, cannot save {}.'.format(col_name, file_name))
+ else:
+ opt_columns.append(col_name)
+
+ # write to csv
+ with open(file_name, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=self.CSV_DELIMITER)
+ csvw.writerow(manditory_columns + opt_columns)
+ for nid, node in self._network.nodes():
+ row = [node.node_id, node.node_type_id]
+ for cname in opt_columns:
+ row.append(node.get(cname, 'NA'))
+ csvw.writerow(row)
+
+ def save_edge_types(self, file_name, columns=None, **kwargs):
+ """Write edge-types to csv.
+
+ :param file_name: path to csv file. Will be overwritten if it exists
+        :param columns: optional columns (not incl. mandatory ones). If None, all edge-type properties are used.
+ :param kwargs: optional
+ """
+ self.__checkpath(file_name, **kwargs)
+
+ # start with edge_type_id, target_query and source_query
+ manditory_cols = [self.COL_EDGE_TYPE_ID, self.COL_TARGET_QUERY, self.COL_SOURCE_QUERY]
+
+ # optional columns
+ edge_props = self._network.edge_type_properties
+ opt_cols = []
+ if columns is None:
+ opt_cols = list(edge_props)
+ else:
+ for col_name in columns:
+ if col_name not in edge_props:
+ raise Exception('No edge property {} found in network, cannot save {}.'.format(col_name, file_name))
+ else:
+ opt_cols.append(col_name)
+
+ # write to csv by iteratively going through all edge-types
+ with open(file_name, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=self.CSV_DELIMITER)
+ csvw.writerow(manditory_cols + opt_cols)
+ for et in self._network._edge_sets:
+ edge = et['edge']
+            targetnodes = edge.targets  # get targets as a NodePool to recover the target_query string
+            sourcenodes = edge.sources  # same for the sources
+ row_array = [edge.id, targetnodes.filter_str, sourcenodes.filter_str]
+ edge_params = edge.parameters
+ for col in opt_cols:
+ row_array.append(edge_params.get(col, 'NA'))
+ csvw.writerow(row_array)
+
+ def save_edges(self, file_name, **kwargs):
+ """Saves edges to edges.h5
+
+ :param file_name: path to hdf5 file. Will be overwritten if it exists
+ :param kwargs: optional
+ """
+ self.__checkpath(file_name, **kwargs)
+
+ # Get sources, targets, nsyns and edge_type_id for all edges.
+ print("> building tables with %d nodes and %d edges" % (self._network.nnodes, self._network.nedges))
+ indptr_table = [0]
+ nsyns_table = []
+ src_gids_table = []
+ edge_types_table = []
+ for trg in self._network.nodes():
+ # the targets have to be ordered.
+ tid = trg[1].node_id
+ for edges in self._network.edges([tid], rank=1):
+ src_gids_table.append(edges[0])
+ nsyns_table.append(edges[2])
+ edge_types_table.append(edges[3])
+
+ indptr_table.append(len(src_gids_table))
+
+ # save to h5
+ print("> saving tables to %s" % (file_name))
+ with h5py.File(file_name, 'w') as hf:
+ hf.create_dataset('edge_ptr', data=indptr_table)
+ hf.create_dataset('num_syns', data=nsyns_table)
+ hf.create_dataset('src_gids', data=src_gids_table)
+ hf.create_dataset('edge_types', data=edge_types_table)
+
+ def __checkpath(self, file_name, **kwargs):
+ """Makes sure file_name is a valid file path and can be written."""
+        dir_path = os.path.dirname(file_name)
+        if dir_path and not os.path.exists(dir_path):
+ # create file's directory if it doesn't exist
+ os.makedirs(dir_path)
+
+ def __load_nodes(self, nodes_file, node_types_file):
+ """Loads nodes and node_types from exists files
+
+ :param nodes_file: path to nodes csv
+ :param node_types_file: path to node_types csv
+ """
+ def eval(val):
+            # Helper function that converts a csv cell to an appropriate type. Helpful for cells of lists (positions, etc.)
+ # TODO: keep column dtypes in metadata and use that for converting each column
+ if isinstance(val, float) and math.isnan(val):
+ return None
+            elif isinstance(val, string_types):
+                try:
+                    # this will be helpful for turning strings into lists where appropriate "(0, 1, 2)" --> (0, 1, 2)
+ return literal_eval(val)
+ except ValueError:
+ return val
+ return val
+
+ if nodes_file is None and node_types_file is None:
+ return None
+
+ elif nodes_file is not None and node_types_file is not None:
+            # Get the array_params from nodes_file and the properties from node_types_file, then combine them to call
+            # the add_nodes() function and rebuild the nodes.
+ nt_df = pd.read_csv(node_types_file, self.CSV_DELIMITER) #, index_col=self.COL_NODE_TYPE_ID)
+ n_df = pd.read_csv(nodes_file, self.CSV_DELIMITER)
+
+ for _, row in nt_df.iterrows():
+ # iterate through the node_types, find all nodes with matching node_type_id and get those node's
+ # parameters as a dictionary of lists
+ node_type_props = {l: eval(row[l]) for l in nt_df.columns if eval(row[l]) is not None}
+ selected_nodes = n_df[n_df[self.COL_NODE_TYPE_ID] == row[self.COL_NODE_TYPE_ID]]
+ N = len(selected_nodes.axes[0])
+ array_params = {l: list(selected_nodes[l]) for l in selected_nodes.columns
+ if l not in ['node_type_id', 'position']}
+
+                # Special handling for position params
+ position = None
+ position_params = None
+ if 'position' in selected_nodes.columns:
+ position_params = {'location': [eval(p) for p in selected_nodes['position']]}
+ position = 'points'
+
+ self._network.add_nodes(N, position=position, position_params=position_params,
+ array_params=array_params, **node_type_props)
+
+ self._network._build_nodes()
+
+ elif node_types_file is not None:
+            # node_types exists but nodes doesn't. We convert each row (node_type) in the csv to a collection
+            # of nodes with N=1 and no array_params.
+ nt_df = pd.read_csv(node_types_file, self.CSV_DELIMITER)
+ for _, row in nt_df.iterrows():
+ node_type_props = {l: eval(row[l]) for l in nt_df.columns if eval(row[l]) is not None}
+ self._network.add_nodes(N=1, **node_type_props)
+ self._network._build_nodes()
+
+ elif nodes_file is not None:
+            # nodes exists but node_types doesn't. In this case group nodes by node_type_id and add each group
+            # as its own population (with no node-type properties)
+ n_df = pd.read_csv(nodes_file, self.CSV_DELIMITER)
+ for nt_id, df in n_df.groupby(self.COL_NODE_TYPE_ID):
+ N = len(df.axes[0])
+ array_params = {l: list(df[l]) for l in df.columns
+ if l not in ['node_type_id', 'position']}
+
+ position = None
+ position_params = None
+ if 'position' in df.columns:
+ position_params = {'location': [eval(p) for p in df['position']]}
+ position = 'points'
+
+ self._network.add_nodes(N, position=position, position_params=position_params,
+ array_params=array_params, node_type_id=nt_id)
+ self._network._build_nodes()
+
+ def __load_edge_types(self, edges_file, edge_types_file):
+ """Loads edges and edge_types
+
+ :param edges_file: path to edges hdf5
+ :param edge_types_file: path to edge_types csv
+ """
+ if edge_types_file is None and edges_file is None:
+ return
+
+ if edge_types_file is not None:
+ # load in the edge-types. iterate through all the rows of edge_types.csv and call connect() function.
+ et_pd = pd.read_csv(edge_types_file, self.CSV_DELIMITER)
+ prop_cols = [label for label in et_pd.columns
+ if label not in [self.COL_SOURCE_QUERY, self.COL_TARGET_QUERY]]
+
+ for _, row in et_pd.iterrows():
+ # the connect function requires a Pool of nodes (like net.nodes()) or a dictionary filter.
+ source_nodes = NodePool.from_filter(self._network, row[self.COL_SOURCE_QUERY])
+ target_nodes = NodePool.from_filter(self._network, row[self.COL_TARGET_QUERY])
+ # TODO: evaluate edge-properties and exclude any that are None.
+ edge_params = {label: row[label] for label in prop_cols}
+
+ # don't try to guess connection rule
+ self._network.connect(source=source_nodes, target=target_nodes, edge_params=edge_params)
+
+ if edges_file is not None:
+ # Create edges from h5.
+ if not self._network.nodes_built:
+ print('The nodes have not been built. Cannot load edges file.')
+ return
+
+ # load h5 tables
+ edges_h5 = h5py.File(edges_file, 'r')
+ edge_types_ds = edges_h5['edge_types']
+ num_syns_ds = edges_h5['num_syns']
+ src_gids_ds = edges_h5['src_gids']
+ edge_ptr_ds = edges_h5['edge_ptr']
+ n_edge_ptr = len(edge_ptr_ds)
+
+ # the network needs edge-types objects while building the edges. If the edge_types_file exists then they
+ # would have been added in the previous section of code. If edge_types_file is missing we will create
+ # filler edge types based on the edge_type_id's found in edge_ptr dataset
+ if edge_types_file is None:
+ for et_id in set(edges_h5['edge_types'][:]):
+                    self._network.connect(edge_params={self.COL_EDGE_TYPE_ID: et_id})
+
+ # TODO: if edge_types.csv does exists we should check it has matching edge_type_ids with edges.h5/edge_ptr
+
+ def itr_fnc(et):
+ # Creates a generator that will iteratively go through h5 file and return (source_gid, target_gid,
+ # nsyn) values for connections with matching edge_type.edge_type_id
+ edge_type_id = et.id
+                for ep_indx in range(n_edge_ptr - 1):
+                    trg_gid = ep_indx
+                    for syn_indx in range(edge_ptr_ds[ep_indx], edge_ptr_ds[ep_indx + 1]):
+ if edge_types_ds[syn_indx] == edge_type_id:
+ src_gid = src_gids_ds[syn_indx]
+ n_syn = num_syns_ds[syn_indx]
+ yield (src_gid, trg_gid, n_syn)
+
+ for edge in self._network.edge_types():
+ # create iterator and directly add edges
+ itr = itr_fnc(edge)
+ self._network._add_edges(edge, itr)
+
+        self._network.edges_built = True
+
+ def load_dir(self, directory, metadata):
+ def get_path(f):
+ if f not in metadata:
+ return None
+ file_name = metadata[f]
+ if directory is None or os.path.isabs(file_name):
+            return file_name
+ return os.path.join(directory, file_name)
+
+ nodes_file = get_path('nodes_file')
+ node_types_file = get_path('node_types_file')
+ self.__load_nodes(nodes_file, node_types_file)
+
+ edge_types_file = get_path('edge_types_file')
+ edges_file = get_path('edges_file')
+ self.__load_edge_types(edges_file, edge_types_file)
+
+ def load(self, nodes_file=None, node_types_file=None, edges_file=None, edge_types_file=None):
+ self.__load_nodes(nodes_file, node_types_file)
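+
+# A hedged usage sketch (assumes `net` is an already-built bmtk Network; the paths
+# are illustrative):
+#
+#     fmt = HDF5Format(net)
+#     fmt.save('network/my_net')   # writes nodes.csv, node_types.csv, edges.h5,
+#                                  # edge_types.csv and .metadata.json
+#     fmt.load(nodes_file='network/my_net/nodes.csv',
+#              node_types_file='network/my_net/node_types.csv')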
diff --git a/bmtk-vb/bmtk/builder/formats/iformats.py b/bmtk-vb/bmtk/builder/formats/iformats.py
new file mode 100644
index 0000000..a29261e
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/formats/iformats.py
@@ -0,0 +1,29 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+class IFormat(object):
+ def __init__(self, network):
+ self._network = network
+
+ @property
+ def format(self):
+ raise NotImplementedError()
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/builder/functor_cache.py b/bmtk-vb/bmtk/builder/functor_cache.py
new file mode 100644
index 0000000..0da8fc1
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/functor_cache.py
@@ -0,0 +1,55 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from six import string_types
+import functools
+
+
+class FunctorCache(object):
+ def __init__(self):
+ self.cache = {}
+
+ def create(self, connector, **params):
+ if params is None:
+ params = {}
+
+ if isinstance(connector, string_types):
+ # TODO: don't do this, a user may want to return a string in connection_map params
+ func = self.cache[connector]
+ return functools.partial(func, **params)
+
+ elif isinstance(connector, dict):
+ return lambda *args: connector
+
+ elif isinstance(connector, list):
+            # for the iterator we want to pass back lists as they are
+ return connector
+
+ elif callable(connector):
+ return functools.partial(connector, **params)
+
+ else:
+ # should include all numericals, non-callable objects and tuples
+ return lambda *args: connector
+
+ def register(self, name, func):
+ self.cache[name] = func
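+
+
+# A minimal sketch of the dispatch rules above (names and values are illustrative):
+if __name__ == '__main__':
+    fc = FunctorCache()
+    fc.register('n_fixed', lambda src, trg, n: n)
+    assert fc.create('n_fixed', n=3)(None, None) == 3  # string -> registered functor
+    assert fc.create({'nsyns': 2})() == {'nsyns': 2}   # dict -> constant factory
+    assert fc.create(7)() == 7                         # literal -> constant factory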
diff --git a/bmtk-vb/bmtk/builder/functor_cache.pyc b/bmtk-vb/bmtk/builder/functor_cache.pyc
new file mode 100644
index 0000000..69e7056
Binary files /dev/null and b/bmtk-vb/bmtk/builder/functor_cache.pyc differ
diff --git a/bmtk-vb/bmtk/builder/id_generator.py b/bmtk-vb/bmtk/builder/id_generator.py
new file mode 100644
index 0000000..9d7b798
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/id_generator.py
@@ -0,0 +1,71 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import threading
+import numpy as np
+import six
+
+
+
+class IDGenerator(object):
+ """ A simple class for fetching global ids. To get a unqiue global ID class next(), which should be thread-safe. It
+ Also has a remove_id(gid) in which case next() will never return the gid. The remove_id function is used for cases
+ when using imported networks and we want to elimnate previously created id.
+
+ TODO:
+ * Implement a bit array to keep track of already existing gids
+ * It might be necessary to implement with MPI support?
+ """
+ def __init__(self, init_val=0):
+ self.__counter = init_val
+ self.__taken = set()
+ self.__lock = threading.Lock()
+
+ def remove_id(self, gid):
+ assert(np.issubdtype(type(gid), np.integer))
+ if gid >= self.__counter:
+ self.__taken.add(gid)
+
+ def next(self):
+ self.__lock.acquire()
+ while self.__counter in self.__taken:
+ self.__taken.remove(self.__counter)
+ self.__counter += 1
+
+ nid = self.__counter
+ self.__counter += 1
+ self.__lock.release()
+
+ return nid
+
+ def __contains__(self, gid):
+ return gid < self.__counter
+
+ def __call__(self, *args, **kwargs):
+ if len(args) == 1:
+ N = args[0]
+        elif 'N' in kwargs:
+            N = kwargs['N']
+        else:
+            raise ValueError('expected the number of ids as a single positional or N= keyword argument')
+
+        assert(isinstance(N, six.integer_types))
+ return [self.next() for _ in six.moves.range(N)]
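+
+
+# A small, hedged demonstration of the id bookkeeping (values follow from the logic above):
+if __name__ == '__main__':
+    gids = IDGenerator()
+    gids.remove_id(2)  # 2 is reserved and will be skipped
+    assert [gids.next() for _ in range(3)] == [0, 1, 3]
+    assert gids(2) == [4, 5]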
+
diff --git a/bmtk-vb/bmtk/builder/id_generator.pyc b/bmtk-vb/bmtk/builder/id_generator.pyc
new file mode 100644
index 0000000..5114175
Binary files /dev/null and b/bmtk-vb/bmtk/builder/id_generator.pyc differ
diff --git a/bmtk-vb/bmtk/builder/io/__init__.py b/bmtk-vb/bmtk/builder/io/__init__.py
new file mode 100644
index 0000000..00a458f
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/io/__init__.py
@@ -0,0 +1,66 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import h5py
+from ..network import Network
+
+def write_edges_to_h5(network, filename, synapse_key=None, verbose=True):
+ assert(isinstance(network, Network))
+
+ # The network edges may either be a raw value, dictionary or list
+ if synapse_key is None:
+ lookup = lambda x: x
+
+ elif isinstance(synapse_key, (str, int)):
+ # a string key indexes a dict of edge properties; an int indexes a list
+ lookup = lambda x: x[synapse_key]
+
+ else:
+ raise Exception("Unable to resolve the synapse_key type.")
+
+ # Create the tables for indptr, nsyns and src_gids
+ if verbose:
+ print("> building tables with {} nodes and {} edges.".format(network.nnodes, network.nedges))
+ indptr_table = [0]
+ nsyns_table = []
+ src_gids_table = []
+ for trg in network.nodes():
+ # TODO: check the order of the node list
+ tid = trg[1]['id']
+ for edges in network.edges([tid], rank=1):
+ src_gids_table.append(edges[0])
+ nsyns_table.append(lookup(edges[2]))
+
+ if len(src_gids_table) == indptr_table[-1]:
+ print("node %d doesn't have any edges {}".format(tid))
+ indptr_table.append(len(src_gids_table))
+
+ # Save the tables in h5 format
+ if verbose:
+ print("> Saving table to {}.".format(filename))
+ with h5py.File(filename, 'w') as hf:
+ hf.create_dataset('indptr', data=indptr_table)
+ hf.create_dataset('nsyns', data=nsyns_table)
+ hf.create_dataset('src_gids', data=src_gids_table, dtype='int32')  # int32 was an undefined name
+ hf.attrs["shape"] = (network.nnodes, network.nnodes)
diff --git a/bmtk-vb/bmtk/builder/iterator.py b/bmtk-vb/bmtk/builder/iterator.py
new file mode 100644
index 0000000..1469cfa
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/iterator.py
@@ -0,0 +1,124 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import itertools
+import functools
+import types
+
+
+class IteratorCache(object):
+ def __init__(self):
+ self.cache = {}
+
+ def create(self, itr_name, itr_type, **params):
+ if params is None:
+ params = {}
+
+ if (itr_name, itr_type) in self.cache:
+ func = self.cache[(itr_name, itr_type)]
+ return functools.partial(func, **params)
+
+ else:
+ raise Exception("Couldn't find iterator for ({}, {}).".format(itr_name, itr_type))
+
+ def register(self, name, itr_type, func):
+ self.cache[(name, itr_type)] = func
+
+
+def create(iterator, connector, **params):
+ return ITERATOR_CACHE.create(iterator, type(connector), **params)
+
+
+def register(name, dtype, func):
+ ITERATOR_CACHE.register(name, dtype, func)
+
+
+########################################################################
+# Pre-defined iterators
+########################################################################
+def one_to_all_iterator(source_nodes, target_nodes, connector):
+ """Calls the connector function with (1 source, all targets), iterated for each source"""
+ target_list = list(target_nodes) # list of all targets
+ target_node_ids = [t.node_id for t in target_list] # slightly cheaper than calling node_id S*T times
+ for source in source_nodes:
+ source_node_id = source.node_id
+ edge_vals = connector(source, target_list)
+ for i, target in enumerate(target_list):
+ yield (source_node_id, target_node_ids[i], edge_vals[i])
+
+
+def all_to_one_iterator(source_nodes, target_nodes, connector):
+ """Iterate through all the target nodes and return target node + list of all sources"""
+ source_list = list(source_nodes)
+ for target in target_nodes:
+ val = connector(source_list, target)
+ for i, source in enumerate(source_list):
+ yield (source.node_id, target.node_id, val[i])
+
+
+def one_to_one_iterator(source_nodes, target_nodes, connector):
+ # TODO: may be faster to pull out the node_ids, don't use itertools
+ for source, target in itertools.product(source_nodes, target_nodes):
+ val = connector(source, target)
+ yield (source.node_id, target.node_id, val)
+
+
+def one_to_one_list_iterator(source_nodes, target_nodes, vals):
+ assert(len(vals) == len(source_nodes)*len(target_nodes))
+ for i, (source, target) in enumerate(itertools.product(source_nodes, target_nodes)):
+ yield (source.node_id, target.node_id, vals[i])
+
+
+def one_to_all_list_iterator(source_nodes, target_nodes, vals):
+ assert(len(vals) == len(target_nodes))
+ source_ids = [s.node_id for s in list(source_nodes)]
+ target_ids = [t.node_id for t in list(target_nodes)]
+ for src_id in source_ids:
+ for i, trg_id in enumerate(target_ids):
+ yield (src_id, trg_id, vals[i])
+
+
+def all_to_one_list_iterator(source_nodes, target_nodes, vals):
+ assert(len(vals) == len(source_nodes))
+ source_ids = [s.node_id for s in list(source_nodes)]
+ target_ids = [t.node_id for t in list(target_nodes)]
+ for trg_id in target_ids:
+ for i, src_id in enumerate(source_ids):
+ yield (src_id, trg_id, vals[i])
+
+
+def lambda_iterator(source_nodes, target_nodes, lambda_val):
+ for source, target in itertools.product(source_nodes, target_nodes):
+ yield (source.node_id, target.node_id, lambda_val())
+
+
+ITERATOR_CACHE = IteratorCache()
+register('one_to_one', functools.partial, one_to_one_iterator)
+register('all_to_one', functools.partial, all_to_one_iterator)
+register('one_to_all', functools.partial, one_to_all_iterator)
+
+register('one_to_one', list, one_to_one_list_iterator)
+register('one_to_all', list, one_to_all_list_iterator)
+register('all_to_one', list, all_to_one_list_iterator)
+
+
+register('one_to_one', types.FunctionType, lambda_iterator)
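+
+# Registration sketch (illustrative): a hypothetical all-pairs iterator whose
+# connector receives the full source and target lists at once and returns a
+# 2D table of per-pair edge values.
+#
+#   def all_to_all_iterator(source_nodes, target_nodes, connector):
+#       sources, targets = list(source_nodes), list(target_nodes)
+#       vals = connector(sources, targets)
+#       for i, src in enumerate(sources):
+#           for j, trg in enumerate(targets):
+#               yield (src.node_id, trg.node_id, vals[i][j])
+#
+#   register('all_to_all', types.FunctionType, all_to_all_iterator)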
diff --git a/bmtk-vb/bmtk/builder/iterator.pyc b/bmtk-vb/bmtk/builder/iterator.pyc
new file mode 100644
index 0000000..b642e62
Binary files /dev/null and b/bmtk-vb/bmtk/builder/iterator.pyc differ
diff --git a/bmtk-vb/bmtk/builder/network.py b/bmtk-vb/bmtk/builder/network.py
new file mode 100644
index 0000000..90d3ac1
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/network.py
@@ -0,0 +1,478 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import numpy as np
+import types
+import csv
+import six
+
+from .node_pool import NodePool
+from .connection_map import ConnectionMap
+from .node_set import NodeSet
+from .id_generator import IDGenerator
+
+
+class Network(object):
+ def __init__(self, name, **network_props):
+ if len(name) == 0:
+ raise Exception('Network name missing.')
+
+ self._network_name = name
+
+ self._nnodes = 0
+ self._nodes_built = False
+ self._nedges = 0
+ self._edges_built = False
+
+ self._node_sets = []
+ self.__external_node_sets = []
+ self.__node_id_counter = 0
+
+ self._node_types_properties = {}
+ self._node_types_columns = set(['node_type_id'])
+ # self._edge_type_properties = {}
+ # self._edge_types_columns = set(['edge_type_id'])
+ self._connection_maps = []
+ #self._connection_maps = ConnectionTable()
+
+ self._node_id_gen = IDGenerator()
+ self._node_type_id_gen = IDGenerator(100)
+ self._edge_type_id_gen = IDGenerator(100)
+
+ #self._connection_table = []
+ #self._source_networks = []
+ #self._target_networks = []
+ self._network_conns = set()
+ self._connected_networks = {}
+
+ @property
+ def name(self):
+ return self._network_name
+
+ @property
+ def nodes_built(self):
+ return self._nodes_built
+
+ @property
+ def edges_built(self):
+ return self._edges_built
+
+ @property
+ def nnodes(self):
+ raise NotImplementedError
+
+ @property
+ def nedges(self):
+ raise NotImplementedError
+
+ def get_connections(self):
+ return self._connection_maps
+
+ def _add_node_type(self, props):
+ node_type_id = props.get('node_type_id', None)
+ if node_type_id is None:
+ node_type_id = self._node_type_id_gen.next()
+ else:
+ if node_type_id in self._node_types_properties:
+ raise Exception('node_type_id {} already exists.'.format(node_type_id))
+ self._node_type_id_gen.remove_id(node_type_id)
+
+ props['node_type_id'] = node_type_id
+ self._node_types_properties[node_type_id] = props
+
+ def add_nodes(self, N=1, **properties):
+ self._clear()
+
+ # categorize properties as either a node-params (for nodes file) or node-type-property (for node_types files)
+ node_params = {}
+ node_properties = {}
+ for prop_name, prop_value in properties.items():
+ if isinstance(prop_value, (list, np.ndarray)): # TODO: what about pandas series
+ n_props = len(prop_value)
+ if n_props != N:
+ raise Exception('Trying to pass in array of length {} into N={} nodes'.format(n_props, N))
+ node_params[prop_name] = prop_value
+
+ elif isinstance(prop_value, types.GeneratorType):
+ vals = list(prop_value)
+ assert(len(vals) == N)
+ node_params[prop_name] = vals
+
+ else:
+ node_properties[prop_name] = prop_value
+ self._node_types_columns.add(prop_name)
+
+ # If node-type-id exists, make sure there is no clash, otherwise generate a new id.
+ if 'node_type_id' in node_params:
+ raise Exception('There can be only one "node_type_id" per set of nodes.')
+
+ self._add_node_type(node_properties)
+ self._node_sets.append(NodeSet(N, node_params, node_properties))
+
+ def add_edges(self, source=None, target=None, connection_rule=1, connection_params=None, iterator='one_to_one',
+ **edge_type_properties):
+ # TODO: check edge_type_properties for 'edge_type_id' and make sure there isn't a collision. Otherwise create
+ # a new id.
+ if not isinstance(source, NodePool):
+ source = NodePool(self, **source or {})
+
+ if not isinstance(target, NodePool):
+ target = NodePool(self, **target or {})
+
+ self._network_conns.add((source.network_name, target.network_name))
+ self._connected_networks[source.network_name] = source.network
+ self._connected_networks[target.network_name] = target.network
+
+ # TODO: make sure that they don't add a dictionary or some other weird property type.
+ edge_type_id = edge_type_properties.get('edge_type_id', None)
+ if edge_type_id is None:
+ edge_type_id = self._edge_type_id_gen.next()
+ edge_type_properties['edge_type_id'] = edge_type_id
+ elif edge_type_id in self._edge_type_id_gen:
+ raise Exception('edge_type_id {} already exists.'.format(edge_type_id))
+ else:
+ self._edge_type_id_gen.remove_id(edge_type_id)
+
+ edge_type_properties['source_query'] = source.filter_str
+ edge_type_properties['target_query'] = target.filter_str
+
+ if 'nsyns' in edge_type_properties:
+ connection_rule = edge_type_properties['nsyns']
+ del edge_type_properties['nsyns']
+
+ # self._edge_types_columns.update(edge_type_properties.keys())
+ connection = ConnectionMap(source, target, connection_rule, connection_params, iterator, edge_type_properties)
+ self._connection_maps.append(connection)
+ # self._connection_maps.add(source.network_name, target.network_name, connection)
+ return connection
+
+ def nodes(self, **properties):
+ if not self.nodes_built:
+ self._build_nodes()
+
+ return NodePool(self, **properties)
+
+ def nodes_iter(self, nids=None):
+ raise NotImplementedError
+
+ def edges(self, target_nodes=None, source_nodes=None, target_network=None, source_network=None, **properties):
+ """Returns a list of dictionary-like Edge objects, given filter parameters.
+
+ To get all edges from a network
+ edges = net.edges()
+
+ To specify the target and/or source node-set
+ edges = net.edges(target_nodes=net.nodes(type='biophysical'), source_nodes=net.nodes(ei='i'))
+
+ To only get edges with a given edge_property
+ edges = net.edges(weight=100, syn_type='AMPA_Exc2Exc')
+
+ :param target_nodes: gid, list of gid, dict or node-pool. Set of target nodes for a given edge.
+ :param source_nodes: gid, list of gid, dict or node-pool. Set of source nodes for a given edge.
+ :param target_network: name of network containing target nodes.
+ :param source_network: name of network containing source nodes.
+ :param properties: edge-properties used to filter out only certain edges.
+ :return: list of bmtk.builder.edge.Edge properties.
+ """
+ def nodes2gids(nodes, network):
+ """helper function for converting target and source nodes into list of gids"""
+ if nodes is None or isinstance(nodes, list):
+ return nodes, network
+ if isinstance(nodes, int):
+ return [nodes], network
+ if isinstance(nodes, dict):
+ network = network or self._network_name
+ nodes = self._connected_networks[network].nodes(**nodes)
+ if isinstance(nodes, NodePool):
+ if network is not None and nodes.network_name != network:
+ print('Warning: nodes and network do not match')
+ return [n.node_id for n in nodes], nodes.network_name
+ else:
+ raise Exception("Couldn't convert nodes")
+
+ def filter_edges(e):
+ """Returns true only if all the properities match for a given edge"""
+ for k, v in properties.items():
+ if k not in e:
+ return False
+ if e[k] != v:
+ return False
+ return True
+
+ if not self.edges_built:
+ self.build()
+
+ # trg_gids can't be None for edges_iter. If target_nodes is not explicitly given, get all target gids that
+ # synapse onto or from the current network.
+ if target_nodes is None:
+ trg_gid_set = set(n.node_id for cm in self._connection_maps for n in cm.target_nodes)
+ target_nodes = sorted(trg_gid_set)
+
+ # convert target/source nodes into a list of their gids
+ trg_gids, trg_net = nodes2gids(target_nodes, target_network)
+ src_gids, src_net = nodes2gids(source_nodes, source_network)
+
+ # use the iterator to get edges and return as a list
+ if properties is None:
+ edges = list(self.edges_iter(trg_gids=trg_gids, trg_network=trg_net, src_network=src_net))
+ else:
+ # filter out certain edges using the properties parameters
+ edges = [e for e in self.edges_iter(trg_gids=trg_gids, trg_network=trg_net, src_network=src_net)
+ if filter_edges(e)]
+
+ if src_gids is not None:
+ # if src_gids are set filter out edges some more
+ edges = [e for e in edges if e.source_gid in src_gids]
+
+ return edges
+
+ def edges_iter(self, trg_gids, src_network=None, trg_network=None):
+ """Given a list of target gids, returns a generator for iteratoring over all possible edges.
+
+ It is preferable to use edges() method instead, it allows more flexibibility in the input and can better
+ indicate if their is a problem.
+
+ The order of the edges returned will be in the same order as the trg_gids list, but does not guarentee any
+ secondary ordering by source-nodes and/or edge-type. If their isn't a edge with a matching target-id then
+ it will skip that gid in the list, the size of the generator can 0 to arbitrarly large.
+
+ :param trg_gids: list of gids to match with an edge's target.
+ :param src_network: str, only returns edges coming from the specified source network.
+ :param trg_network: str, only returns edges going to the specified target network.
+ :return: iteration of bmtk.build.edge.Edge objects representing given edge.
+ """
+ raise NotImplementedError
+
+ def clear(self):
+ self._nodes_built = False
+ self._edges_built = False
+ self._clear()
+
+ def _node_id(self, N):
+ for i in six.moves.range(N):
+ yield self.__node_id_counter
+ self.__node_id_counter += 1
+
+ def _build_nodes(self):
+ """Builds or rebuilds all the nodes, clear out both node and edge sets."""
+ # print 'build_nodes'
+ self._clear()
+ self._initialize()
+
+ for ns in self._node_sets:
+ nodes = ns.build(nid_generator=self._node_id)
+ self._add_nodes(nodes)
+ self._nodes_built = True
+
+ def __build_edges(self):
+ """Builds network edges"""
+ if not self.nodes_built:
+ # only rebuild nodes if necessary.
+ self._build_nodes()
+
+ for i, conn_map in enumerate(self._connection_maps):
+ # print conn_map
+ self._add_edges(conn_map, i)
+
+ self._edges_built = True
+
+ def build(self, force=False):
+ """ Builds nodes (assigns gids) and edges.
+
+ Args:
+ force (bool): set True to force a complete rebuild of nodes and edges. If nodes() or save_nodes() has
+ been called before, forcing a rebuild may change the gid of each node.
+ """
+
+ # if nodes() or save_nodes() is called by user prior to calling build() - make sure the nodes
+ # are completely rebuilt (unless a node set has been added).
+ if force:
+ self._clear()
+ self._initialize()
+ self._build_nodes()
+
+ # always build the edges.
+ self.__build_edges()
+
+ def __get_path(self, filename, path_dir, ftype):
+ if filename is None:
+ fname = '{}_{}'.format(self.name, ftype)
+ return os.path.join(path_dir, fname)
+ elif os.path.isabs(filename):
+ return filename
+ else:
+ return os.path.join(path_dir, filename)
+
+ def save(self, output_dir='.'):
+ self.save_nodes(output_dir=output_dir)
+ self.save_edges(output_dir=output_dir)
+
+ def save_nodes(self, nodes_file_name=None, node_types_file_name=None, output_dir='.', force_overwrite=True):
+ nodes_file = self.__get_path(nodes_file_name, output_dir, 'nodes.h5')
+ if not force_overwrite and os.path.exists(nodes_file):
+ raise Exception('File {} exists. Please use a different name or set force_overwrite'.format(nodes_file))
+ nf_dir = os.path.dirname(nodes_file)
+ if not os.path.exists(nf_dir):
+ os.makedirs(nf_dir)
+
+ node_types_file = self.__get_path(node_types_file_name, output_dir, 'node_types.csv')
+ if not force_overwrite and os.path.exists(node_types_file):
+ raise Exception('File {} exists. Please use a different name or set force_overwrite'.format(node_types_file))
+ ntf_dir = os.path.dirname(node_types_file)
+ if not os.path.exists(ntf_dir):
+ os.makedirs(ntf_dir)
+
+ self._save_nodes(nodes_file)
+ self._save_node_types(node_types_file)
+
+ def _save_nodes(self, nodes_file_name):
+ raise NotImplementedError
+
+ def _save_node_types(self, node_types_file_name):
+ node_types_cols = ['node_type_id'] + [col for col in self._node_types_columns if col != 'node_type_id']
+ with open(node_types_file_name, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=' ')
+ csvw.writerow(node_types_cols)
+ for node_type in self._node_types_properties.values():
+ csvw.writerow([node_type.get(cname, 'NULL') for cname in node_types_cols])
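+ # The node_types file written above is space-delimited with 'NULL' for
+ # missing values, e.g. (illustrative):
+ #
+ #   node_type_id ei model_type
+ #   100 e biophysical
+ #   101 i NULL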
+
+ def import_nodes(self, nodes_file_name, node_types_file_name):
+ raise NotImplementedError
+
+ def save_edges(self, edges_file_name=None, edge_types_file_name=None, output_dir='.', src_network=None,
+ trg_network=None, name=None, force_build=True, force_overwrite=False):
+ # Make sure edges exists and are built
+ if len(self._connection_maps) == 0:
+ print("Warning: no edges have been made for this network, skipping saving.")
+ return
+
+ if self._edges_built is False:
+ if force_build:
+ print("Message: building edges")
+ self.__build_edges()
+ else:
+ print("Warning: Edges are not built. Either call build() or use force_build parameter. Skip saving.")
+ return
+
+ network_params = [(s, t, s+'_'+t+'_edges.h5', s+'_'+t+'_edge_types.csv') for s, t in list(self._network_conns)]
+ if src_network is not None:
+ network_params = [p for p in network_params if p[0] == src_network]
+
+ if trg_network is not None:
+ network_params = [p for p in network_params if p[1] == trg_network]
+
+ if len(network_params) == 0:
+ print("Warning: couldn't find connections. Skip saving.")
+ return
+
+ if (edges_file_name or edge_types_file_name) is not None:
+ network_params = [(network_params[0][0], network_params[0][1], edges_file_name, edge_types_file_name)]
+
+ if not os.path.exists(output_dir):
+ os.mkdir(output_dir)
+
+ for p in network_params:
+ if p[3] is not None:
+ self._save_edge_types(os.path.join(output_dir, p[3]), p[0], p[1])
+
+ if p[2] is not None:
+ self._save_edges(os.path.join(output_dir, p[2]), p[0], p[1], name)
+
+ def _save_edge_types(self, edge_types_file_name, src_network, trg_network):
+
+ # Get edge-type properties for connections with matching source/target networks
+ matching_et = [c.edge_type_properties for c in self._connection_maps
+ if c.source_network_name == src_network and c.target_network_name == trg_network]
+
+ # Get edge-type properties that are only relevant for this source-target network pair
+ cols = ['edge_type_id', 'target_query', 'source_query'] # mandatory and should come first
+ merged_keys = [k for et in matching_et for k in et.keys() if k not in cols]
+ cols += list(set(merged_keys))
+
+ # Write to csv
+ with open(edge_types_file_name, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=' ')
+ csvw.writerow(cols)
+ for edge_type in matching_et:
+ csvw.writerow([edge_type.get(cname, 'NULL') if edge_type.get(cname, 'NULL') is not None else 'NULL'
+ for cname in cols])
+
+ def _save_edges(self, edges_file_name, src_network, trg_network):
+ raise NotImplementedError
+
+ def _initialize(self):
+ raise NotImplementedError
+
+ def _add_nodes(self, node_tuples):
+ raise NotImplementedError
+
+ def _add_edges(self, edge_tuples, i):
+ raise NotImplementedError
+
+ def _clear(self):
+ raise NotImplementedError
+
+ """
+ def _edges_iter(targets=None, sources=None):
+ raise NotImplementedError
+ """
+
+"""
+class ConnectionTable(object):
+ def __init__(self):
+ self.__targets = {}
+ self.__sources = {}
+ self.__connections = []
+
+ def add(self, source_network, target_network, connection_map):
+ # TODO: If the source/target are network objects we can get the network_name
+ assert(isinstance(source_network, basestring))
+ assert(isinstance(target_network, basestring))
+ assert(isinstance(connection_map, ConnectionMap))
+
+ if source_network not in self.__sources:
+ self.__sources[source_network] = []
+ if target_network not in self.__targets:
+ self.__targets[target_network] = []
+
+ cm_index = len(self.__connections)
+ self.__connections.append(connection_map)
+ self.__sources[source_network].append(cm_index)
+ self.__targets[target_network].append(cm_index)
+
+ def get(self, source_network=None, target_network=None):
+ # TODO: Add warning if source/target network is not found
+ cm_indicies = set(range(len(self.__connections)))
+ if source_network is not None:
+ cm_indicies &= set(self.__sources.get(source_network, []))
+
+ if target_network is not None:
+ cm_indicies &= set(self.__targets.get(target_network, []))
+
+ return self.__connections[cm_indicies]
+"""
+
+
+
+
+
diff --git a/bmtk-vb/bmtk/builder/network.pyc b/bmtk-vb/bmtk/builder/network.pyc
new file mode 100644
index 0000000..934a435
Binary files /dev/null and b/bmtk-vb/bmtk/builder/network.pyc differ
diff --git a/bmtk-vb/bmtk/builder/networks/__init__.py b/bmtk-vb/bmtk/builder/networks/__init__.py
new file mode 100644
index 0000000..45b0922
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/networks/__init__.py
@@ -0,0 +1,30 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .dm_network import DenseNetwork
+NetworkBuilder = DenseNetwork  # the dm_network module itself is not imported; alias the class directly
+
+try:
+ # If mpi4py is installed let users access MPIBuilder for parallel building networks
+ from .mpi_network import MPINetwork, MPINetwork as MPIBuilder
+except ImportError:
+ pass
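+
+# Typical usage sketch (illustrative; the node/edge properties are made-up):
+#
+#   from bmtk.builder.networks import NetworkBuilder
+#   net = NetworkBuilder('cortex')
+#   net.add_nodes(N=10, model_type='biophysical', ei='e')
+#   net.add_edges(source={'ei': 'e'}, target={'ei': 'e'},
+#                 connection_rule=1, syn_weight=5.0)
+#   net.build()
+#   net.save_nodes(output_dir='network')
+#   net.save_edges(output_dir='network')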
diff --git a/bmtk-vb/bmtk/builder/networks/__init__.pyc b/bmtk-vb/bmtk/builder/networks/__init__.pyc
new file mode 100644
index 0000000..4fad6a5
Binary files /dev/null and b/bmtk-vb/bmtk/builder/networks/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/builder/networks/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/builder/networks/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..9b5d19f
Binary files /dev/null and b/bmtk-vb/bmtk/builder/networks/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/networks/__pycache__/dm_network.cpython-37.pyc b/bmtk-vb/bmtk/builder/networks/__pycache__/dm_network.cpython-37.pyc
new file mode 100644
index 0000000..6f50aa6
Binary files /dev/null and b/bmtk-vb/bmtk/builder/networks/__pycache__/dm_network.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/networks/__pycache__/mpi_network.cpython-37.pyc b/bmtk-vb/bmtk/builder/networks/__pycache__/mpi_network.cpython-37.pyc
new file mode 100644
index 0000000..68baa18
Binary files /dev/null and b/bmtk-vb/bmtk/builder/networks/__pycache__/mpi_network.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/builder/networks/dm_network.py b/bmtk-vb/bmtk/builder/networks/dm_network.py
new file mode 100644
index 0000000..b6547dc
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/networks/dm_network.py
@@ -0,0 +1,487 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import numpy as np
+import h5py
+import six
+import csv
+
+from ..network import Network
+from bmtk.builder.node import Node
+from bmtk.builder.edge import Edge
+from bmtk.utils import sonata
+
+
+class DenseNetwork(Network):
+ def __init__(self, name, **network_props):
+ super(DenseNetwork, self).__init__(name, **network_props or {})
+
+ self.__edges_types = {}
+ self.__src_mapping = {}
+
+ self.__networks = {}
+ self.__node_count = 0
+ self._nodes = []
+
+ self.__edges_tables = []
+ self._target_networks = {}
+
+ def _initialize(self):
+ self.__id_map = []
+ self.__lookup = []
+
+ def _add_nodes(self, nodes):
+ self._nodes.extend(nodes)
+ self._nnodes = len(self._nodes)
+
+ """
+ id_label = 'node_id' if 'node_id' in nodes[0].keys() else 'id'
+
+ start_idx = len(self.__id_map) #
+ self.__id_map += [n[id_label] for n in nodes]
+ self.__nodes += [(interal_id, nodes[node_idx])
+ for node_idx, interal_id in enumerate(xrange(start_idx, len(self.__id_map)))]
+
+ assert(len(self.__id_map) == len(self.__nodes))
+ """
+
+ def edges_table(self):
+ return self.__edges_tables
+
+ def _save_nodes(self, nodes_file_name):
+ if not self._nodes_built:
+ self._build_nodes()
+
+ # save the node_types file
+ # TODO: how do we add attributes to the h5
+ group_indx = 0
+ groups_lookup = {}
+ group_indicies = {}
+ group_props = {}
+ for ns in self._node_sets:
+ if ns.params_hash in groups_lookup:
+ continue
+ else:
+ groups_lookup[ns.params_hash] = group_indx
+ group_indicies[group_indx] = 0
+ group_props[group_indx] = {k: [] for k in ns.params_keys if k != 'node_id'}
+ group_indx += 1
+
+ node_gid_table = np.zeros(self._nnodes) # todo: set dtypes
+ node_type_id_table = np.zeros(self._nnodes)
+ node_group_table = np.zeros(self._nnodes)
+ node_group_index_tables = np.zeros(self._nnodes)
+
+ for i, node in enumerate(self.nodes()):
+ node_gid_table[i] = node.node_id
+ node_type_id_table[i] = node.node_type_id
+ group_id = groups_lookup[node.params_hash]
+ node_group_table[i] = group_id
+ node_group_index_tables[i] = group_indicies[group_id]
+ group_indicies[group_id] += 1
+
+ group_dict = group_props[group_id]
+ for key, prop_ds in group_dict.items():
+ prop_ds.append(node.params[key])
+
+ # TODO: open in append mode
+ with h5py.File(nodes_file_name, 'w') as hf:
+ # Add magic and version attribute
+ add_hdf5_attrs(hf)
+
+ pop_grp = hf.create_group('/nodes/{}'.format(self.name))
+ pop_grp.create_dataset('node_id', data=node_gid_table, dtype='uint64')
+ pop_grp.create_dataset('node_type_id', data=node_type_id_table, dtype='uint64')
+ pop_grp.create_dataset('node_group_id', data=node_group_table, dtype='uint32')
+ pop_grp.create_dataset('node_group_index', data=node_group_index_tables, dtype='uint64')
+
+ for grp_id, props in group_props.items():
+ model_grp = pop_grp.create_group('{}'.format(grp_id))
+
+ for key, dataset in props.items():
+ # ds_path = 'nodes/{}/{}'.format(grp_id, key)
+ try:
+ model_grp.create_dataset(key, data=dataset)
+ except TypeError:
+ str_list = [str(d) for d in dataset]
+ model_grp.create_dataset(key, data=str_list)  # write into the model group, not the file root
+
+ def nodes_iter(self, node_ids=None):
+ if node_ids is not None:
+ return [n for n in self._nodes if n.node_id in node_ids]
+ else:
+ return self._nodes
+
+ def _process_nodepool(self, nodepool):
+ return nodepool
+
+ def import_nodes(self, nodes_file_name, node_types_file_name, population=None):
+ sonata_file = sonata.File(data_files=nodes_file_name, data_type_files=node_types_file_name)
+ if sonata_file.nodes is None:
+ raise Exception('nodes file {} does not have any nodes.'.format(nodes_file_name))
+
+ populations = sonata_file.nodes.populations
+ if len(populations) == 1:
+ node_pop = populations[0]
+ elif population is None:
+ raise Exception('The nodes file {} contains multiple populations.'.format(nodes_file_name) +
+ 'Please specify population parameter.')
+ else:
+ for pop in populations:
+ if pop.name == population:
+ node_pop = pop
+ break
+ else:
+ raise Exception('Nodes file {} does not contain population {}.'.format(nodes_file_name, population))
+
+ # print node_pop.node_types_table
+ for node_type_props in node_pop.node_types_table:
+ self._add_node_type(node_type_props)
+
+ for node in node_pop:
+ self._node_id_gen.remove_id(node.node_id)
+ self._nodes.append(Node(node.node_id, node.group_props, node.node_type_properties))
+
+ def _add_edges(self, connection_map, i):
+ syn_table = self.EdgeTable(connection_map)
+ connections = connection_map.connection_itr()
+ for con in connections:
+ if con[2] is not None:
+ syn_table[con[0], con[1]] = con[2]
+
+ target_net = connection_map.target_nodes
+ self._target_networks[target_net.network_name] = target_net.network
+
+ nsyns = np.sum(syn_table.nsyn_table)
+ self._nedges += int(nsyns)
+ edge_table = {'syn_table': syn_table,
+ 'nsyns': nsyns,
+ 'edge_types': connection_map.edge_type_properties,
+ 'edge_type_id': connection_map.edge_type_properties['edge_type_id'],
+ 'source_network': connection_map.source_nodes.network_name,
+ 'target_network': connection_map.target_nodes.network_name,
+ 'params': {},
+ 'params_dtypes': {},
+ 'source_query': connection_map.source_nodes.filter_str,
+ 'target_query': connection_map.target_nodes.filter_str}
+
+
+ for param in connection_map.params:
+ rule = param.rule
+ param_names = param.names
+ edge_table['params_dtypes'].update(param.dtypes)
+ if isinstance(param_names, list) or isinstance(param_names, tuple):
+ tmp_tables = [self.PropertyTable(nsyns) for _ in range(len(param_names))]
+ for source in connection_map.source_nodes:
+ src_node_id = source.node_id
+ for target in connection_map.target_nodes:
+ trg_node_id = target.node_id # TODO: pull this out and put in its own list
+ for _ in range(syn_table[src_node_id, trg_node_id]):
+ pvals = rule(source, target)
+ for i in range(len(param_names)):
+ tmp_tables[i][src_node_id, trg_node_id] = pvals[i]
+
+ for i, name in enumerate(param_names):
+ # TODO: I think a copy constructor might get called, move this out.
+ edge_table['params'][name] = tmp_tables[i]
+
+ else:
+ pt = self.PropertyTable(np.sum(nsyns))
+ for source in connection_map.source_nodes:
+ src_node_id = source.node_id
+ for target in connection_map.target_nodes:
+ trg_node_id = target.node_id # TODO: pull this out and put in its own list
+ #print('{}, {}: {}'.format(src_node_id, trg_node_id, edge_table[src_node_id, trg_node_id]))
+ for _ in range(syn_table[src_node_id, trg_node_id]):
+ pt[src_node_id, trg_node_id] = rule(source, target)
+ edge_table['params'][param_names] = pt
+
+ self.__edges_tables.append(edge_table)
+
+ def _save_edges(self, edges_file_name, src_network, trg_network, name=None):
+ groups = {}
+ group_dtypes = {} # TODO: this should be stored in PropertyTable
+ grp_id_itr = 0
+ groups_lookup = {}
+ total_syns = 0
+
+ matching_edge_tables = [et for et in self.__edges_tables
+ if et['source_network'] == src_network and et['target_network'] == trg_network]
+
+ for ets in matching_edge_tables:
+ params_hash = str(ets['params'].keys())
+ group_id = groups_lookup.get(params_hash, None)
+ if group_id is None:
+ group_id = grp_id_itr
+ groups_lookup[params_hash] = group_id
+ grp_id_itr += 1
+
+ ets['group_id'] = group_id
+ groups[group_id] = {}
+ group_dtypes[group_id] = ets['params_dtypes']
+ for param_name in ets['params'].keys():
+ groups[group_id][param_name] = []
+
+ total_syns += int(ets['nsyns'])
+
+ group_index_itrs = [0 for _ in range(grp_id_itr)]
+ trg_gids = np.zeros(total_syns) # set dtype to uint64
+ src_gids = np.zeros(total_syns)
+ edge_groups = np.zeros(total_syns) # dtype uint16 or uint8
+ edge_group_index = np.zeros(total_syns) # uint32
+ edge_type_ids = np.zeros(total_syns) # uint32
+
+ # TODO: Another potential issue if node-ids don't start with 0
+ index_ptrs = np.zeros(len(self._target_networks[trg_network].nodes()) + 1)
+ #index_ptrs = np.zeros(len(self._nodes)+1) # TODO: issue when target nodes come from another network
+ index_ptr_itr = 0
+
+ gid_indx = 0
+ for trg_node in self._target_networks[trg_network].nodes():
+ index_ptrs[index_ptr_itr] = gid_indx
+ index_ptr_itr += 1
+
+ for ets in matching_edge_tables:
+ edge_group_id = ets['group_id']
+ group_table = groups[edge_group_id]
+
+ syn_table = ets['syn_table']
+ if syn_table.has_target(trg_node.node_id):
+ if ets['params']:
+ for src_id, nsyns in syn_table.trg_itr(trg_node.node_id):
+ # Add on to the edges index
+ indx_end = gid_indx+nsyns
+ while gid_indx < indx_end:
+ trg_gids[gid_indx] = trg_node.node_id
+ src_gids[gid_indx] = src_id
+ edge_type_ids[gid_indx] = ets['edge_type_id']
+ edge_groups[gid_indx] = edge_group_id
+ edge_group_index[gid_indx] = group_index_itrs[edge_group_id]
+ group_index_itrs[edge_group_id] += 1
+ gid_indx += 1
+
+ for param_name, param_table in ets['params'].items():
+ param_vals = group_table[param_name]
+ for val in param_table.itr_vals(src_id, trg_node.node_id):
+ param_vals.append(val)
+
+ else:
+ # If there are no properties, just write the nsyns table.
+ if 'nsyns' not in group_table:
+ group_table['nsyns'] = []
+ group_dtypes[edge_group_id]['nsyns'] = 'uint16'
+ for src_id, nsyns in syn_table.trg_itr(trg_node.node_id):
+ trg_gids[gid_indx] = trg_node.node_id
+ src_gids[gid_indx] = src_id
+ edge_type_ids[gid_indx] = ets['edge_type_id']
+ edge_groups[gid_indx] = edge_group_id
+ edge_group_index[gid_indx] = group_index_itrs[edge_group_id]
+ # group_dtypes
+ group_index_itrs[edge_group_id] += 1
+ gid_indx += 1
+
+ group_table['nsyns'].append(nsyns)
+
+ trg_gids = trg_gids[:gid_indx]
+ src_gids = src_gids[:gid_indx]
+ edge_groups = edge_groups[:gid_indx]
+ edge_group_index = edge_group_index[:gid_indx]
+ edge_type_ids = edge_type_ids[:gid_indx]
+
+ pop_name = '{}_to_{}'.format(src_network, trg_network) if name is None else name
+
+ index_ptrs[index_ptr_itr] = gid_indx
+ with h5py.File(edges_file_name, 'w') as hf:
+ add_hdf5_attrs(hf)
+ pop_grp = hf.create_group('/edges/{}'.format(pop_name))
+ pop_grp.create_dataset('target_node_id', data=trg_gids, dtype='uint64')
+ pop_grp['target_node_id'].attrs['node_population'] = trg_network
+ pop_grp.create_dataset('source_node_id', data=src_gids, dtype='uint64')
+ pop_grp['source_node_id'].attrs['node_population'] = src_network
+
+ pop_grp.create_dataset('edge_group_id', data=edge_groups, dtype='uint16')
+ pop_grp.create_dataset('edge_group_index', data=edge_group_index, dtype='uint32')
+ pop_grp.create_dataset('edge_type_id', data=edge_type_ids, dtype='uint32')
+ # pop_grp.create_dataset('edges/index_pointer', data=index_ptrs, dtype='uint32')
+
+ for group_id, params_dict in groups.items():
+ model_grp = pop_grp.create_group(str(group_id))
+ for params_key, params_vals in params_dict.items():
+ #group_path = 'edges/{}/{}'.format(group_id, params_key)
+ dtype = group_dtypes[group_id][params_key]
+ if dtype is not None:
+ model_grp.create_dataset(params_key, data=list(params_vals), dtype=dtype)
+ else:
+ model_grp.create_dataset(params_key, data=list(params_vals))
+
+ self._create_index(pop_grp['target_node_id'], pop_grp, index_type='target')
+ self._create_index(pop_grp['source_node_id'], pop_grp, index_type='source')
+
+ def _create_index(self, node_ids_ds, output_grp, index_type='target'):
+ if index_type == 'target':
+ edge_nodes = np.array(node_ids_ds, dtype=np.int64)
+ output_grp = output_grp.create_group('indicies/target_to_source')
+ elif index_type == 'source':
+ edge_nodes = np.array(node_ids_ds, dtype=np.int64)
+ output_grp = output_grp.create_group('indicies/source_to_target')
+
+ edge_nodes = np.append(edge_nodes, [-1])
+ n_targets = np.max(edge_nodes)
+ ranges_list = [[] for _ in six.moves.range(n_targets + 1)]
+
+ n_ranges = 0
+ begin_index = 0
+ cur_trg = edge_nodes[begin_index]
+ for end_index, trg_gid in enumerate(edge_nodes):
+ if cur_trg != trg_gid:
+ ranges_list[cur_trg].append((begin_index, end_index))
+ cur_trg = int(trg_gid)
+ begin_index = end_index
+ n_ranges += 1
+
+ node_id_to_range = np.zeros((n_targets + 1, 2))
+ range_to_edge_id = np.zeros((n_ranges, 2))
+ range_index = 0
+ for node_index, trg_ranges in enumerate(ranges_list):
+ if len(trg_ranges) > 0:
+ node_id_to_range[node_index, 0] = range_index
+ for r in trg_ranges:
+ range_to_edge_id[range_index, :] = r
+ range_index += 1
+ node_id_to_range[node_index, 1] = range_index
+
+ output_grp.create_dataset('range_to_edge_id', data=range_to_edge_id, dtype='uint64')
+ output_grp.create_dataset('node_id_to_range', data=node_id_to_range, dtype='uint64')
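+ # Lookup sketch (illustrative, not part of the class API; index_grp stands
+ # for the group created above): node_id_to_range[n] is a [start, stop) slice
+ # into range_to_edge_id, and each of those rows is a [begin, end) range of
+ # edge rows belonging to node n:
+ #
+ #   start, stop = index_grp['node_id_to_range'][n]
+ #   for begin, end in index_grp['range_to_edge_id'][int(start):int(stop)]:
+ #       edge_rows = range(int(begin), int(end))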
+
+ def _clear(self):
+ self._nedges = 0
+ self._nnodes = 0
+
+ def edges_iter(self, trg_gids, src_network=None, trg_network=None):
+ matching_edge_tables = self.__edges_tables
+ if trg_network is not None:
+ matching_edge_tables = [et for et in self.__edges_tables if et['target_network'] == trg_network]
+
+ if src_network is not None:
+ matching_edge_tables = [et for et in matching_edge_tables if et['source_network'] == src_network]
+
+ for trg_gid in trg_gids:
+ for ets in matching_edge_tables:
+ syn_table = ets['syn_table']
+ if syn_table.has_target(trg_gid):
+ for src_id, nsyns in syn_table.trg_itr(trg_gid):
+ if ets['params']:
+ synapses = [{} for _ in range(nsyns)]
+ for param_name, param_table in ets['params'].items():
+ for i, val in enumerate(param_table[src_id, trg_gid]):
+ synapses[i][param_name] = val
+ for syn_prop in synapses:
+ yield Edge(src_gid=src_id, trg_gid=trg_gid, edge_type_props=ets['edge_types'],
+ syn_props=syn_prop)
+ else:
+ yield Edge(src_gid=src_id, trg_gid=trg_gid, edge_type_props=ets['edge_types'],
+ syn_props={'nsyns': nsyns})
+
+ @property
+ def nnodes(self):
+ if not self.nodes_built:
+ return 0
+ return self._nnodes
+
+ @property
+ def nedges(self):
+ return self._nedges
+
+ class EdgeTable(object):
+ def __init__(self, connection_map):
+ # TODO: save column and row lengths
+ # Create maps between source_node gids and their row in the matrix.
+ self.__idx2src = [n.node_id for n in connection_map.source_nodes]
+ self.__src2idx = {node_id: i for i, node_id in enumerate(self.__idx2src)}
+
+ # Create maps between target_node gids and their column in the matrix
+ self.__idx2trg = [n.node_id for n in connection_map.target_nodes]
+ self.__trg2idx = {node_id: i for i, node_id in enumerate(self.__idx2trg)}
+
+ self._nsyn_table = np.zeros((len(self.__idx2src), len(self.__idx2trg)), dtype=np.uint8)
+
+ def __getitem__(self, item):
+ # TODO: make sure matrix is column oriented, or switch trg and srcs.
+ indexed_pair = (self.__src2idx[item[0]], self.__trg2idx[item[1]])
+ return self._nsyn_table[indexed_pair]
+
+ def __setitem__(self, key, value):
+ assert(len(key) == 2)
+ indexed_pair = (self.__src2idx[key[0]], self.__trg2idx[key[1]])
+ self._nsyn_table[indexed_pair] = value
+
+ def has_target(self, node_id):
+ return node_id in self.__trg2idx
+
+ @property
+ def nsyn_table(self):
+ return self._nsyn_table
+
+ @property
+ def target_ids(self):
+ return self.__idx2trg
+
+ @property
+ def source_ids(self):
+ return self.__idx2src
+
+ def trg_itr(self, trg_id):
+ trg_i = self.__trg2idx[trg_id]
+ for src_j, src_id in enumerate(self.__idx2src):
+ nsyns = self._nsyn_table[src_j, trg_i]
+ if nsyns:
+ yield src_id, nsyns
+
+ class PropertyTable(object):
+ # TODO: add support for strings
+ def __init__(self, nvalues):
+ self._prop_array = np.zeros(nvalues)
+ # self._prop_table = np.zeros((nvalues, 1)) # TODO: set dtype
+ self._index = np.zeros((nvalues, 2), dtype=np.uint32)
+ self._itr_index = 0
+
+ def itr_vals(self, src_id, trg_id):
+ indicies = np.where((self._index[:, 0] == src_id) & (self._index[:, 1] == trg_id))
+ for val in self._prop_array[indicies]:
+ yield val
+
+ def __setitem__(self, key, value):
+ self._index[self._itr_index, 0] = key[0] # src_node_id
+ self._index[self._itr_index, 1] = key[1] # trg_node_id
+ self._prop_array[self._itr_index] = value
+ self._itr_index += 1
+
+ def __getitem__(self, item):
+ indicies = np.where((self._index[:, 0] == item[0]) & (self._index[:, 1] == item[1]))
+ return self._prop_array[indicies]
+
+
+def add_hdf5_attrs(hdf5_handle):
+ # TODO: move this as a utility function
+ hdf5_handle['/'].attrs['magic'] = np.uint32(0x0A7A)
+ hdf5_handle['/'].attrs['version'] = [np.uint32(0), np.uint32(1)]
diff --git a/bmtk-vb/bmtk/builder/networks/dm_network.pyc b/bmtk-vb/bmtk/builder/networks/dm_network.pyc
new file mode 100644
index 0000000..e8c42c7
Binary files /dev/null and b/bmtk-vb/bmtk/builder/networks/dm_network.pyc differ
diff --git a/bmtk-vb/bmtk/builder/networks/input_network.py b/bmtk-vb/bmtk/builder/networks/input_network.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/networks/input_network.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/bmtk/builder/networks/mpi_network.py b/bmtk-vb/bmtk/builder/networks/mpi_network.py
new file mode 100644
index 0000000..aa6a51e
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/networks/mpi_network.py
@@ -0,0 +1,171 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .dm_network import DenseNetwork
+from mpi4py import MPI
+from heapq import heappush, heappop
+import h5py
+
+comm = MPI.COMM_WORLD
+rank = comm.Get_rank()
+nprocs = comm.Get_size()
+
+
+class MPINetwork(DenseNetwork):
+ def __init__(self, name, **network_props):
+ super(MPINetwork, self).__init__(name, **network_props or {})
+ self._edge_assignment = None
+
+ def _add_edges(self, connection_map, i):
+ if self._assign_to_rank(i):
+ super(MPINetwork, self)._add_edges(connection_map, i)
+
+ def save_nodes(self, nodes_file_name, node_types_file_name):
+ if rank == 0:
+ super(MPINetwork, self).save_nodes(nodes_file_name, node_types_file_name)
+ comm.Barrier()
+
+ """
+ def save_edges(self, edges_file_name=None, edge_types_file_name=None, output_dir='.', src_network=None,
+ trg_network=None, force_build=True, force_overwrite=False):
+
+ if rank == 0:
+ # print rank, len(self.edges_table())
+ super(MPINetwork, self).save_edges(edges_file_name, edge_types_file_name, output_dir, src_network,
+ trg_network, force_build, force_overwrite)
+
+ comm.Barrier()
+ """
+
+ def edges_iter(self, trg_gids, src_network=None, trg_network=None):
+ for trg_gid in trg_gids:
+ edges = list(super(MPINetwork, self).edges_iter([trg_gid], src_network, trg_network))
+ collected_edges = comm.gather(edges, root=0)
+ if rank == 0:
+ for edge_list in collected_edges:
+ for edge in edge_list:
+ # print 'b'
+ yield edge
+ else:
+ yield None
+
+ comm.Barrier()
+
+ def _save_edges(self, edges_file_name, src_network, trg_network):
+ target_gids = [n.node_id for n in self._target_networks[trg_network].nodes()]
+ # TODO: make sure target_gids are sorted
+
+ trg_gids_ds = []
+ src_gids_ds = []
+ edge_type_id_ds = []
+ edge_group_ds = []
+ edge_group_index_ds = []
+
+ eg_collection = {}
+ eg_ids = 0
+ eg_lookup = {}
+ eg_table = {}
+ eg_indices = {}
+ for cm in self.get_connections():
+ col_key = cm.properties_keys()
+ if col_key in eg_collection:
+ group_id = eg_collection[col_key]
+ else:
+ group_id = eg_ids
+ eg_collection[col_key] = group_id
+ eg_ids += 1
+ eg_lookup[cm.edge_type_id] = group_id
+ eg_indices[group_id] = 0
+ eg_table[group_id] = {k: [] for k in cm.property_names}
+
+ for e in self.edges_iter(target_gids, src_network=src_network, trg_network=trg_network):
+ if rank == 0:
+ trg_gids_ds.append(e.target_gid)
+ src_gids_ds.append(e.source_gid)
+ edge_type_id_ds.append(e.edge_type_id)
+
+ group_id = eg_lookup[e.edge_type_id]
+ edge_group_ds.append(group_id)
+ group_id_index = eg_indices[group_id]
+ edge_group_index_ds.append(group_id_index)
+ eg_indices[group_id] += 1
+
+ for k, v in e.synaptic_properties.items():
+ eg_table[group_id][k].append(v)
+
+ if rank == 0:
+ # Create index from target_gids dataset
+ index_pointer_ds = []
+ cur_gid = 0
+ index = 0
+ while index < len(trg_gids_ds):
+ if trg_gids_ds[index] == cur_gid:
+ index += 1
+ else:
+ cur_gid += 1
+ index_pointer_ds.append(index)
+ index_pointer_ds.append(len(trg_gids_ds)+1)
+
+
+ with h5py.File(edges_file_name, 'w') as hf:
+ hf.create_dataset('edges/target_gid', data=trg_gids_ds, dtype='uint64')
+ hf['edges/target_gid'].attrs['network'] = trg_network
+ hf.create_dataset('edges/source_gid', data=src_gids_ds, dtype='uint64')
+ hf['edges/source_gid'].attrs['network'] = src_network
+
+ hf.create_dataset('edges/edge_group', data=edge_group_ds, dtype='uint16')
+ hf.create_dataset('edges/edge_group_index', data=edge_group_index_ds, dtype='uint32')
+ hf.create_dataset('edges/edge_type_id', data=edge_type_id_ds, dtype='uint32')
+ hf.create_dataset('edges/index_pointer', data=index_pointer_ds, dtype='uint32')
+
+ for gid, group in eg_table.items():
+ for col_key, col_ds in group.items():
+ ds_loc = 'edges/{}/{}'.format(gid, col_key)
+ hf.create_dataset(ds_loc, data=col_ds)
+
+ comm.Barrier()
+
+ def _assign_to_rank(self, i):
+ if self._edge_assignment is None:
+ self._build_rank_assignments()
+
+ return rank == self._edge_assignment[i]
+
+ def _build_rank_assignments(self):
+ """Builds the _edge_assignment array.
+
+ Division of connections is decided by the maximum possible number of edges (i.e. number of source times
+ target nodes). In the end the assignment should balance the connection matrix sizes needed by each rank.
+ """
+ rank_heap = [] # A heap of tuples (weight, rank #)
+ for a in range(nprocs):
+ heappush(rank_heap, (0, a))
+
+ # find the rank with the lowest weight, assign that rank to build the i'th connection matrix, update the rank's
+ # weight and re-add to the heap.
+ # TODO: sort connection_maps in descending order to get better balance
+ self._edge_assignment = []
+ for cm in self.get_connections():
+ r = heappop(rank_heap)
+ self._edge_assignment.append(r[1])
+ heappush(rank_heap, (r[0] + cm.max_connections(), r[1]))
+
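+# Worked example (illustrative): with 2 ranks and connection maps whose
+# max_connections() are [6, 5, 4, 3], the heap assigns ranks [0, 1, 1, 0],
+# leaving both ranks with a total weight of 9.
+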
diff --git a/bmtk-vb/bmtk/builder/networks/mpi_network.pyc b/bmtk-vb/bmtk/builder/networks/mpi_network.pyc
new file mode 100644
index 0000000..cdd6730
Binary files /dev/null and b/bmtk-vb/bmtk/builder/networks/mpi_network.pyc differ
diff --git a/bmtk-vb/bmtk/builder/networks/nxnetwork.py b/bmtk-vb/bmtk/builder/networks/nxnetwork.py
new file mode 100644
index 0000000..3424fd6
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/networks/nxnetwork.py
@@ -0,0 +1,80 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import networkx as nx
+
+from bmtk.builder.network import Network
+from bmtk.builder.node import Node
+
+
+class NxNetwork(Network):
+ def __init__(self, name, **network_props):
+ super(NxNetwork, self).__init__(name, **network_props or {})
+
+ self.net = nx.MultiDiGraph()
+ self.__nodes = []
+
+
+ def _initialize(self):
+ self.net.clear()
+
+ def _add_nodes(self, nodes):
+ self.__nodes += nodes
+ self.net.add_nodes_from(nodes)
+
+ def _add_edges(self, edge, connections):
+ for src, trg, nsyns in connections:
+ self.net.add_edge(src, trg, nsyns=nsyns, edge_type_id=edge.edge_type_id)
+
+
+ def _clear(self):
+ self.net.clear()
+
+    def _nodes_iter(self, nids=None):
+        # self.__nodes holds Node objects; filter on node_id when nids is given
+        if nids is not None:
+            return (n for n in self.__nodes if n.node_id in nids)
+        else:
+            return self.__nodes
+            #return self.net.nodes_iter(data=True)
+
+    def _edges_iter(self, nids=None, rank=0):
+        if nids is None or len(nids) == 0:
+ for e in self.net.edges(data=True):
+ yield (e[0], e[1], e[2]['nsyns'], e[2]['edge_type_id'])
+ #return self.net.edges(data=True)
+ elif rank == 0:
+ for e in self.net.out_edges(nids, data=True):
+ yield (e[0], e[1], e[2]['nsyns'], e[2]['edge_type_id'])
+ else:
+ for e in self.net.in_edges(nids, data=True):
+ yield (e[0], e[1], e[2]['nsyns'], e[2]['edge_type_id'])
+ #return self.net.in_edges(nids, data=True)
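+
+    # Iteration semantics sketch: _edges_iter(None) yields every edge in the
+    # graph; with nids given, rank 0 yields the out-edges of those nodes while
+    # any other rank yields their in-edges, each emitted as the tuple
+    # (src, trg, nsyns, edge_type_id).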
+
+ @property
+ def nnodes(self):
+ return nx.number_of_nodes(self.net)
+
+ @property
+ def nedges(self):
+ return nx.number_of_edges(self.net)
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/builder/networks/sparse_network.py b/bmtk-vb/bmtk/builder/networks/sparse_network.py
new file mode 100644
index 0000000..035aaeb
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/networks/sparse_network.py
@@ -0,0 +1,26 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from bmtk.builder.network import Network
+
+class SparseNetwork(Network):
+ pass
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/builder/node.py b/bmtk-vb/bmtk/builder/node.py
new file mode 100644
index 0000000..6d1b295
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/node.py
@@ -0,0 +1,76 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+class Node(dict):
+ def __init__(self, node_id, node_params, node_type_properties, params_hash=-1):
+ super(Node, self).__init__({})
+
+ self._node_params = node_params
+ self._node_params['node_id'] = node_id
+ self._node_type_properties = node_type_properties
+ self._params_hash = params_hash
+ self._node_id = node_id
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def node_type_id(self):
+ return self._node_type_properties['node_type_id']
+
+ @property
+ def params(self):
+ return self._node_params
+
+ @property
+ def node_type_properties(self):
+ return self._node_type_properties
+
+ @property
+ def params_hash(self):
+ return self._params_hash
+
+ def get(self, key, default=None):
+ if key in self._node_params:
+ return self._node_params[key]
+ elif key in self._node_type_properties:
+ return self._node_type_properties[key]
+ else:
+ return default
+
+ def __contains__(self, item):
+ return item in self._node_type_properties or item in self._node_params
+
+ def __getitem__(self, item):
+ if item in self._node_params:
+ return self._node_params[item]
+ else:
+ return self._node_type_properties[item]
+
+ def __hash__(self):
+ return hash(self.node_id)
+
+ def __repr__(self):
+ tmp_dict = dict(self._node_type_properties)
+ tmp_dict.update(self._node_params)
+ return tmp_dict.__repr__()
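+
+    # Minimal usage sketch (hypothetical values): per-node params shadow the
+    # shared node-type properties on lookup.
+    #
+    #   >>> n = Node(0, {'x': 1.0}, {'node_type_id': 100, 'ei': 'e'})
+    #   >>> (n['x'], n['ei'], n.get('missing', 'default'))
+    #   (1.0, 'e', 'default')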
diff --git a/bmtk-vb/bmtk/builder/node.pyc b/bmtk-vb/bmtk/builder/node.pyc
new file mode 100644
index 0000000..fda7bcb
Binary files /dev/null and b/bmtk-vb/bmtk/builder/node.pyc differ
diff --git a/bmtk-vb/bmtk/builder/node_pool.py b/bmtk-vb/bmtk/builder/node_pool.py
new file mode 100644
index 0000000..2e1bb18
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/node_pool.py
@@ -0,0 +1,106 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from ast import literal_eval
+from six import string_types
+
+
+class NodePool(object):
+ """Stores a collection of nodes based off some query of the network.
+
+    Returns the results of a query of nodes from a network using the nodes() method. Nodes are still generated and
+    saved by the network; this class just stores the query information and provides iterator methods for accessing the
+    selected nodes.
+
+ TODO:
+ * Implement a collection-set algebra including | and not operators. ie.
+ nodes = net.nodes(type=1) | net.nodes(type=2)
+ * Implement operators on properties
+ nodes = net.nodes(val) > 100
+ nodes = 100 in net.nodes(val)
+ """
+
+ def __init__(self, network, **properties):
+ self.__network = network
+ self.__properties = properties
+ self.__filter_str = None
+
+ def __len__(self):
+ return sum(1 for _ in self)
+
+ def __iter__(self):
+ return (n for n in self.__network.nodes_iter() if self.__query_object_properties(n, self.__properties))
+
+ @property
+ def network(self):
+ return self.__network
+
+ @property
+ def network_name(self):
+ return self.__network.name
+
+ @property
+ def filter_str(self):
+ if self.__filter_str is None:
+ if len(self.__properties) == 0:
+ self.__filter_str = '*'
+ else:
+ self.__filter_str = ''
+ for k, v in self.__properties.items():
+ conditional = "{}=='{}'".format(k, v)
+ self.__filter_str += conditional + '&'
+ if self.__filter_str.endswith('&'):
+ self.__filter_str = self.__filter_str[0:-1]
+
+ return self.__filter_str
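+
+    # e.g. properties {'ei': 'e', 'location': 'L4'} serialize to the filter
+    # string "ei=='e'&location=='L4'" (term order follows dict iteration).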
+
+ @classmethod
+    def from_filter(cls, network, filter_str):
+        assert(isinstance(filter_str, string_types))
+        if len(filter_str) == 0 or filter_str == '*':
+            return cls(network)
+
+        properties = {}
+        for conditional in filter_str.split('&'):
+            var, val = conditional.split('==')
+            properties[var] = literal_eval(val)
+        return cls(network, **properties)
+
+ def __query_object_properties(self, obj, props):
+ if props is None:
+ return True
+
+ for k, v in props.items():
+ ov = obj.get(k, None)
+ if ov is None:
+ return False
+
+ if hasattr(v, '__call__'):
+ if not v(ov):
+ return False
+ elif isinstance(v, list):
+ if ov not in v:
+ return False
+ elif ov != v:
+ return False
+
+ return True
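+
+    # Matching-semantics sketch (net is a hypothetical Network instance):
+    # callables act as predicates, lists as membership tests, and scalars as
+    # plain equality.
+    #
+    #   net.nodes(ei='e')                   # ei == 'e'
+    #   net.nodes(ei=['e', 'i'])            # ei in ['e', 'i']
+    #   net.nodes(depth=lambda d: d > 300)  # predicate on the value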
diff --git a/bmtk-vb/bmtk/builder/node_pool.pyc b/bmtk-vb/bmtk/builder/node_pool.pyc
new file mode 100644
index 0000000..2448378
Binary files /dev/null and b/bmtk-vb/bmtk/builder/node_pool.pyc differ
diff --git a/bmtk-vb/bmtk/builder/node_set.py b/bmtk-vb/bmtk/builder/node_set.py
new file mode 100644
index 0000000..59c1918
--- /dev/null
+++ b/bmtk-vb/bmtk/builder/node_set.py
@@ -0,0 +1,71 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import six
+from .node import Node
+
+
+class NodeSet(object):
+ def __init__(self, N, node_params, node_type_properties):
+ self.__N = N
+ self.__node_params = node_params
+ self.__node_type_properties = node_type_properties
+
+ assert('node_type_id' in node_type_properties)
+ self.__node_type_id = node_type_properties['node_type_id']
+
+ # Used for determining which node_sets share the same params columns
+ columns = list(self.__node_params.keys())
+ columns.sort()
+ self.__params_col_hash = hash(str(columns))
+
+ @property
+ def N(self):
+ return self.__N
+
+ @property
+ def node_type_id(self):
+ return self.__node_type_id
+
+ @property
+ def params_keys(self):
+ return self.__node_params.keys()
+
+ @property
+ def params_hash(self):
+ return self.__params_col_hash
+
+ def build(self, nid_generator):
+ # fetch existing node ids or create new ones
+ node_ids = self.__node_params.get('node_id', None)
+ if node_ids is None:
+ node_ids = [nid for nid in nid_generator(self.N)]
+
+ # turn node_params from dictionary of lists to a list of dictionaries.
+ ap_flat = [{} for _ in six.moves.range(self.N)]
+ for key, plist in self.__node_params.items():
+ for i, val in enumerate(plist):
+ ap_flat[i][key] = val
+
+ # create node objects
+ return [Node(nid, params, self.__node_type_properties, self.__params_col_hash)
+ for (nid, params) in zip(node_ids, ap_flat)]
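+
+    # Shape of the transformation above (hypothetical params): a dict of
+    # per-column lists becomes one params dict per node.
+    #   {'x': [0.0, 1.0], 'tuning': [90, 270]}
+    #     -> [{'x': 0.0, 'tuning': 90}, {'x': 1.0, 'tuning': 270}]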
diff --git a/bmtk-vb/bmtk/builder/node_set.pyc b/bmtk-vb/bmtk/builder/node_set.pyc
new file mode 100644
index 0000000..9692157
Binary files /dev/null and b/bmtk-vb/bmtk/builder/node_set.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/__init__.py b/bmtk-vb/bmtk/simulator/__init__.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/bmtk/simulator/__init__.pyc b/bmtk-vb/bmtk/simulator/__init__.pyc
new file mode 100644
index 0000000..abfe6a5
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/simulator/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..065f4a1
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/#biosimulator.py# b/bmtk-vb/bmtk/simulator/bionet/#biosimulator.py#
new file mode 100644
index 0000000..773d796
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/#biosimulator.py#
@@ -0,0 +1,361 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import time
+from six import string_types
+from neuron import h
+from bmtk.simulator.core.simulator import Simulator
+from bmtk.simulator.bionet.io_tools import io
+from bmtk.simulator.bionet.iclamp import IClamp
+from bmtk.simulator.bionet import modules as mods
+from bmtk.simulator.core.node_sets import NodeSet
+import bmtk.simulator.utils.simulation_reports as reports
+import bmtk.simulator.utils.simulation_inputs as inputs
+from bmtk.utils.io import spike_trains
+
+
+pc = h.ParallelContext() # object to access MPI methods
+
+
+class BioSimulator(Simulator):
+ """Includes methods to run and control the simulation"""
+
+ def __init__(self, network, dt, tstop, v_init, celsius, cao0, nsteps_block, start_from_state=False):
+ self.net = network
+
+ self._start_from_state = start_from_state
+ self.dt = dt
+ self.tstop = tstop
+
+ self._v_init = v_init
+ self._celsius = celsius
+ self._cao0 = cao0
+ self._h = h
+
+ self.tstep = int(round(h.t / h.dt))
+ self.tstep_start_block = self.tstep
+ self.nsteps = int(round(h.tstop/h.dt))
+
+        # make sure the block size isn't larger than the total number of steps
+ # TODO: should we send a warning that block-step size is being reset?
+ self._nsteps_block = nsteps_block if self.nsteps > nsteps_block else self.nsteps
+
+ self.__tstep_end_block = 0
+ self.__tstep_start_block = 0
+
+ h.runStopAt = h.tstop
+ h.steps_per_ms = 1/h.dt
+
+ self._set_init_conditions() # call to save state
+ h.cvode.cache_efficient(1)
+
+ h.pysim = self # use this objref to be able to call postFadvance from proc advance in advance.hoc
+ self._iclamps = []
+
+ self._output_dir = 'output'
+ self._log_file = 'output/log.txt'
+
+        self._spikes = {} # for keeping track of different spike times, keyed by cell gid
+
+ self._cell_variables = [] # location of saved cell variables
+ self._cell_vars_dir = 'output/cellvars'
+
+ self._sim_mods = [] # list of modules.SimulatorMod's
+
+ @property
+ def dt(self):
+ return h.dt
+
+ @dt.setter
+ def dt(self, ms):
+ h.dt = ms
+
+ @property
+ def tstop(self):
+ return h.tstop
+
+ @tstop.setter
+ def tstop(self, ms):
+ h.tstop = ms
+
+ @property
+ def v_init(self):
+ return self._v_init
+
+ @v_init.setter
+ def v_init(self, voltage):
+ self._v_init = voltage
+
+ @property
+ def celsius(self):
+ return self._celsius
+
+ @celsius.setter
+ def celsius(self, c):
+ self._celsius = c
+
+ @property
+ def cao0(self):
+ return self._cao0
+
+ @cao0.setter
+ def cao0(self, cao):
+ self._cao0 = cao
+
+ @property
+ def n_steps(self):
+ return int(round(self.tstop/self.dt))
+
+ @property
+ def cell_variables(self):
+ return self._cell_variables
+
+ @property
+ def cell_var_output(self):
+ return self._cell_vars_dir
+
+ @property
+ def spikes_table(self):
+ return self._spikes
+
+ @property
+ def nsteps_block(self):
+ return self._nsteps_block
+
+ @property
+ def h(self):
+ return self._h
+
+ @property
+ def biophysical_gids(self):
+ return self.net.cell_type_maps('biophysical').keys()
+
+ @property
+ def local_gids(self):
+ # return self.net.get
+ return self.net.local_gids
+
+ def __elapsed_time(self, time_s):
+ if time_s < 120:
+ return '{:.4} seconds'.format(time_s)
+ elif time_s < 7200:
+ mins, secs = divmod(time_s, 60)
+ return '{} minutes, {:.4} seconds'.format(mins, secs)
+ else:
+ mins, secs = divmod(time_s, 60)
+ hours, mins = divmod(mins, 60)
+ return '{} hours, {} minutes and {:.4} seconds'.format(hours, mins, secs)
+
+ def _set_init_conditions(self):
+ """Set up the initial conditions: either read from the h.SaveState or from config["condidtions"]"""
+ pc.set_maxstep(10)
+ h.stdinit()
+ self.tstep = int(round(h.t/h.dt))
+ self.tstep_start_block = self.tstep
+
+ if self._start_from_state:
+ # io.read_state()
+ io.log_info('Read the initial state saved at t_sim: {} ms'.format(h.t))
+ else:
+ h.v_init = self.v_init
+
+ h.celsius = self.celsius
+ h.cao0_ca_ion = self.cao0
+
+ def set_spikes_recording(self):
+ for gid, _ in self.net.get_local_cells().items():
+ tvec = self.h.Vector()
+ gidvec = self.h.Vector()
+ pc.spike_record(gid, tvec, gidvec)
+ self._spikes[gid] = tvec
+
+ def attach_current_clamp(self, amplitude, delay, duration, gids=None):
+ # TODO: verify current clamp works with MPI
+        # TODO: Create appropriate module
+        if gids is None:
+            gids = self.biophysical_gids
+ if isinstance(gids, int):
+ gids = [gids]
+ elif isinstance(gids, string_types):
+ gids = [int(gids)]
+ elif isinstance(gids, NodeSet):
+ gids = gids.gids()
+
+ gids = list(set(self.local_gids) & set(gids))
+ for gid in gids:
+ cell = self.net.get_cell_gid(gid)
+ Ic = IClamp(amplitude, delay, duration)
+ Ic.attach_current(cell)
+ self._iclamps.append(Ic)
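+
+        # Accepted forms for 'gids' (sketch): None -> all biophysical gids,
+        # 5 -> [5], '5' -> [5], a NodeSet -> its .gids(); only gids local to
+        # this MPI rank are actually clamped.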
+
+ def add_mod(self, module):
+ self._sim_mods.append(module)
+
+ def run(self):
+ """Run the simulation:
+ if beginning from a blank state, then will use h.run(),
+ if continuing from the saved state, then will use h.continuerun()
+ """
+ for mod in self._sim_mods:
+ mod.initialize(self)
+
+ self.start_time = h.startsw()
+ s_time = time.time()
+ pc.timeout(0)
+
+ pc.barrier() # wait for all hosts to get to this point
+ io.log_info('Running simulation for {:.3f} ms with the time step {:.3f} ms'.format(self.tstop, self.dt))
+ io.log_info('Starting timestep: {} at t_sim: {:.3f} ms'.format(self.tstep, h.t))
+ io.log_info('Block save every {} steps'.format(self.nsteps_block))
+
+ if self._start_from_state:
+ h.continuerun(h.tstop)
+ else:
+            h.run(h.tstop) # <- runs simulation: works in parallel
+
+ pc.barrier()
+
+ for mod in self._sim_mods:
+ mod.finalize(self)
+ pc.barrier()
+
+ end_time = time.time()
+
+ sim_time = self.__elapsed_time(end_time - s_time)
+        io.log_info('Simulation completed in {}'.format(sim_time))
+
+ def report_load_balance(self):
+ comptime = pc.step_time()
+ avgcomp = pc.allreduce(comptime, 1)/pc.nhost()
+ maxcomp = pc.allreduce(comptime, 2)
+ io.log_info('Maximum compute time is {} seconds.'.format(maxcomp))
+ io.log_info('Approximate exchange time is {} seconds.'.format(comptime - maxcomp))
+ if maxcomp != 0.0:
+ io.log_info('Load balance is {}.'.format(avgcomp/maxcomp))
+
+ def post_fadvance(self):
+ """
+        Runs after every execution of fadvance (see advance.hoc).
+        Called after every time step to perform computation and save data to a memory block or to disk.
+        The initial condition tstep=0 is not saved.
+ """
+ for mod in self._sim_mods:
+ mod.step(self, self.tstep)
+
+ self.tstep += 1
+
+ if (self.tstep % self.nsteps_block == 0) or self.tstep == self.nsteps:
+ io.log_info(' step:{} t_sim:{:.2f} ms'.format(self.tstep, h.t))
+ self.__tstep_end_block = self.tstep
+ time_step_interval = (self.__tstep_start_block, self.__tstep_end_block)
+
+ for mod in self._sim_mods:
+ mod.block(self, time_step_interval)
+
+ self.__tstep_start_block = self.tstep # starting point for the next block
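+
+            # Block bookkeeping sketch: with nsteps_block == 5000, modules
+            # receive block intervals (0, 5000), (5000, 10000), ... plus a
+            # final partial interval ending at nsteps.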
+
+ @classmethod
+ def from_config(cls, config, network, set_recordings=True):
+ # TODO: convert from json to sonata config if necessary
+
+ sim = cls(network=network,
+ dt=config.dt,
+ tstop=config.tstop,
+ v_init=config.v_init,
+ celsius=config.celsius,
+ cao0=config.cao0,
+ nsteps_block=config.block_step)
+
+ network.io.log_info('Building cells.')
+ network.build_nodes()
+
+ network.io.log_info('Building recurrent connections')
+ network.build_recurrent_edges()
+
+ # TODO: Need to create a gid selector
+ for sim_input in inputs.from_config(config):
+ node_set = network.get_node_set(sim_input.node_set)
+ if sim_input.input_type == 'spikes':
+ spikes = spike_trains.SpikesInput.load(name=sim_input.name, module=sim_input.module,
+ input_type=sim_input.input_type, params=sim_input.params)
+                io.log_info('Building virtual cell stimulations for {}'.format(sim_input.name))
+ network.add_spike_trains(spikes, node_set)
+
+ elif sim_input.module == 'IClamp':
+ # TODO: Parse from csv file
+ amplitude = sim_input.params['amp']
+ delay = sim_input.params['delay']
+ duration = sim_input.params['duration']
+                # node_set was resolved above; attach_current_clamp accepts a NodeSet directly
+                sim.attach_current_clamp(amplitude, delay, duration, node_set)
+
+ elif sim_input.module == 'xstim':
+ sim.add_mod(mods.XStimMod(**sim_input.params))
+
+ else:
+                io.log_exception('Cannot parse input format {}'.format(sim_input.name))
+
+ if config.calc_ecp:
+ for gid, cell in network.cell_type_maps('biophysical').items():
+ cell.setup_ecp()
+ sim.h.cvode.use_fast_imem(1)
+
+ # Parse the "reports" section of the config and load an associated output module for each report
+ sim_reports = reports.from_config(config)
+ for report in sim_reports:
+ if isinstance(report, reports.SpikesReport):
+ mod = mods.SpikesMod(**report.params)
+
+ elif isinstance(report, reports.SectionReport):
+ mod = mods.SectionReport(**report.params)
+
+ elif isinstance(report, reports.MembraneReport):
+ if report.params['sections'] == 'soma':
+ mod = mods.SomaReport(**report.params)
+
+ else:
+ mod = mods.MembraneReport(**report.params)
+
+ elif isinstance(report, reports.ECPReport):
+ assert config.calc_ecp
+ mod = mods.EcpMod(**report.params)
+ # Set up the ability for ecp on all relevant cells
+ # TODO: According to spec we need to allow a different subset other than only biophysical cells
+ # for gid, cell in network.cell_type_maps('biophysical').items():
+ # cell.setup_ecp()
+
+ elif report.module == 'save_synapses':
+ mod = mods.SaveSynapses(**report.params)
+
+ else:
+ # TODO: Allow users to register customized modules using pymodules
+ io.log_warning('Unrecognized module {}, skipping.'.format(report.module))
+ continue
+
+ sim.add_mod(mod)
+
+ return sim
diff --git a/bmtk-vb/bmtk/simulator/bionet/README.md b/bmtk-vb/bmtk/simulator/bionet/README.md
new file mode 100644
index 0000000..5448a66
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/README.md
@@ -0,0 +1,4 @@
+## BioNet source code
+
+For instructions on how to install BioNet, please consult the [BioNet tutorial](https://alleninstitute.github.io/bmtk/bionet.html)
+
diff --git a/bmtk-vb/bmtk/simulator/bionet/__init__.py b/bmtk-vb/bmtk/simulator/bionet/__init__.py
new file mode 100644
index 0000000..7c86d80
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/__init__.py
@@ -0,0 +1,31 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from bmtk.simulator.bionet.pyfunction_cache import synapse_model, synaptic_weight, cell_model
+from bmtk.simulator.bionet.config import Config
+from bmtk.simulator.bionet.bionetwork import BioNetwork
+from bmtk.simulator.bionet.biosimulator import BioSimulator
+#from bmtk.simulator.bionet.io_tools import io
+
+#io = NEURONIOUtils()
+
+
diff --git a/bmtk-vb/bmtk/simulator/bionet/__init__.pyc b/bmtk-vb/bmtk/simulator/bionet/__init__.pyc
new file mode 100644
index 0000000..5c8584a
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..96ea6be
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/biocell.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/biocell.cpython-37.pyc
new file mode 100644
index 0000000..31641c6
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/biocell.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/bionetwork.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/bionetwork.cpython-37.pyc
new file mode 100644
index 0000000..7e43da0
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/bionetwork.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/biosimulator.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/biosimulator.cpython-37.pyc
new file mode 100644
index 0000000..76f720b
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/biosimulator.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/cell.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/cell.cpython-37.pyc
new file mode 100644
index 0000000..a0c0452
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/cell.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/config.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/config.cpython-37.pyc
new file mode 100644
index 0000000..b07f0f2
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/config.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/iclamp.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/iclamp.cpython-37.pyc
new file mode 100644
index 0000000..c145b51
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/iclamp.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/io_tools.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/io_tools.cpython-37.pyc
new file mode 100644
index 0000000..928891f
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/io_tools.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/morphology.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/morphology.cpython-37.pyc
new file mode 100644
index 0000000..aadd109
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/morphology.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/nml_reader.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/nml_reader.cpython-37.pyc
new file mode 100644
index 0000000..6bc9bf4
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/nml_reader.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/nrn.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/nrn.cpython-37.pyc
new file mode 100644
index 0000000..6b637d5
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/nrn.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/pointprocesscell.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/pointprocesscell.cpython-37.pyc
new file mode 100644
index 0000000..c43d61e
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/pointprocesscell.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/pointsomacell.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/pointsomacell.cpython-37.pyc
new file mode 100644
index 0000000..8a53bd2
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/pointsomacell.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/pyfunction_cache.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/pyfunction_cache.cpython-37.pyc
new file mode 100644
index 0000000..0cf316a
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/pyfunction_cache.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/sonata_adaptors.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/sonata_adaptors.cpython-37.pyc
new file mode 100644
index 0000000..ea93924
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/sonata_adaptors.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/utils.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/utils.cpython-37.pyc
new file mode 100644
index 0000000..44a2b80
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/utils.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/__pycache__/virtualcell.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/__pycache__/virtualcell.cpython-37.pyc
new file mode 100644
index 0000000..5d51b9d
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/__pycache__/virtualcell.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/biocell.py b/bmtk-vb/bmtk/simulator/bionet/biocell.py
new file mode 100644
index 0000000..1807042
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/biocell.py
@@ -0,0 +1,323 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+from scipy.stats import norm
+from bmtk.simulator.bionet import utils, nrn
+from bmtk.simulator.bionet.cell import Cell
+import six
+
+from neuron import h
+
+pc = h.ParallelContext() # object to access MPI methods
+
+
+class BioCell(Cell):
+ """Implemntation of a morphologically and biophysically detailed type cell.
+
+ """
+ def __init__(self, node, bionetwork):
+ super(BioCell, self).__init__(node)
+
+ # Set up netcon object that can be used to detect and communicate cell spikes.
+ self.set_spike_detector(bionetwork.spike_threshold)
+
+ self._morph = None
+ self._seg_coords = {}
+
+ # Determine number of segments and store a list of all sections.
+ self._nseg = 0
+ self.set_nseg(bionetwork.dL)
+ self._secs = []
+ self._secs_by_id = []
+ self.set_sec_array()
+
+ self._save_conn = False # bionetwork.save_connection
+ self._synapses = []
+ self._syn_src_net = []
+ self._syn_src_gid = []
+ self._syn_seg_ix = []
+ self._syn_sec_x = []
+ self._edge_type_ids = []
+ self._segments = None
+
+ # potentially used by ecp module
+ self.im_ptr = None
+ self.imVec = None
+
+ # used by xstim module
+ self.ptr2e_extracellular = None
+
+ self.__extracellular_mech = False
+
+ def set_spike_detector(self, spike_threshold):
+ nc = h.NetCon(self.hobj.soma[0](0.5)._ref_v, None, sec=self.hobj.soma[0]) # attach spike detector to cell
+ nc.threshold = spike_threshold
+ pc.cell(self.gid, nc) # associate gid with spike detector
+
+ def set_nseg(self, dL):
+ """Define number of segments in a cell"""
+ self._nseg = 0
+ for sec in self.hobj.all:
+ sec.nseg = 1 + 2 * int(sec.L/(2*dL))
+ self._nseg += sec.nseg # get the total number of segments in the cell
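+
+        # e.g. with dL = 20, a section of L = 95 gets nseg = 1 + 2*int(95/40) = 5;
+        # the count is always odd, so one segment is centered at the section midpoint.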
+
+ def calc_seg_coords(self, morph_seg_coords):
+ """Update the segment coordinates (after rotations) for individual cells"""
+ phi_y = self._node.rotation_angle_yaxis
+ phi_z = self._node.rotation_angle_zaxis
+ phi_x = self._node.rotation_angle_xaxis
+
+ # Rotate cell
+ # TODO: Rotations should follow as described in sonata (https://github.com/AllenInstitute/sonata/blob/master/docs/SONATA_DEVELOPER_GUIDE.md).
+ # Need someone with graphics experience to check they are being done correctly (I'm not sure atm).
+ RotX = utils.rotation_matrix([1, 0, 0], phi_x)
+ RotY = utils.rotation_matrix([0, 1, 0], phi_y) # rotate segments around yaxis normal to pia
+ RotZ = utils.rotation_matrix([0, 0, 1], -phi_z) # rotate segments around zaxis to get a proper orientation
+ RotXYZ = np.dot(RotX, RotY.dot(RotZ))
+
+ # rotated coordinates around z axis first then shift relative to the soma
+ self._seg_coords['p0'] = self._pos_soma + np.dot(RotXYZ, morph_seg_coords['p0'])
+ self._seg_coords['p1'] = self._pos_soma + np.dot(RotXYZ, morph_seg_coords['p1'])
+ self._seg_coords['p05'] = self._pos_soma + np.dot(RotXYZ, morph_seg_coords['p05'])
+
+ self._seg_coords['d0'] = morph_seg_coords['d0']
+ self._seg_coords['d1'] = morph_seg_coords['d1']
+
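+    # Composition sketch (angles in radians, hypothetical values): each
+    # morphology point p maps to world coordinates as
+    #   p_world = soma_pos + RotX(phi_x) @ RotY(phi_y) @ RotZ(-phi_z) @ p,
+    # i.e. the point is rotated about the morphology origin and then
+    # translated to the soma position.
+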
+ def get_seg_coords(self):
+ return self._seg_coords
+
+ def get_part_ids(self, enum=None):
+ """
+        Return a list of ids indicating which "part" of the neuron each section belongs to.
+        The list is in the same order as self._seg_coords.
+
+        enum: dict mapping each "part" string (soma, dend, apic, basal, axon) to an id.
+        Default: 0 = soma, 1 = dend, 2 = apic, 3 = basal, 4 = axon
+ """
+ enum = enum or {'soma': 0, 'dend': 1, 'apic': 2, 'basal': 3, 'axon': 4}
+ return [enum[sec.name().split('.')[-1].split('[')[0]] for sec in self._secs]
+
+
+ @property
+ def morphology_file(self):
+ # TODO: Get from self._node.morphology_file
+ return self._node.morphology_file
+
+ @property
+ def morphology(self):
+ return self._morph
+
+ @morphology.setter
+ def morphology(self, morphology_obj):
+ self.set_morphology(morphology_obj)
+
+ def set_morphology(self, morphology_obj):
+ self._morph = morphology_obj
+
+ def get_sections(self):
+ #return self._secs_by_id
+ return self._secs
+
+ def get_sections_id(self):
+ return self._secs_by_id
+
+ def get_section(self, sec_id):
+ return self._secs[sec_id]
+
+ def store_segments(self):
+ self._segments = []
+ for sec in self._secs:
+ for seg in sec:
+ self._segments.append(seg)
+
+ def get_segments(self):
+ return self._segments
+
+ def set_sec_array(self):
+ """Arrange sections in an array to be access by index"""
+ secs = [] # build ref to sections
+ self._secs_by_id = []
+ for sec in self.hobj.all:
+ self._secs_by_id.append(sec)
+ for _ in sec:
+ secs.append(sec) # section to which segments belongs
+
+ self._secs = np.array(secs)
+
+ def set_syn_connection(self, edge_prop, src_node, stim=None):
+ syn_weight = edge_prop.syn_weight(src_node=src_node, trg_node=self._node)
+
+ if edge_prop.preselected_targets:
+ return self._set_connection_preselected(edge_prop, src_node, syn_weight, stim)
+ else:
+ return self._set_connections(edge_prop, src_node, syn_weight, stim)
+
+ def _set_connection_preselected(self, edge_prop, src_node, syn_weight, stim=None):
+ # TODO: synapses should be loaded by edge_prop.load_synapse
+ sec_x = edge_prop['sec_x']
+ sec_id = edge_prop['sec_id']
+ section = self._secs_by_id[sec_id]
+ # section = self._secs[sec_id]
+ delay = edge_prop['delay']
+ synapse_fnc = nrn.py_modules.synapse_model(edge_prop['model_template'])
+ syn = synapse_fnc(edge_prop['dynamics_params'], sec_x, section)
+
+ if stim is not None:
+ nc = h.NetCon(stim.hobj, syn) # stim.hobj - source, syn - target
+ else:
+ nc = pc.gid_connect(src_node.node_id, syn)
+
+ nc.weight[0] = syn_weight
+ nc.delay = delay
+ self._netcons.append(nc)
+ self._synapses.append(syn)
+ if self._save_conn:
+ self._save_connection(src_gid=src_node.node_id, src_net=src_node.network, sec_x=sec_x, seg_ix=sec_id,
+ edge_type_id=edge_prop.edge_type_id)
+
+ return 1
+
+ def _set_connections(self, edge_prop, src_node, syn_weight, stim=None):
+ if 'prob_peaks' in edge_prop and edge_prop['prob_peaks']:
+ # Compute probability based on proximity to the peak depths given at network build time
+ tar_seg_prob = np.zeros(len(self._secs))
+ prob_peaks = [float(x) for x in edge_prop['prob_peaks'].split(',')]
+ prob_peak_std = [float(x) for x in edge_prop['prob_peak_std'].split(',')]
+ _z = lambda idx: self._seg_coords['p05'][1, idx]
+ for mu, std in zip(prob_peaks, prob_peak_std):
+ tar_seg_prob += np.array([norm.pdf(_z(idx), mu, std) for idx in range(len(self._secs))])
+ tar_seg_prob = tar_seg_prob / sum(tar_seg_prob)
+ tar_seg_ix = range(len(self._secs))
+ else:
+ # Compute probability based on segment length
+ tar_seg_ix, tar_seg_prob = self._morph.get_target_segments(edge_prop)
+
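+        # e.g. prob_peaks='150,600' with prob_peak_std='50,100' biases target
+        # selection toward segments whose depth (the p05 y-coordinate) lies
+        # near 150 or 600: the two normal densities are summed and the result
+        # renormalized to a probability vector.
+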
+ src_gid = src_node.node_id
+ nsyns = edge_prop.nsyns
+
+ # choose nsyn elements from seg_ix with probability proportional to segment area
+ segs_ix = self.prng.choice(tar_seg_ix, nsyns, p=tar_seg_prob)
+        secs = self._secs[segs_ix] # sections where synapses connect
+ xs = self._morph.seg_prop['x'][segs_ix] # distance along the section where synapse connects, i.e., seg_x
+
+ # TODO: this should be done just once
+ synapses = [edge_prop.load_synapses(x, sec) for x, sec in zip(xs, secs)]
+
+ delay = edge_prop['delay']
+ self._synapses.extend(synapses)
+
+ # TODO: Don't save this if not needed
+ self._edge_type_ids.extend([edge_prop.edge_type_id]*len(synapses))
+
+ for syn in synapses:
+ # connect synapses
+ if stim:
+ nc = h.NetCon(stim.hobj, syn)
+ else:
+ nc = pc.gid_connect(src_gid, syn)
+
+ nc.weight[0] = syn_weight
+ nc.delay = delay
+ self.netcons.append(nc)
+
+ return nsyns
+
+ def _save_connection(self, src_gid, src_net, sec_x, seg_ix, edge_type_id):
+ self._syn_src_gid.append(src_gid)
+ self._syn_src_net.append(src_net)
+ self._syn_sec_x.append(sec_x)
+ self._syn_seg_ix.append(seg_ix)
+        self._edge_type_ids.append(edge_type_id)
+
+ def get_connection_info(self):
+        # TODO: There should be a more efficient and robust way to return synapse information.
+ return [[self.gid, self._syn_src_gid[i], self.network_name, self._syn_src_net[i], self._syn_seg_ix[i],
+                self._syn_sec_x[i], self.netcons[i].weight[0], self.netcons[i].delay, self._edge_type_ids[i], 0]
+ for i in range(len(self._synapses))]
+
+ def init_connections(self):
+ super(BioCell, self).init_connections()
+ self._synapses = []
+ self._syn_src_gid = []
+ self._syn_seg_ix = []
+ self._syn_sec_x = []
+
+ def __set_extracell_mechanism(self):
+ if not self.__extracellular_mech:
+ for sec in self.hobj.all:
+ sec.insert('extracellular')
+ self.__extracellular_mech = True
+
+ def setup_ecp(self):
+ self.im_ptr = h.PtrVector(self._nseg) # pointer vector
+ # used for gathering an array of i_membrane values from the pointer vector
+ self.im_ptr.ptr_update_callback(self.set_im_ptr)
+ self.imVec = h.Vector(self._nseg)
+
+ self.__set_extracell_mechanism()
+ #for sec in self.hobj.all:
+ # sec.insert('extracellular')
+
+ def setup_xstim(self, set_nrn_mechanism=True):
+ self.ptr2e_extracellular = h.PtrVector(self._nseg)
+ self.ptr2e_extracellular.ptr_update_callback(self.set_ptr2e_extracellular)
+
+ # Set the e_extracellular mechanism for all sections on this hoc object
+ if set_nrn_mechanism:
+ self.__set_extracell_mechanism()
+ #for sec in self.hobj.all:
+ # sec.insert('extracellular')
+
+ def set_im_ptr(self):
+ """Set PtrVector to point to the i_membrane_"""
+ jseg = 0
+ for sec in self.hobj.all:
+ for seg in sec:
+ self.im_ptr.pset(jseg, seg._ref_i_membrane_) # notice the underscore at the end
+ jseg += 1
+
+ def get_im(self):
+ """Gather membrane currents from PtrVector into imVec (does not need a loop!)"""
+ self.im_ptr.gather(self.imVec)
+        # Warning: as_numpy() seems to fail in NEURON 7.4 for Python 3
+ # return self.imVec.as_numpy() # (nA)
+ return np.array(self.imVec)
+
+ def set_ptr2e_extracellular(self):
+ jseg = 0
+ for sec in self.hobj.all:
+ for seg in sec:
+ self.ptr2e_extracellular.pset(jseg, seg._ref_e_extracellular)
+ jseg += 1
+
+ def set_e_extracellular(self, vext):
+ self.ptr2e_extracellular.scatter(vext)
+
+ def print_synapses(self):
+ rstr = ''
+ for i in six.moves.range(len(self._syn_src_gid)):
+ rstr += '{}> <-- {} ({}, {}, {}, {})\n'.format(i, self._syn_src_gid[i], self.netcons[i].weight[0],
+ self.netcons[i].delay, self._syn_seg_ix[i],
+ self._syn_sec_x[i])
+ return rstr
diff --git a/bmtk-vb/bmtk/simulator/bionet/biocell.pyc b/bmtk-vb/bmtk/simulator/bionet/biocell.pyc
new file mode 100644
index 0000000..14409ba
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/biocell.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/bionetwork.py b/bmtk-vb/bmtk/simulator/bionet/bionetwork.py
new file mode 100644
index 0000000..78ec0ae
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/bionetwork.py
@@ -0,0 +1,262 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+from neuron import h
+
+from bmtk.simulator.core.simulator_network import SimNetwork
+from bmtk.simulator.bionet.biocell import BioCell
+from bmtk.simulator.bionet.pointprocesscell import PointProcessCell
+from bmtk.simulator.bionet.pointsomacell import PointSomaCell
+from bmtk.simulator.bionet.virtualcell import VirtualCell
+from bmtk.simulator.bionet.morphology import Morphology
+from bmtk.simulator.bionet.io_tools import io
+from bmtk.simulator.bionet import nrn
+from bmtk.simulator.bionet.sonata_adaptors import BioNodeAdaptor, BioEdgeAdaptor
+
+# TODO: leave this import, it will initialize some of the default functions for building neurons/synapses/weights.
+import bmtk.simulator.bionet.default_setters
+
+
+pc = h.ParallelContext() # object to access MPI methods
+MPI_size = int(pc.nhost())
+MPI_rank = int(pc.id())
+
+
+class BioNetwork(SimNetwork):
+ model_type_col = 'model_type'
+
+ def __init__(self):
+ # property_schema = property_schema if property_schema is not None else DefaultPropertySchema
+ super(BioNetwork, self).__init__()
+ self._io = io
+
+ # TODO: Find a better way that will allow users to register their own class
+ self._model_type_map = {
+ 'biophysical': BioCell,
+ 'point_process': PointProcessCell,
+ 'point_soma': PointSomaCell,
+ 'virtual': VirtualCell
+ }
+
+ self._morphologies_cache = {}
+ self._morphology_lookup = {}
+
+ self._rank_node_gids = {}
+ self._rank_node_ids = {}
+ self._rank_nodes_by_model = {m_type: {} for m_type in self._model_type_map.keys()}
+ self._remote_node_cache = {}
+ self._virtual_nodes = {}
+
+ self._cells_built = False
+ self._connections_initialized = False
+
+ @property
+ def py_function_caches(self):
+ return nrn
+
+ def get_node_id(self, population, node_id):
+ if node_id in self._rank_node_ids[population]:
+ return self._rank_node_ids[population][node_id].node
+
+ elif node_id in self._remote_node_cache[population]:
+ return self._remote_node_cache[population][node_id]
+
+ else:
+ node_pop = self.get_node_population(population)
+ node = node_pop.get_node(node_id)
+ self._remote_node_cache[population][node_id] = node
+ return node
+
+ def cell_type_maps(self, model_type):
+ return self._rank_nodes_by_model[model_type]
+
+ def get_cell_node_id(self, population, node_id):
+ return self._rank_node_ids[population].get(node_id, None)
+
+ def get_cell_gid(self, gid):
+ return self._rank_node_gids[gid]
+
+ def get_local_cells(self):
+ return self._rank_node_gids
+
+ @property
+ def local_gids(self):
+ return list(self._rank_node_gids.keys())
+
+ def get_virtual_cells(self, population, node_id, spike_trains):
+ if node_id in self._virtual_nodes[population]:
+ return self._virtual_nodes[population][node_id]
+ else:
+ node = self.get_node_id(population, node_id)
+ virt_cell = VirtualCell(node, spike_trains)
+ self._virtual_nodes[population][node_id] = virt_cell
+ return virt_cell
+
+ def _build_cell(self, bionode):
+ if bionode.model_type in self._model_type_map:
+ cell = self._model_type_map[bionode.model_type](bionode, self)
+ self._rank_nodes_by_model[bionode.model_type][cell.gid] = cell
+ return cell
+ else:
+ self.io.log_exception('Unrecognized model_type {}.'.format(bionode.model_type))
+
+ def _register_adaptors(self):
+ super(BioNetwork, self)._register_adaptors()
+ self._node_adaptors['sonata'] = BioNodeAdaptor
+ self._edge_adaptors['sonata'] = BioEdgeAdaptor
+
+ def build_nodes(self):
+ for node_pop in self.node_populations:
+ self._remote_node_cache[node_pop.name] = {}
+ node_ids_map = {}
+ if node_pop.internal_nodes_only:
+ for node in node_pop[MPI_rank::MPI_size]:
+ cell = self._build_cell(node)
+ node_ids_map[node.node_id] = cell
+ self._rank_node_gids[cell.gid] = cell
+
+ elif node_pop.mixed_nodes:
+ # node population contains both internal and virtual (external) nodes and the virtual nodes must be
+ # filtered out
+ self._virtual_nodes[node_pop.name] = {}
+ for node in node_pop[MPI_rank::MPI_size]:
+ if node.model_type == 'virtual':
+ continue
+ else:
+ cell = self._build_cell(node)
+ node_ids_map[node.node_id] = cell
+ self._rank_node_gids[cell.gid] = cell
+
+ elif node_pop.virtual_nodes_only:
+ self._virtual_nodes[node_pop.name] = {}
+
+ self._rank_node_ids[node_pop.name] = node_ids_map
+
+ self.make_morphologies()
+ self.set_seg_props() # set segment properties by creating Morphologies
+ self.calc_seg_coords() # use for computing the ECP
+ self._cells_built = True
+
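+    # Node striding sketch: with MPI_size == 4, rank 1 instantiates nodes
+    # 1, 5, 9, ... of each population (node_pop[1::4]); each rank builds a
+    # disjoint subset, so no cells need to be exchanged during setup.
+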
+ def set_seg_props(self):
+ """Set morphological properties for biophysically (morphologically) detailed cells"""
+ for _, morphology in self._morphologies_cache.items():
+ morphology.set_seg_props()
+
+ def calc_seg_coords(self):
+ """Needed for the ECP calculations"""
+ # TODO: Is there any reason this function can't be moved to make_morphologies()
+ for morphology_file, morphology in self._morphologies_cache.items():
+ morph_seg_coords = morphology.calc_seg_coords() # needed for ECP calculations
+
+ for gid in self._morphology_lookup[morphology_file]:
+ self.get_cell_gid(gid).calc_seg_coords(morph_seg_coords)
+
+ def make_morphologies(self):
+ """Creating a Morphology object for each biophysical model"""
+ # TODO: Let Morphology take care of the cache
+ # TODO: Let other types have morphologies
+ # TODO: Get all available morphologies from TypesTable or group
+ for gid, cell in self._rank_node_gids.items():
+ if not isinstance(cell, BioCell):
+ continue
+
+ morphology_file = cell.morphology_file
+ if morphology_file in self._morphologies_cache:
+ # create a single morphology object for each model_group which share that morphology
+ morph = self._morphologies_cache[morphology_file]
+
+ # associate morphology with a cell
+ cell.set_morphology(morph)
+ self._morphology_lookup[morphology_file].append(cell.gid)
+
+ else:
+                hobj = cell.hobj # get hoc object (hobj) from the first cell with a new morphology
+ morph = Morphology(hobj)
+
+ # associate morphology with a cell
+ cell.set_morphology(morph)
+
+ # create a single morphology object for each model_group which share that morphology
+ self._morphologies_cache[morphology_file] = morph
+ self._morphology_lookup[morphology_file] = [cell.gid]
+
+ self.io.barrier()
+
+ def _init_connections(self):
+ if not self._connections_initialized:
+ for gid, cell in self._rank_node_gids.items():
+ cell.init_connections()
+ self._connections_initialized = True
+
+ def build_recurrent_edges(self):
+ recurrent_edge_pops = [ep for ep in self._edge_populations if not ep.virtual_connections]
+ if not recurrent_edge_pops:
+ return
+
+ self._init_connections()
+ for edge_pop in recurrent_edge_pops:
+ if edge_pop.recurrent_connections:
+ source_population = edge_pop.source_nodes
+ for trg_nid, trg_cell in self._rank_node_ids[edge_pop.target_nodes].items():
+ for edge in edge_pop.get_target(trg_nid):
+ src_node = self.get_node_id(source_population, edge.source_node_id)
+ trg_cell.set_syn_connection(edge, src_node)
+
+ elif edge_pop.mixed_connections:
+                # When an edge population contains both virtual and recurrent edges we have to check every source
+                # node to see if it is virtual (because virtual nodes can't be built yet). This check can
+                # significantly slow down build time, so it lives in a separate branch that purely recurrent
+                # populations never enter.
+ source_population = edge_pop.source_nodes
+ for trg_nid, trg_cell in self._rank_node_ids[edge_pop.target_nodes].items():
+ for edge in edge_pop.get_target(trg_nid):
+ src_node = self.get_node_id(source_population, edge.source_node_id)
+ if src_node.model_type == 'virtual':
+ continue
+ trg_cell.set_syn_connection(edge, src_node)
+
+ def find_edges(self, source_nodes=None, target_nodes=None):
+ selected_edges = self._edge_populations[:]
+
+ if source_nodes is not None:
+ selected_edges = [edge_pop for edge_pop in selected_edges if edge_pop.source_nodes == source_nodes]
+
+ if target_nodes is not None:
+ selected_edges = [edge_pop for edge_pop in selected_edges if edge_pop.target_nodes == target_nodes]
+
+ return selected_edges
+
+ def add_spike_trains(self, spike_trains, node_set):
+ self._init_connections()
+
+ src_nodes = [node_pop for node_pop in self.node_populations if node_pop.name in node_set.population_names()]
+ for src_node_pop in src_nodes:
+ source_population = src_node_pop.name
+ for edge_pop in self.find_edges(source_nodes=source_population):
+ if edge_pop.virtual_connections:
+ for trg_nid, trg_cell in self._rank_node_ids[edge_pop.target_nodes].items():
+ for edge in edge_pop.get_target(trg_nid):
+ src_cell = self.get_virtual_cells(source_population, edge.source_node_id, spike_trains)
+ trg_cell.set_syn_connection(edge, src_cell, src_cell)
+
+ elif edge_pop.mixed_connections:
+ raise NotImplementedError()
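For orientation, the edge-building loops above follow one pattern: iterate the target cells owned by this MPI rank, fetch each cell's incoming edges, and connect them, skipping virtual sources in the mixed case until their spike trains exist. A minimal self-contained sketch of that pattern; plain dicts stand in for bmtk's node and edge populations, and every name here is illustrative, not bmtk API:

    class FakeCell:
        """Stand-in for a bionet cell; records its connections."""
        def __init__(self):
            self.conns = []

        def set_syn_connection(self, src_id, params):
            self.conns.append((src_id, params))


    def build_edges(rank_cells, edges_by_target, model_types, mixed=False):
        # rank_cells: {target_id: cell} for the cells living on this rank
        # edges_by_target: {target_id: [(source_id, syn_params), ...]}
        for trg_id, cell in rank_cells.items():
            for src_id, params in edges_by_target.get(trg_id, []):
                if mixed and model_types[src_id] == 'virtual':
                    continue  # virtual sources get wired later, with spike trains
                cell.set_syn_connection(src_id, params)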
diff --git a/bmtk-vb/bmtk/simulator/bionet/bionetwork.pyc b/bmtk-vb/bmtk/simulator/bionet/bionetwork.pyc
new file mode 100644
index 0000000..ca52c8d
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/bionetwork.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/biosimulator.py b/bmtk-vb/bmtk/simulator/bionet/biosimulator.py
new file mode 100644
index 0000000..4082adc
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/biosimulator.py
@@ -0,0 +1,363 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import time
+from six import string_types
+from neuron import h
+from bmtk.simulator.core.simulator import Simulator
+from bmtk.simulator.bionet.io_tools import io
+from bmtk.simulator.bionet.iclamp import IClamp
+from bmtk.simulator.bionet import modules as mods
+from bmtk.simulator.core.node_sets import NodeSet
+import bmtk.simulator.utils.simulation_reports as reports
+import bmtk.simulator.utils.simulation_inputs as inputs
+from bmtk.utils.io import spike_trains
+
+
+pc = h.ParallelContext() # object to access MPI methods
+
+
+class BioSimulator(Simulator):
+ """Includes methods to run and control the simulation"""
+ # Add extra argument "optocell"
+ def __init__(self, network, dt, tstop, v_init, celsius, cao0, optocell, nsteps_block, start_from_state=False):
+ self.net = network
+
+ self._start_from_state = start_from_state
+ self.dt = dt
+ self.tstop = tstop
+
+ self._v_init = v_init
+ self._celsius = celsius
+ self._cao0 = cao0
+ self._optocell = optocell # Set instance var to optocell
+ self._h = h
+
+ self.tstep = int(round(h.t / h.dt))
+ self.tstep_start_block = self.tstep
+ self.nsteps = int(round(h.tstop/h.dt))
+
+ # make sure the block size isn't larger than the total number of steps
+ # TODO: should we send a warning that block-step size is being reset?
+ self._nsteps_block = nsteps_block if self.nsteps > nsteps_block else self.nsteps
+
+ self.__tstep_end_block = 0
+ self.__tstep_start_block = 0
+
+ h.runStopAt = h.tstop
+ h.steps_per_ms = 1/h.dt
+
+ self._set_init_conditions() # call to save state
+ h.cvode.cache_efficient(1)
+
+ h.pysim = self # use this objref to call post_fadvance() from proc advance() in advance.hoc
+ self._iclamps = []
+
+ self._output_dir = 'output'
+ self._log_file = 'output/log.txt'
+
+ self._spikes = {} # spike times recorded during the run, keyed by cell gid
+
+ self._cell_variables = [] # location of saved cell variables
+ self._cell_vars_dir = 'output/cellvars'
+
+ self._sim_mods = [] # list of modules.SimulatorMod's
+
+ @property
+ def optocell(self):
+ return self._optocell
+
+ @property
+ def dt(self):
+ return h.dt
+
+ @dt.setter
+ def dt(self, ms):
+ h.dt = ms
+
+ @property
+ def tstop(self):
+ return h.tstop
+
+ @tstop.setter
+ def tstop(self, ms):
+ h.tstop = ms
+
+ @property
+ def v_init(self):
+ return self._v_init
+
+ @v_init.setter
+ def v_init(self, voltage):
+ self._v_init = voltage
+
+ @property
+ def celsius(self):
+ return self._celsius
+
+ @celsius.setter
+ def celsius(self, c):
+ self._celsius = c
+
+ @property
+ def cao0(self):
+ return self._cao0
+
+ @cao0.setter
+ def cao0(self, cao):
+ self._cao0 = cao
+
+ @property
+ def n_steps(self):
+ return int(round(self.tstop/self.dt))
+
+ @property
+ def cell_variables(self):
+ return self._cell_variables
+
+ @property
+ def cell_var_output(self):
+ return self._cell_vars_dir
+
+ @property
+ def spikes_table(self):
+ return self._spikes
+
+ @property
+ def nsteps_block(self):
+ return self._nsteps_block
+
+ @property
+ def h(self):
+ return self._h
+
+ @property
+ def biophysical_gids(self):
+ return self.net.cell_type_maps('biophysical').keys()
+
+ @property
+ def local_gids(self):
+ # return self.net.get
+ return self.net.local_gids
+
+ def __elapsed_time(self, time_s):
+ if time_s < 120:
+ return '{:.4} seconds'.format(time_s)
+ elif time_s < 7200:
+ mins, secs = divmod(time_s, 60)
+ return '{} minutes, {:.4} seconds'.format(mins, secs)
+ else:
+ mins, secs = divmod(time_s, 60)
+ hours, mins = divmod(mins, 60)
+ return '{} hours, {} minutes and {:.4} seconds'.format(hours, mins, secs)
+
+ def _set_init_conditions(self):
+ """Set up the initial conditions: either read from the h.SaveState or from config["condidtions"]"""
+ pc.set_maxstep(10)
+ h.stdinit()
+ self.tstep = int(round(h.t/h.dt))
+ self.tstep_start_block = self.tstep
+
+ if self._start_from_state:
+ # io.read_state()
+ io.log_info('Read the initial state saved at t_sim: {} ms'.format(h.t))
+ else:
+ h.v_init = self.v_init
+
+ h.celsius = self.celsius
+ # h.cao0_ca_ion = self.cao0
+
+ def set_spikes_recording(self):
+ for gid, _ in self.net.get_local_cells().items():
+ tvec = self.h.Vector()
+ gidvec = self.h.Vector()
+ pc.spike_record(gid, tvec, gidvec)
+ self._spikes[gid] = tvec
+
+ def attach_current_clamp(self, amplitude, delay, duration, gids=None):
+ # TODO: verify current clamp works with MPI
+ # TODO: Create appropriate module
+ if gids is None:
+ gids = self.biophysical_gids
+ if isinstance(gids, int):
+ gids = [gids]
+ elif isinstance(gids, string_types):
+ gids = [int(gids)]
+ elif isinstance(gids, NodeSet):
+ gids = gids.gids()
+
+ gids = list(set(self.local_gids) & set(gids))
+ for gid in gids:
+ cell = self.net.get_cell_gid(gid)
+ Ic = IClamp(amplitude, delay, duration)
+ Ic.attach_current(cell)
+ self._iclamps.append(Ic)
+
+ def add_mod(self, module):
+ self._sim_mods.append(module)
+
+ def run(self):
+ """Run the simulation:
+ if beginning from a blank state, then will use h.run(),
+ if continuing from the saved state, then will use h.continuerun()
+ """
+ for mod in self._sim_mods:
+ mod.initialize(self)
+
+ self.start_time = h.startsw()
+ s_time = time.time()
+ pc.timeout(0)
+
+ pc.barrier() # wait for all hosts to get to this point
+ io.log_info('Running simulation for {:.3f} ms with the time step {:.3f} ms'.format(self.tstop, self.dt))
+ io.log_info('Starting timestep: {} at t_sim: {:.3f} ms'.format(self.tstep, h.t))
+ io.log_info('Block save every {} steps'.format(self.nsteps_block))
+
+ if self._start_from_state:
+ h.continuerun(h.tstop)
+ else:
+ h.run(h.tstop) # <- runs simulation; works in parallel
+
+ pc.barrier()
+
+ for mod in self._sim_mods:
+ mod.finalize(self)
+ pc.barrier()
+
+ end_time = time.time()
+
+ sim_time = self.__elapsed_time(end_time - s_time)
+ io.log_info('Simulation completed in {} '.format(sim_time))
+
+ def report_load_balance(self):
+ comptime = pc.step_time()
+ avgcomp = pc.allreduce(comptime, 1)/pc.nhost()
+ maxcomp = pc.allreduce(comptime, 2)
+ io.log_info('Maximum compute time is {} seconds.'.format(maxcomp))
+ io.log_info('Approximate exchange time is {} seconds.'.format(comptime - maxcomp))
+ if maxcomp != 0.0:
+ io.log_info('Load balance is {}.'.format(avgcomp/maxcomp))
+
+ def post_fadvance(self):
+ """
+ Runs after every execution of fadvance (see advance.hoc)
+ Called after every time step to perform computation and save data to memory block or to disk.
+ The initial condition tstep=0 is not being saved
+ """
+ for mod in self._sim_mods:
+ mod.step(self, self.tstep)
+
+ self.tstep += 1
+
+ if (self.tstep % self.nsteps_block == 0) or self.tstep == self.nsteps:
+ io.log_info(' step:{} t_sim:{:.2f} ms'.format(self.tstep, h.t))
+ self.__tstep_end_block = self.tstep
+ time_step_interval = (self.__tstep_start_block, self.__tstep_end_block)
+
+ for mod in self._sim_mods:
+ mod.block(self, time_step_interval)
+
+ self.__tstep_start_block = self.tstep # starting point for the next block
+
+ @classmethod
+ def from_config(cls, config, network, set_recordings=True):
+ # TODO: convert from json to sonata config if necessary
+
+ sim = cls(network=network,
+ dt=config.dt,
+ tstop=config.tstop,
+ v_init=config.v_init,
+ celsius=config.celsius,
+ cao0=config.cao0,
+ optocell=config.optocell,
+ nsteps_block=config.block_step)
+
+ network.io.log_info('Building cells.')
+ network.build_nodes()
+
+ network.io.log_info('Building recurrent connections')
+ network.build_recurrent_edges()
+
+ # TODO: Need to create a gid selector
+ for sim_input in inputs.from_config(config):
+ node_set = network.get_node_set(sim_input.node_set)
+ if sim_input.input_type == 'spikes':
+ spikes = spike_trains.SpikesInput.load(name=sim_input.name, module=sim_input.module,
+ input_type=sim_input.input_type, params=sim_input.params)
+ io.log_info('Build virtual cell stimulations for {}'.format(sim_input.name))
+ network.add_spike_trains(spikes, node_set)
+
+ elif sim_input.module == 'IClamp':
+ # TODO: Parse from csv file
+ amplitude = sim_input.params['amp']
+ delay = sim_input.params['delay']
+ duration = sim_input.params['duration']
+ sim.attach_current_clamp(amplitude, delay, duration, node_set)
+
+ elif sim_input.module == 'xstim':
+ sim.add_mod(mods.XStimMod(**sim_input.params))
+
+ else:
+ io.log_exception('Cannot parse input format {}'.format(sim_input.name))
+
+ if config.calc_ecp:
+ for gid, cell in network.cell_type_maps('biophysical').items():
+ cell.setup_ecp()
+ sim.h.cvode.use_fast_imem(1)
+
+ # Parse the "reports" section of the config and load an associated output module for each report
+ sim_reports = reports.from_config(config)
+ for report in sim_reports:
+ if isinstance(report, reports.SpikesReport):
+ mod = mods.SpikesMod(**report.params)
+
+ elif isinstance(report, reports.SectionReport):
+ mod = mods.SectionReport(**report.params)
+
+ elif isinstance(report, reports.MembraneReport):
+ if report.params['sections'] == 'soma':
+ mod = mods.SomaReport(**report.params)
+
+ else:
+ mod = mods.MembraneReport(**report.params)
+
+ elif isinstance(report, reports.ECPReport):
+ assert config.calc_ecp
+ mod = mods.EcpMod(**report.params)
+ # Set up the ability for ecp on all relevant cells
+ # TODO: According to spec we need to allow a different subset other than only biophysical cells
+ # for gid, cell in network.cell_type_maps('biophysical').items():
+ # cell.setup_ecp()
+
+ elif report.module == 'save_synapses':
+ mod = mods.SaveSynapses(**report.params)
+
+ else:
+ # TODO: Allow users to register customized modules using pymodules
+ io.log_warning('Unrecognized module {}, skipping.'.format(report.module))
+ continue
+
+ sim.add_mod(mod)
+
+ return sim
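The from_config() factory above is normally exercised by a small driver script. A typical one, assuming the standard bmtk bionet entry points (the config path is illustrative):

    from bmtk.simulator import bionet

    conf = bionet.Config.from_json('config.json', validate=True)
    conf.build_env()                                   # output dir + NEURON modules
    net = bionet.BioNetwork.from_config(conf)          # build node populations
    sim = bionet.BioSimulator.from_config(conf, network=net)
    sim.run()
    bionet.nrn.quit_execution()                        # clean shutdown under MPI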
diff --git a/bmtk-vb/bmtk/simulator/bionet/biosimulator.pyc b/bmtk-vb/bmtk/simulator/bionet/biosimulator.pyc
new file mode 100644
index 0000000..9b47d1d
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/biosimulator.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/cell.py b/bmtk-vb/bmtk/simulator/bionet/cell.py
new file mode 100644
index 0000000..190836a
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/cell.py
@@ -0,0 +1,104 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from neuron import h
+import numpy as np
+
+
+pc = h.ParallelContext() # object to access MPI methods
+MPI_RANK = int(pc.id())
+
+
+class Cell(object):
+ """A abstract base class for any cell object.
+
+ A base class for implementing cell-type objects such as biophysical cells, LIF cells, etc. Do not instantiate
+ a Cell object directly. Cell classes act as wrappers around a HOC cell object, adding functionality for setting
+ positions, synapses, and other parameters depending on the desired cell class.
+ """
+ def __init__(self, node):
+ self._node = node
+ self._gid = node.gid
+ self._node_id = node.node_id
+ self._props = node
+ self._netcons = [] # list of NEURON network connection object attached to this cell
+
+ self._pos_soma = []
+ self.set_soma_position()
+
+ # register the cell
+ pc.set_gid2node(self.gid, MPI_RANK)
+
+ # Load the NEURON HOC object
+ self._hobj = node.load_cell()
+
+ @property
+ def node(self):
+ return self._node
+
+ @property
+ def hobj(self):
+ return self._hobj
+
+ @property
+ def gid(self):
+ return self._gid
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def group_id(self):
+ return self._node.group_id
+
+ @property
+ def network_name(self):
+ return self._node.network
+
+ @property
+ def netcons(self):
+ return self._netcons
+
+ @property
+ def soma_position(self):
+ return self._pos_soma
+
+ def set_soma_position(self):
+ positions = self._node.position
+ if positions is not None:
+ self._pos_soma = positions.reshape(3, 1)
+
+ def init_connections(self):
+ self.rand_streams = []
+ self.prng = np.random.RandomState(self.gid) # generate random stream based on gid
+
+ def scale_weights(self, factor):
+ for nc in self.netcons:
+ weight = nc.weight[0]
+ nc.weight[0] = weight*factor
+
+ def get_connection_info(self):
+ return []
+
+ def set_syn_connections(self, edge_prop, src_node, stim=None):
+ raise NotImplementedError
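The pc.set_gid2node(gid, MPI_RANK) call in __init__ registers each gid to the rank that constructs the cell; which rank that is depends on how gids were partitioned upstream. A round-robin partition is one common scheme; a pure-Python sketch (not the bmtk implementation):

    def gids_for_rank(all_gids, rank, nhost):
        """Gids owned by `rank` under a round-robin partition."""
        return [gid for i, gid in enumerate(sorted(all_gids)) if i % nhost == rank]

    # e.g. 10 cells over 4 ranks: rank 1 owns gids 1, 5, 9
    assert gids_for_rank(range(10), 1, 4) == [1, 5, 9]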
diff --git a/bmtk-vb/bmtk/simulator/bionet/cell.pyc b/bmtk-vb/bmtk/simulator/bionet/cell.pyc
new file mode 100644
index 0000000..cd5671d
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/cell.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/config.py b/bmtk-vb/bmtk/simulator/bionet/config.py
new file mode 100644
index 0000000..e81a0ba
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/config.py
@@ -0,0 +1,88 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+
+from neuron import h
+
+#import bmtk.simulator.utils.config as msdk_config
+#from bmtk.utils.sonata.config import SonataConfig
+#from bmtk.simulator.core.config import ConfigDict
+from bmtk.simulator.utils.config import ConfigDict
+from bmtk.simulator.utils.sim_validator import SimConfigValidator
+from bmtk.simulator.bionet.io_tools import io
+from . import nrn
+
+pc = h.ParallelContext() # object to access MPI methods
+MPI_Rank = int(pc.id())
+
+
+# load the configuration schema
+schema_folder = os.path.join(os.path.dirname(__file__), 'schemas')
+config_schema_file = os.path.join(schema_folder, 'config_schema.json')
+
+# json schemas (but not real jsonschema) to describe the various input file formats
+file_formats = [
+ ("csv:nodes_internal", os.path.join(schema_folder, 'csv_nodes_internal.json')),
+ ("csv:node_types_internal", os.path.join(schema_folder, 'csv_node_types_internal.json')),
+ ("csv:edge_types", os.path.join(schema_folder, 'csv_edge_types.json')),
+ ("csv:nodes_external", os.path.join(schema_folder, 'csv_nodes_external.json')),
+ ("csv:node_types_external", os.path.join(schema_folder, 'csv_node_types_external.json'))
+]
+
+# Create a config and input file validator for Bionet
+with open(config_schema_file, 'r') as f:
+ config_schema = json.load(f)
+bionet_validator = SimConfigValidator(config_schema, file_formats=file_formats)
+
+
+class Config(ConfigDict):
+ @property
+ def cao0(self):
+ return self.conditions['cao0']
+
+ @property
+ def optocell(self):
+ return self.run['optocell']
+
+ @staticmethod
+ def get_validator():
+ return bionet_validator
+
+ def create_output_dir(self):
+ io.setup_output_dir(self.output_dir, self.log_file)
+
+ def load_nrn_modules(self):
+ nrn.load_neuron_modules(self.mechanisms_dir, self.templates_dir)
+
+ def build_env(self):
+ if MPI_Rank == 0:
+ self.create_output_dir()
+ self.copy_to_output()
+
+ if io.mpi_size > 1:
+ # A friendly message requested by fb
+ io.log_info('Running NEURON with mpi ({} cores).'.format(io.mpi_size))
+
+ pc.barrier()
+ self.load_nrn_modules()
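The cao0 and optocell properties added here just surface keys from the config's "conditions" and "run" blocks. A config consumed by this class therefore needs entries along these lines; the key names follow the properties above, while the values and the exact optocell format are illustrative:

    # Fragment of a simulation config, shown as a Python dict for illustration.
    config_fragment = {
        "run": {
            "tstop": 3000.0,
            "dt": 0.1,
            "optocell": [],        # cells targeted for optogenetic stimulation
        },
        "conditions": {
            "celsius": 34.0,
            "v_init": -80.0,
            "cao0": 2.0,           # initial extracellular [Ca2+], mM
        },
    }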
diff --git a/bmtk-vb/bmtk/simulator/bionet/config.pyc b/bmtk-vb/bmtk/simulator/bionet/config.pyc
new file mode 100644
index 0000000..a2394fb
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/config.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_setters/__init__.py b/bmtk-vb/bmtk/simulator/bionet/default_setters/__init__.py
new file mode 100644
index 0000000..4ad0b56
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/default_setters/__init__.py
@@ -0,0 +1,25 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from . import cell_models
+from . import synapse_models
+from . import synaptic_weights
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_setters/__init__.pyc b/bmtk-vb/bmtk/simulator/bionet/default_setters/__init__.pyc
new file mode 100644
index 0000000..5107fd0
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/default_setters/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_setters/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/default_setters/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..6c648a6
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/default_setters/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_setters/__pycache__/cell_models.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/default_setters/__pycache__/cell_models.cpython-37.pyc
new file mode 100644
index 0000000..f4ac9ff
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/default_setters/__pycache__/cell_models.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_setters/__pycache__/synapse_models.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/default_setters/__pycache__/synapse_models.cpython-37.pyc
new file mode 100644
index 0000000..08a45bb
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/default_setters/__pycache__/synapse_models.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_setters/__pycache__/synaptic_weights.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/default_setters/__pycache__/synaptic_weights.cpython-37.pyc
new file mode 100644
index 0000000..6263560
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/default_setters/__pycache__/synaptic_weights.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_setters/cell_models.py b/bmtk-vb/bmtk/simulator/bionet/default_setters/cell_models.py
new file mode 100644
index 0000000..16d5bfb
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/default_setters/cell_models.py
@@ -0,0 +1,460 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import numpy as np
+from neuron import h
+try:
+ from sklearn.decomposition import PCA
+except Exception as e:
+ pass
+
+from bmtk.simulator.bionet.pyfunction_cache import add_cell_model, add_cell_processor
+from bmtk.simulator.bionet.io_tools import io
+from bmtk.simulator.bionet.nml_reader import NMLTree
+
+"""
+Functions for loading NEURON cell objects.
+
+Functions will be loaded by bionetwork and called when a new cell object is created. These are for standard models
+loaded with Cell-Types json files or their NeuroML equivalent, but may be overridden by the user.
+"""
+
+
+def IntFire1(cell, template_name, dynamics_params):
+ """Loads a point integrate and fire neuron"""
+ hobj = h.IntFire1()
+ hobj.tau = dynamics_params['tau']*1000.0 # Convert from seconds to ms.
+ hobj.refrac = dynamics_params['refrac']*1000.0 # Convert from seconds to ms.
+ return hobj
+
+
+def Biophys1(cell, template_name, dynamic_params):
+ """Loads a biophysical NEURON hoc object using Cell-Types database objects."""
+ morphology_file = cell.morphology_file
+ hobj = h.Biophys1(str(morphology_file))
+ #fix_axon(hobj)
+ #set_params_peri(hobj, dynamic_params)
+ return hobj
+
+
+def Biophys1_nml(json_file):
+ # TODO: look at examples to see how to convert .nml files
+ raise NotImplementedError()
+
+
+def Biophys1_dict(cell):
+ """ Set parameters for cells from the Allen Cell Types database Prior to setting parameters will replace the
+ axon with the stub
+ """
+ morphology_file = cell['morphology']
+ hobj = h.Biophys1(str(morphology_file))
+ return hobj
+
+
+def aibs_perisomatic(hobj, cell, dynamics_params):
+ if dynamics_params is not None:
+ fix_axon_peri(hobj)
+ set_params_peri(hobj, dynamics_params)
+
+ return hobj
+
+
+def fix_axon_peri(hobj):
+ """Replace reconstructed axon with a stub
+
+ :param hobj: hoc object
+ """
+ for sec in hobj.axon:
+ h.delete_section(sec=sec)
+
+ h.execute('create axon[2]', hobj)
+
+ for sec in hobj.axon:
+ sec.L = 30
+ sec.diam = 1
+ hobj.axonal.append(sec=sec)
+ hobj.all.append(sec=sec)
+
+ hobj.axon[0].connect(hobj.soma[0], 0.5, 0)
+ hobj.axon[1].connect(hobj.axon[0], 1, 0)
+
+ h.define_shape()
+
+
+def set_params_peri(hobj, biophys_params):
+ """Set biophysical parameters for the cell
+
+ :param hobj: NEURON's cell object
+ :param biophys_params: name of json file with biophys params for cell's model which determine spiking behavior
+ :return:
+ """
+ passive = biophys_params['passive'][0]
+ conditions = biophys_params['conditions'][0]
+ genome = biophys_params['genome']
+
+ # Set passive properties
+ cm_dict = dict([(c['section'], c['cm']) for c in passive['cm']])
+ for sec in hobj.all:
+ sec.Ra = passive['ra']
+ sec.cm = cm_dict[sec.name().split(".")[1][:4]]
+ sec.insert('pas')
+
+ for seg in sec:
+ seg.pas.e = passive["e_pas"]
+
+ # Insert channels and set parameters
+ for p in genome:
+ sections = [s for s in hobj.all if s.name().split(".")[1][:4] == p["section"]]
+
+ for sec in sections:
+ if p["mechanism"] != "":
+ sec.insert(p["mechanism"])
+ setattr(sec, p["name"], p["value"])
+
+ # Set reversal potentials
+ for erev in conditions['erev']:
+ sections = [s for s in hobj.all if s.name().split(".")[1][:4] == erev["section"]]
+ for sec in sections:
+ sec.ena = erev["ena"]
+ sec.ek = erev["ek"]
+
+
+def aibs_allactive(hobj, cell, dynamics_params):
+ fix_axon_allactive(hobj)
+ set_params_allactive(hobj, dynamics_params)
+ return hobj
+
+
+def fix_axon_allactive(hobj):
+ """Replace reconstructed axon with a stub
+
+ Parameters
+ ----------
+ hobj: instance of a Biophysical template
+ NEURON's cell object
+ """
+ # find the start and end diameter of the original axon; this is different from the perisomatic cell model
+ # where diameter == 1.
+ axon_diams = [hobj.axon[0].diam, hobj.axon[0].diam]
+ for sec in hobj.all:
+ section_name = sec.name().split(".")[1][:4]
+ if section_name == 'axon':
+ axon_diams[1] = sec.diam
+
+ for sec in hobj.axon:
+ h.delete_section(sec=sec)
+
+ h.execute('create axon[2]', hobj)
+ for index, sec in enumerate(hobj.axon):
+ sec.L = 30
+ sec.diam = axon_diams[index] # 1
+
+ hobj.axonal.append(sec=sec)
+ hobj.all.append(sec=sec)
+
+ hobj.axon[0].connect(hobj.soma[0], 1.0, 0)
+ hobj.axon[1].connect(hobj.axon[0], 1.0, 0)
+
+ h.define_shape()
+
+
+def set_params_allactive(hobj, params_dict):
+ # params_dict = json.load(open(params_file_name, 'r'))
+ passive = params_dict['passive'][0]
+ genome = params_dict['genome']
+ conditions = params_dict['conditions'][0]
+
+ section_map = {}
+ for sec in hobj.all:
+ section_name = sec.name().split(".")[1][:4]
+ if section_name in section_map:
+ section_map[section_name].append(sec)
+ else:
+ section_map[section_name] = [sec]
+
+ for sec in hobj.all:
+ sec.insert('pas')
+ # sec.insert('extracellular')
+
+ if 'e_pas' in passive:
+ e_pas_val = passive['e_pas']
+ for sec in hobj.all:
+ for seg in sec:
+ seg.pas.e = e_pas_val
+
+ if 'ra' in passive:
+ ra_val = passive['ra']
+ for sec in hobj.all:
+ sec.Ra = ra_val
+
+ if 'cm' in passive:
+ # print('Setting cm')
+ for cm_dict in passive['cm']:
+ cm = cm_dict['cm']
+ for sec in section_map.get(cm_dict['section'], []):
+ sec.cm = cm
+
+ for genome_dict in genome:
+ g_section = genome_dict['section']
+ if genome_dict['section'] == 'glob':
+ io.log_warning("There is a section called glob, probably old json file")
+ continue
+
+ g_value = float(genome_dict['value'])
+ g_name = genome_dict['name']
+ g_mechanism = genome_dict.get("mechanism", "")
+ for sec in section_map.get(g_section, []):
+ if g_mechanism != "":
+ sec.insert(g_mechanism)
+ setattr(sec, g_name, g_value)
+
+ for erev in conditions['erev']:
+ erev_section = erev['section']
+ erev_ena = erev['ena']
+ erev_ek = erev['ek']
+
+ if erev_section in section_map:
+ for sec in section_map.get(erev_section, []):
+ if h.ismembrane('k_ion', sec=sec) == 1:
+ setattr(sec, 'ek', erev_ek)
+ if h.ismembrane('na_ion', sec=sec) == 1:
+ setattr(sec, 'ena', erev_ena)
+ else:
+ io.log_warning("Can't set erev for {}, section array doesn't exist".format(erev_section))
+
+
+def aibs_perisomatic_directed(hobj, cell, dynamics_params):
+ fix_axon_perisomatic_directed(hobj)
+ set_params_peri(hobj, dynamics_params)
+ return hobj
+
+
+def aibs_allactive_directed(hobj, cell, dynamics_params):
+ fix_axon_allactive_directed(hobj)
+ set_params_allactive(hobj, dynamics_params)
+ return hobj
+
+
+def fix_axon_perisomatic_directed(hobj):
+ # io.log_info('Fixing Axon like perisomatic')
+ all_sec_names = []
+ for sec in hobj.all:
+ all_sec_names.append(sec.name().split(".")[1][:4])
+
+ if 'axon' not in all_sec_names:
+ io.log_exception('There is no axonal reconstruction in the swc file.')
+ else:
+ beg1, end1, beg2, end2 = get_axon_direction(hobj)
+
+ for sec in hobj.axon:
+ h.delete_section(sec=sec)
+ h.execute('create axon[2]', hobj)
+
+ h.pt3dadd(beg1[0], beg1[1], beg1[2], 1, sec=hobj.axon[0])
+ h.pt3dadd(end1[0], end1[1], end1[2], 1, sec=hobj.axon[0])
+ hobj.all.append(sec=hobj.axon[0])
+ h.pt3dadd(beg2[0], beg2[1], beg2[2], 1, sec=hobj.axon[1])
+ h.pt3dadd(end2[0], end2[1], end2[2], 1, sec=hobj.axon[1])
+ hobj.all.append(sec=hobj.axon[1])
+
+ hobj.axon[0].connect(hobj.soma[0], 0.5, 0)
+ hobj.axon[1].connect(hobj.axon[0], 1.0, 0)
+
+ hobj.axon[0].L = 30.0
+ hobj.axon[1].L = 30.0
+
+ h.define_shape()
+
+ for sec in hobj.axon:
+ # print "sec.L:", sec.L
+ if np.abs(30-sec.L) > 0.0001:
+ io.log_exception('Axon stub L is not 30')
+
+
+def fix_axon_allactive_directed(hobj):
+ all_sec_names = []
+ for sec in hobj.all:
+ all_sec_names.append(sec.name().split(".")[1][:4])
+
+ if 'axon' not in all_sec_names:
+ io.log_exception('There is no axonal reconstruction in the swc file.')
+ else:
+ beg1, end1, beg2, end2 = get_axon_direction(hobj)
+
+ axon_diams = [hobj.axon[0].diam, hobj.axon[0].diam]
+ for sec in hobj.all:
+ section_name = sec.name().split(".")[1][:4]
+ if section_name == 'axon':
+ axon_diams[1] = sec.diam
+
+ for sec in hobj.axon:
+ h.delete_section(sec=sec)
+ h.execute('create axon[2]', hobj)
+ hobj.axon[0].connect(hobj.soma[0], 1.0, 0)
+ hobj.axon[1].connect(hobj.axon[0], 1.0, 0)
+
+ h.pt3dadd(beg1[0], beg1[1], beg1[2], axon_diams[0], sec=hobj.axon[0])
+ h.pt3dadd(end1[0], end1[1], end1[2], axon_diams[0], sec=hobj.axon[0])
+ hobj.all.append(sec=hobj.axon[0])
+ h.pt3dadd(beg2[0], beg2[1], beg2[2], axon_diams[1], sec=hobj.axon[1])
+ h.pt3dadd(end2[0], end2[1], end2[2], axon_diams[1], sec=hobj.axon[1])
+ hobj.all.append(sec=hobj.axon[1])
+
+ hobj.axon[0].L = 30.0
+ hobj.axon[1].L = 30.0
+
+ h.define_shape()
+
+ for sec in hobj.axon:
+ # io.log_info('sec.L: {}'.format(sec.L))
+ if np.abs(30 - sec.L) > 0.0001:
+ io.log_exception('Axon stub L is not 30')
+
+
+def get_axon_direction(hobj):
+ for sec in hobj.somatic:
+ n3d = int(h.n3d()) # get number of n3d points in each section
+ soma_end = np.asarray([h.x3d(n3d - 1), h.y3d(n3d - 1), h.z3d(n3d - 1)])
+ mid_point = int(n3d / 2)
+ soma_mid = np.asarray([h.x3d(mid_point), h.y3d(mid_point), h.z3d(mid_point)])
+
+ for sec in hobj.all:
+ section_name = sec.name().split(".")[1][:4]
+ if section_name == 'axon':
+ n3d = int(h.n3d()) # get number of n3d points in each section
+ axon_p3d = np.zeros((n3d, 3)) # to hold locations of 3D morphology for the current section
+ for i in range(n3d):
+ axon_p3d[i, 0] = h.x3d(i)
+ axon_p3d[i, 1] = h.y3d(i)
+ axon_p3d[i, 2] = h.z3d(i)
+
+ # Add soma coordinates to the list
+ p3d = np.concatenate(([soma_mid], axon_p3d), axis=0)
+
+ # Compute PCA
+ pca = PCA(n_components=3)
+ pca.fit(p3d)
+ unit_v = pca.components_[0]
+
+ unit_v = unit_v / np.linalg.norm(unit_v) # normalize to a unit vector
+
+ # Find the direction
+ axon_end = axon_p3d[-1] - soma_mid
+ if np.dot(unit_v, axon_end) < 0:
+ unit_v *= -1
+
+ axon_seg_coor = np.zeros((4, 3))
+ # unit_v = np.asarray([0,1,0])
+ axon_seg_coor[0] = soma_end
+ axon_seg_coor[1] = soma_end + (unit_v * 30.)
+ axon_seg_coor[2] = soma_end + (unit_v * 30.)
+ axon_seg_coor[3] = soma_end + (unit_v * 60.)
+
+ return axon_seg_coor
+
+
+nml_files = {} # For caching neuroml file trees
+def NMLLoad(cell, template_name, dynamic_params):
+ """Convert a NEUROML file to a NEURON hoc cell object.
+
+ Current limitations:
+ * Ignores the nml morphology section. You must pass in an swc file
+ * Only handles the biophysical components of biophysically detailed cells. All properties must be assigned to a segment group.
+
+ :param cell:
+ :param template_name:
+ :param dynamic_params:
+ :return:
+ """
+ # Last I checked there is no built-in way to load a NML file directly into NEURON through the API; instead we have
+ # to manually parse the nml file and build the NEURON cell object section-by-section.
+ morphology_file = cell.morphology_file
+ hobj = h.Biophys1(str(morphology_file))
+ # Whether the axon is cut before or after setting cell channels and mechanisms can create drastically
+ # different results. Currently NML files don't produce the same results if you use model_processing directives.
+ # TODO: Find a way to specify model_processing directive with NML file
+ fix_axon_peri(hobj)
+
+ # Load the hoc template containing a swc initialized NEURON cell
+ if template_name in nml_files:
+ nml_params = nml_files[template_name]
+ else:
+ # Parse the NML parameters file xml tree and cache.
+ biophys_dirs = cell.network.get_component('biophysical_neuron_models_dir')
+ nml_path = os.path.join(biophys_dirs, template_name)
+ nml_params = NMLTree(nml_path)
+ nml_files[template_name] = nml_params
+
+ # Iterate through the NML tree by section and use the properties to manually create cell mechanisms
+ section_lists = [(sec, sec.name().split(".")[1][:4]) for sec in hobj.all]
+ for sec, sec_name in section_lists:
+ for prop_name, prop_obj in nml_params[sec_name].items():
+ if prop_obj.element_tag() == 'resistivity':
+ sec.Ra = prop_obj.value
+
+ elif prop_obj.element_tag() == 'specificCapacitance':
+ sec.cm = prop_obj.value
+
+ elif prop_obj.element_tag() == 'channelDensity' and prop_obj.ion_channel == 'pas':
+ sec.insert('pas')
+ setattr(sec, 'g_pas', prop_obj.cond_density)
+ for seg in sec:
+ seg.pas.e = prop_obj.erev
+
+ elif prop_obj.element_tag() == 'channelDensity' or prop_obj.element_tag() == 'channelDensityNernst':
+ sec.insert(prop_obj.ion_channel)
+ setattr(sec, prop_obj.id, prop_obj.cond_density)
+ if prop_obj.ion == 'na':
+ sec.ena = prop_obj.erev
+ elif prop_obj.ion == 'k':
+ sec.ek = prop_obj.erev
+
+ elif prop_obj.element_tag() == 'concentrationModel':
+ sec.insert(prop_obj.id)
+ setattr(sec, 'gamma_' + prop_obj.type, prop_obj.gamma)
+ setattr(sec, 'decay_' + prop_obj.type, prop_obj.decay)
+
+ return hobj
+
+def set_extracellular(hobj, cell, dynamics_params):
+ for sec in hobj.all:
+ sec.insert('extracellular')
+
+ return hobj
+
+
+add_cell_model(NMLLoad, directive='nml', model_type='biophysical')
+add_cell_model(Biophys1, directive='ctdb:Biophys1', model_type='biophysical', overwrite=False)
+add_cell_model(Biophys1, directive='ctdb:Biophys1.hoc', model_type='biophysical', overwrite=False)
+add_cell_model(IntFire1, directive='nrn:IntFire1', model_type='point_process', overwrite=False)
+
+
+add_cell_processor(aibs_perisomatic, overwrite=False)
+add_cell_processor(aibs_allactive, overwrite=False)
+add_cell_processor(aibs_perisomatic_directed, overwrite=False)
+add_cell_processor(aibs_allactive_directed, overwrite=False)
+add_cell_processor(set_extracellular, overwrite=False)
+add_cell_processor(set_extracellular, 'extracellular', overwrite=False)
\ No newline at end of file
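All of the directives above funnel through the same registry, so a project can add its own model-processing step without touching this module. A hedged sketch of registering a custom processor; the function name and registry key are hypothetical, the (hobj, cell, dynamics_params) -> hobj contract mirrors aibs_perisomatic() above, and the body assumes it lives alongside (or imports) the helpers defined in this file:

    from bmtk.simulator.bionet.pyfunction_cache import add_cell_processor

    def my_directed_peri(hobj, cell, dynamics_params):
        # same contract as the defaults: mutate the hoc object, return it
        if dynamics_params is not None:
            fix_axon_perisomatic_directed(hobj)   # defined in this module
            set_params_peri(hobj, dynamics_params)
        return hobj

    add_cell_processor(my_directed_peri, 'my_directed_peri', overwrite=False)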
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_setters/cell_models.pyc b/bmtk-vb/bmtk/simulator/bionet/default_setters/cell_models.pyc
new file mode 100644
index 0000000..4bc74f0
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/default_setters/cell_models.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_setters/synapse_models.py b/bmtk-vb/bmtk/simulator/bionet/default_setters/synapse_models.py
new file mode 100644
index 0000000..013cbcb
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/default_setters/synapse_models.py
@@ -0,0 +1,206 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from neuron import h
+
+from bmtk.simulator.bionet.pyfunction_cache import add_synapse_model
+from bmtk.simulator.bionet.nrn import *
+
+
+def exp2syn(syn_params, xs, secs):
+ """Create a list of exp2syn synapses
+
+ :param syn_params: parameters of a synapse
+ :param xs: list of normalized distances along the section
+ :param secs: target sections
+ :return: list of NEURON synapse objects
+ """
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.Exp2Syn(x, sec=sec)
+ syn.e = syn_params['erev']
+ syn.tau1 = syn_params['tau1']
+ syn.tau2 = syn_params['tau2']
+ syns.append(syn)
+ return syns
+
+
+def Exp2Syn(syn_params, sec_x, sec_id):
+ """Create a list of exp2syn synapses
+
+ :param syn_params: parameters of a synapse
+ :param sec_x: normalized distance along the section
+ :param sec_id: target section
+ :return: NEURON synapse object
+ """
+ syn = h.Exp2Syn(sec_x, sec=sec_id)
+ syn.e = syn_params['erev']
+ syn.tau1 = syn_params['tau1']
+ syn.tau2 = syn_params['tau2']
+ return syn
+
+
+@synapse_model
+def stp1syn(syn_params, xs, secs):
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.stp1syn(x, sec=sec)
+
+ syn.e = syn_params["erev"]
+ syn.p0 = 0.5
+ syn.tau_r = 200
+ syn.tau_1 = 5
+ syns.append(syn)
+
+ return syns
+
+
+@synapse_model
+def stp2syn(syn_params, x, sec):
+ syn = h.stp2syn(x, sec=sec)
+ syn.e = syn_params["erev"]
+ syn.p0 = syn_params["p0"]
+ syn.tau_r0 = syn_params["tau_r0"]
+ syn.tau_FDR = syn_params["tau_FDR"]
+ syn.tau_1 = syn_params["tau_1"]
+ return syn
+
+
+@synapse_model
+def stp3syn(syn_params, xs, secs):
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.stp3syn(x, sec=sec) # temporary
+ syn.e = syn_params["erev"]
+ syn.p0 = 0.6
+ syn.tau_r0 = 200
+ syn.tau_FDR = 2000
+ syn.tau_D = 500
+ syn.tau_1 = 5
+ syns.append(syn)
+
+ return syns
+
+
+@synapse_model
+def stp4syn(syn_params, xs, secs):
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.stp4syn(x, sec=sec)
+ syn.e = syn_params["erev"]
+ syn.p0 = 0.6
+ syn.tau_r = 200
+ syn.tau_1 = 5
+ syns.append(syn)
+
+ return syns
+
+
+@synapse_model
+def stp5syn(syn_params, x, sec): # temporary
+ syn = h.stp5syn(x, sec=sec)
+ syn.e = syn_params["erev"]
+ syn.tau_1 = syn_params["tau_1"]
+ syn.tau_r0 = syn_params["tau_r0"]
+ syn.tau_FDR = syn_params["tau_FDR"]
+ syn.a_FDR = syn_params["a_FDR"]
+ syn.a_D = syn_params["a_D"]
+ syn.a_i = syn_params["a_i"]
+ syn.a_f = syn_params["a_f"]
+ syn.pbtilde = syn_params["pbtilde"]
+ return syn
+
+
+def stp5isyn(syn_params, xs, secs): # temporary
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.stp5isyn(x, sec=sec)
+ syn.e = syn_params["erev"]
+ syn.tau_1 = syn_params["tau_1"]
+ syn.tau_r0 = syn_params["tau_r0"]
+ syn.tau_FDR = syn_params["tau_FDR"]
+ syn.a_FDR = syn_params["a_FDR"]
+ syn.a_D = syn_params["a_D"]
+ syn.a_i = syn_params["a_i"]
+ syn.a_f = syn_params["a_f"]
+ syn.pbtilde = syn_params["pbtilde"]
+ syns.append(syn)
+
+ return syns
+
+
+@synapse_model
+def tmgsyn(syn_params, xs, secs):
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.tmgsyn(x, sec=sec)
+ syn.e = syn_params["erev"]
+ syn.tau_1 = syn_params["tau_1"]
+ syn.tau_rec = syn_params["tau_rec"]
+ syn.tau_facil = syn_params["tau_facil"]
+ syn.U = syn_params["U"]
+ syn.u0 = syn_params["u0"]
+ syns.append(syn)
+
+ return syns
+
+
+@synapse_model
+def expsyn(syn_params, x, sec):
+ """Create a list of expsyn synapses
+
+ :param syn_params: parameters of a synapse (dict)
+ :param x: normalized distance along the section (float)
+ :param sec: target section (hoc object)
+ :return: NEURON synapse object
+ """
+ syn = h.ExpSyn(x, sec=sec)
+ syn.e = syn_params['erev']
+ syn.tau = syn_params["tau1"]
+ return syn
+
+
+@synapse_model
+def exp1syn(syn_params, xs, secs):
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.exp1syn(x, sec=sec)
+ syn.e = syn_params['erev']
+ syn.tau = syn_params["tau_1"]
+ syns.append(syn)
+ return syns
+
+
+@synapse_model
+def exp1isyn(syn_params, xs, secs):
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.exp1isyn(x, sec=sec)
+ syn.e = syn_params['erev']
+ syn.tau = syn_params["tau_1"]
+ syns.append(syn)
+ return syns
+
+
+add_synapse_model(Exp2Syn, 'exp2syn', overwrite=False)
+add_synapse_model(Exp2Syn, overwrite=False)
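New point processes plug into the same registry. A sketch registering NEURON's built-in AlphaSynapse under a hypothetical 'alphasyn' key, with the same (syn_params, sec_x, sec_id) signature as Exp2Syn above; the parameter-key mapping is illustrative:

    from neuron import h
    from bmtk.simulator.bionet.pyfunction_cache import add_synapse_model

    def AlphaSyn(syn_params, sec_x, sec_id):
        syn = h.AlphaSynapse(sec_x, sec=sec_id)  # built-in NEURON point process
        syn.e = syn_params['erev']
        syn.tau = syn_params['tau1']
        return syn

    add_synapse_model(AlphaSyn, 'alphasyn', overwrite=False)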
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_setters/synapse_models.pyc b/bmtk-vb/bmtk/simulator/bionet/default_setters/synapse_models.pyc
new file mode 100644
index 0000000..7e3c6a0
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/default_setters/synapse_models.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_setters/synaptic_weights.py b/bmtk-vb/bmtk/simulator/bionet/default_setters/synaptic_weights.py
new file mode 100644
index 0000000..0f0973d
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/default_setters/synaptic_weights.py
@@ -0,0 +1,51 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import math
+
+from bmtk.simulator.bionet.pyfunction_cache import add_weight_function
+
+
+def default_weight_fnc(edge_props, src_props, trg_props):
+ return edge_props['syn_weight']
+
+
+def wmax(edge_props, src_props, trg_props):
+ return edge_props["syn_weight"]
+
+
+def gaussianLL(edge_props, src_props, trg_props):
+ src_tuning = src_props['tuning_angle']
+ tar_tuning = trg_props['tuning_angle']
+
+ w0 = edge_props["syn_weight"]
+ sigma = edge_props["weight_sigma"]
+
+ delta_tuning = abs(abs(abs(180.0 - abs(float(tar_tuning) - float(src_tuning)) % 360.0) - 90.0) - 90.0)
+ weight = w0 * math.exp(-(delta_tuning / sigma) ** 2)
+
+ return weight
+
+
+add_weight_function(wmax, 'wmax', overwrite=False)
+add_weight_function(gaussianLL, 'gaussianLL', overwrite=False)
+add_weight_function(default_weight_fnc, 'default_weight_fnc', overwrite=False)
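The nested abs/mod expression in gaussianLL() folds the circular difference between two tuning angles into [0, 90] degrees, so the Gaussian acts on orientation mismatch rather than raw angle difference. A quick standalone check of that folding, with hand-chosen values:

    import math

    def delta_tuning(src, trg):
        # same expression as in gaussianLL() above
        return abs(abs(abs(180.0 - abs(float(trg) - float(src)) % 360.0) - 90.0) - 90.0)

    assert delta_tuning(10.0, 10.0) == 0.0     # identical tuning -> full weight
    assert delta_tuning(0.0, 90.0) == 90.0     # orthogonal tuning
    assert delta_tuning(350.0, 20.0) == 30.0   # wraps correctly around 360
    # the weight then follows w0 * exp(-(delta/sigma)**2)
    print(1.0 * math.exp(-(30.0 / 50.0) ** 2))  # ~0.698 for w0=1, sigma=50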
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_setters/synaptic_weights.pyc b/bmtk-vb/bmtk/simulator/bionet/default_setters/synaptic_weights.pyc
new file mode 100644
index 0000000..ee8a4ff
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/default_setters/synaptic_weights.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_templates/BioAxonStub.hoc b/bmtk-vb/bmtk/simulator/bionet/default_templates/BioAxonStub.hoc
new file mode 100644
index 0000000..df8660d
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/default_templates/BioAxonStub.hoc
@@ -0,0 +1,61 @@
+begintemplate BioAxonStub
+
+public init
+public soma, dend, apic, axon
+public all, somatic, basal, apical, axonal
+
+objref all, somatic, basal, apical, axonal
+objref this
+
+create soma[1]
+create dend[1]
+create apic[1]
+create axon[1]
+
+
+proc init() {localobj nl, import
+ all = new SectionList()
+ somatic = new SectionList()
+ basal = new SectionList()
+ apical = new SectionList()
+ axonal = new SectionList()
+ forall delete_section()
+
+// nl = new Import3d_Neurolucida3()
+ nl = new Import3d_SWC_read()
+ nl.quiet = 1
+ nl.input($s1)
+ import = new Import3d_GUI(nl, 0)
+// import.quite = 1
+ import.instantiate(this)
+
+ simplify_axon()
+}
+
+proc simplify_axon() {
+
+ forsec axonal { delete_section() }
+ create axon[2]
+
+ axon[0] {
+ L = 30
+ diam = 1
+ nseg = 1+2*int(L/40)
+ all.append()
+ axonal.append()
+ }
+ axon[1] {
+ L = 30
+ diam = 1
+ nseg = 1+2*int(L/40)
+ all.append()
+ axonal.append()
+ }
+ connect axon(0), soma(0.5)
+ connect axon[1](0), axon[0](1)
+ define_shape()
+
+
+}
+
+endtemplate BioAxonStub
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_templates/Biophys1.hoc b/bmtk-vb/bmtk/simulator/bionet/default_templates/Biophys1.hoc
new file mode 100644
index 0000000..e25192a
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/default_templates/Biophys1.hoc
@@ -0,0 +1,32 @@
+begintemplate Biophys1
+
+public init
+public soma, dend, apic, axon
+public all, somatic, basal, apical, axonal
+
+objref all, somatic, basal, apical, axonal
+objref this
+
+create soma[1]
+create dend[1]
+create apic[1]
+create axon[1]
+
+
+proc init() {localobj nl, import
+ all = new SectionList()
+ somatic = new SectionList()
+ basal = new SectionList()
+ apical = new SectionList()
+ axonal = new SectionList()
+ forall delete_section()
+
+ nl = new Import3d_SWC_read()
+ nl.quiet = 1
+ nl.input($s1)
+ import = new Import3d_GUI(nl, 0)
+ import.instantiate(this)
+
+}
+
+endtemplate Biophys1
diff --git a/bmtk-vb/bmtk/simulator/bionet/default_templates/advance.hoc b/bmtk-vb/bmtk/simulator/bionet/default_templates/advance.hoc
new file mode 100644
index 0000000..f4ed0b8
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/default_templates/advance.hoc
@@ -0,0 +1,10 @@
+// custom proc advance()
+
+objref pysim // defined in the Simulation as h.pysim = self
+
+pysim = new PythonObject()
+
+proc advance() {
+ fadvance()
+ pysim.post_fadvance() // run Simulation.post_fadvance() function after each fadvance call
+}
diff --git a/bmtk-vb/bmtk/simulator/bionet/gids.pyc b/bmtk-vb/bmtk/simulator/bionet/gids.pyc
new file mode 100644
index 0000000..89dd0fa
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/gids.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/iclamp.py b/bmtk-vb/bmtk/simulator/bionet/iclamp.py
new file mode 100644
index 0000000..fe823ef
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/iclamp.py
@@ -0,0 +1,38 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from neuron import h
+
+
+class IClamp(object):
+ def __init__(self, amplitude, delay, duration):
+ self._iclamp_amp = amplitude
+ self._iclamp_del = delay
+ self._iclamp_dur = duration
+ self._stim = None
+
+ def attach_current(self, cell):
+ self._stim = h.IClamp(cell.hobj.soma[0](0.5))
+ self._stim.delay = self._iclamp_del
+ self._stim.dur = self._iclamp_dur
+ self._stim.amp = self._iclamp_amp
+ return self._stim
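Outside the config-driven path, the wrapper can also be used directly. A minimal sketch, assuming NEURON is installed and `cell` is any bionet cell exposing hobj.soma[0], as in cell.py above:

    from neuron import h

    clamp = IClamp(amplitude=0.12, delay=500.0, duration=1000.0)  # nA, ms, ms
    stim = clamp.attach_current(cell)  # places an h.IClamp at the soma midpoint
    h.tstop = 2000.0
    h.run()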
diff --git a/bmtk-vb/bmtk/simulator/bionet/iclamp.pyc b/bmtk-vb/bmtk/simulator/bionet/iclamp.pyc
new file mode 100644
index 0000000..312c976
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/iclamp.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/import3d.hoc b/bmtk-vb/bmtk/simulator/bionet/import3d.hoc
new file mode 100644
index 0000000..3bcad33
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/import3d.hoc
@@ -0,0 +1,12 @@
+{xopen("import3d/import3d_sec.hoc")}
+{xopen("import3d/read_swc.hoc")}
+{xopen("import3d/read_nlcda.hoc")}
+{xopen("import3d/read_nlcda3.hoc")}
+{xopen("import3d/read_nts.hoc")}
+{xopen("import3d/read_morphml.hoc")}
+{xopen("import3d/import3d_gui.hoc")}
+objref tobj, nil
+proc makeimport3dtool() {
+ tobj = new Import3d_GUI(nil)
+ tobj = nil
+}
diff --git a/bmtk-vb/bmtk/simulator/bionet/import3d/import3d_gui.hoc b/bmtk-vb/bmtk/simulator/bionet/import3d/import3d_gui.hoc
new file mode 100644
index 0000000..81d6935
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/import3d/import3d_gui.hoc
@@ -0,0 +1,1174 @@
+{load_file("celbild.hoc")}
+{load_file("stdlib.hoc")}
+
+begintemplate Import3d_GUI
+public swc, diam_glyph, box, plrot, readfile, redraw, name
+public origin, rotmatold, raworigin, g, rotraw, instantiate
+external hoc_sf_
+objref swc, g, box, this, rotmat, m2, origin, tobj, rotmatold
+objref raworigin, rotsav, viewsec, rawsel, deck
+objref file, nil, problist, types, editbox
+strdef tstr, tstr1, typelabel_, filename
+public quiet
+
+
+proc init() {
+
+ quiet = 0
+
+ if (numarg() == 2) if ($2 == 0) {
+ swc = $o1
+ return
+ }
+ if ($o1 == nil) {
+ file = new File()
+ filename = "choose a file "
+ }else{
+ file = $o1.file
+ hoc_sf_.head(file.getname(), "[^/]*$", tstr)
+ file.chooser("r", "Import 3-D Reconstruction File", "*", "Read", "Cancel", tstr)
+ filename =file.getname()
+ }
+ ztrans_ = 0
+ dummy_ = 0
+ undo_type_ = 0
+ show_point_ = 1
+ show_diam_ = 1
+ if ($o1 == nil) { build() map() return }
+ init1($o1)
+ build()
+ map()
+ init2()
+
+}
+
+proc map() {
+ sprint(tstr, "%s", this)
+ if (numarg() == 0) {
+ box.map(tstr)
+ }else{
+ box.map(tstr, $2, $3, $4, $5)
+ }
+}
+
+proc init1() {
+ i=0 j=0
+ swc = $o1
+ selpoint_ = -1
+ selid_ = swc.pt2id(selpoint_)
+ viewsec = new List()
+ showtype(-10000)
+ rotated_ = 0
+ rotmat = new Matrix(3,3)
+ rotmatold = rotmat.c.ident
+ rotsav = rotmat.c.ident
+ origin = new Vector(3)
+ raworigin = new Vector(3)
+ rawsel = new Vector(3)
+ m2 = new Matrix(3,3)
+}
+proc init2() {
+ rot(0,0)
+ pl()
+ g.exec_menu("View = plot")
+ g.exec_menu("Zoom")
+}
+
+proc build() {local i
+ box = new HBox(3)
+ box.full_request(1)
+ box.save("")
+ box.ref(this)
+ box.intercept(1)
+ box.adjuster(400)
+ g = new Graph(0)
+ g.view(2)
+ g.xaxis(3)
+ deck = new Deck(3)
+ build_panel()
+ deck.map
+ box.intercept(0)
+}
+
+proc build_panel() {local i
+ deck.intercept(1)
+ xpanel("")
+ xcheckbox(filename, &readfile_, "readfile()")
+ if (swc == nil) {
+ xlabel(" accepted file formats:")
+ xlabel(" SWC")
+ xlabel(" Neurolucida (v1 and v3)")
+ xlabel(" Eutectic")
+ if (nrnpython("")) xlabel(" MorphML")
+ for i = 0, 15 { xlabel("") }
+ xpanel(0)
+ deck.intercept(0)
+ deck.flip_to(0)
+ return
+ }
+ sprint(tstr, "File format: %s", swc.filetype)
+ xlabel(tstr)
+ xlabel("-------------------------------")
+ g.menu_remove("Zoom")
+ g.menu_tool("Zoom", "zoom")
+ g.menu_remove("Translate ")
+ g.menu_tool("Translate ", "translate")
+ g.menu_remove("Rotate")
+ g.menu_tool("Rotate (about axis in plane)", "rotate")
+ xcheckbox("Rotate 45deg about y axis", &dummy_, "rot45()")
+ xcheckbox("Rotated (vs Raw view)", &rotated_, "rotraw()")
+ xcheckbox("Show Points", &show_point_, "pl()")
+ xcheckbox("Show Diam", &show_diam_, "pl()")
+ xvarlabel(typelabel_)
+ xmenu("View type")
+ xradiobutton("All", "showtype(-10000) pl()", 1)
+ xradiobutton("Section containing selected point", "showsec() pl()")
+ xradiobutton("Distal (tree) from selected point", "showdistal() pl()")
+ xradiobutton("Proximal (path to root) from selected point", "showprox() pl()")
+ xradiobutton("Root sections", "showroot() pl()")
+ if (swc.type.min != swc.type.max) {
+ for i = swc.type.min, swc.type.max {
+ if (swc.type.indwhere("==", i) != -1) {
+ sprint(tstr, "type %d", i)
+ sprint(tstr1, "showtype(%d) pl()", i)
+ xradiobutton(tstr, tstr1)
+ }
+ }
+ }
+ xmenu()
+ g.menu_remove("Select point")
+ g.menu_tool("Select point", "selpoint", "selpoint1(1)")
+ if (strcmp(swc.filetype, "Neurolucida") == 0) {
+ xpvalue("Line#", &selid_, 1, "selid(1)")
+ if (swc.err) {
+ xbutton("Problem points", "probpointpanel()")
+ }
+ }else if (strcmp(swc.filetype, "Neurolucida V3") == 0) {
+ xpvalue("Line#", &selid_, 1, "selid(1)")
+ }else{
+ xpvalue("Select id", &selid_, 1, "selid(1)")
+ }
+ xlabel("-------------------------------")
+ xbutton("Edit", "map_edit()")
+ xmenu("Export")
+ xbutton("CellBuilder", "cbexport()")
+ xbutton("Instantiate", "instantiate(nil)")
+ xmenu()
+ sprint(tstr, "%s filter facts", swc.filetype)
+ xbutton(tstr, "swc.helptxt()")
+ xpanel(0)
+ deck.intercept(0)
+ deck.flip_to(0)
+}
+
+
+proc map_edit() {
+ if (editbox == nil) {
+ build_edit()
+ }
+ if (editbox.ismapped) { return }
+ sprint(tstr, "Edit %s", this)
+ editbox.map(tstr)
+}
+proc build_edit() {
+ editbox = new VBox()
+ editbox.intercept(1)
+ editbox.save("")
+ xpanel("")
+ ztransitem()
+ xlabel("Select point:")
+ xcheckbox("Largest z change", &dummy_, "sel_largest_dz()")
+ xlabel("then action:")
+ xcheckbox("z-translate rest of tree to parent point", &dummy_, "edit2()")
+ xcheckbox("z-translate to average of adjacent points", &dummy_, "edit1()")
+ xcheckbox("undo last", &dummy_, "edit0()")
+ xlabel("-------------------")
+ xcheckbox("3 point filter of all z values (no undo)", &dummy_, "edit3()")
+ xpanel()
+ editbox.intercept(0)
+}
+
+proc sel_largest_dz() {local i, j, dz, dzmax, imax, jmax localobj sec, tobj
+ dummy_ = 0
+ dzmax = -1
+ for i = 0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ tobj = sec.raw.getrow(2).deriv(1,1).abs
+ j = tobj.max_ind
+ dz = tobj.x[j]
+ if (dz > dzmax) {
+ jmax = j+1
+ imax = i
+ dzmax = dz
+ }
+ }
+ if (dzmax > 0) {
+ selpoint_ = swc.sec2pt(imax, jmax)
+ selpoint_dependent_show()
+ swc.sections.object(imax).raw.getcol(jmax, rawsel)
+ selid_ = swc.pt2id(selpoint_)
+ pl()
+ }
+}
+
+proc ztransitem() {local i, n localobj raw
+ n = 0
+ for i = 0, swc.sections.count-1 {
+ raw = swc.sections.object(i).raw
+ if (abs(raw.x[2][0] - raw.x[2][1]) > 10) {
+ n += 1
+ }
+ }
+ if (n > 0) {
+ sprint(tstr, "z translation for %d abrupt branch backlash", n)
+ xcheckbox(tstr, &ztrans_, "ztrans()")
+ }
+}
+
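+// Toggle handler: for sections at least two levels below the root, an abrupt
+// (>5 um) z jump at the section start is absorbed into a per-section ztrans
+// offset and removed from its z row; unchecking restores the saved offsets.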
+proc ztrans() { local i, zd, pn localobj sec
+ if (ztrans_) {
+ for i = 0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (object_id(sec.parentsec) == 0) { continue }
+ if (object_id(sec.parentsec.parentsec) == 0) { continue }
+ zd = sec.raw.x[2][1] - sec.raw.x[2][0]
+ if (abs(zd) > 5) {
+ zd += sec.parentsec.ztrans
+ }else{
+ zd = sec.parentsec.ztrans
+ }
+ sec.ztrans = zd
+ sec.raw.setrow(2, sec.raw.getrow(2).sub(sec.ztrans))
+ pn = sec.parentsec.raw.ncol
+ sec.raw.x[2][0] = sec.parentsec.raw.x[2][pn-1]
+ }
+ }else{
+ for i = 0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (sec.ztrans) {
+			sec.raw.setrow(2, sec.raw.getrow(2).add(sec.ztrans))
+ pn = sec.parentsec.raw.ncol
+ sec.raw.x[2][0] = sec.parentsec.raw.x[2][pn-1]
+ sec.ztrans = 0
+ }
+ }
+ }
+ redraw()
+}
+
+proc edit0() {local i, n localobj sec
+ dummy_ = 0
+ if (undo_type_ == 1) {
+ i = swc.pt2sec(undo_selpoint_, sec)
+ sec.raw.x[2][i] = undo_z_
+ sec.raw.getcol(i, rawsel)
+ }else if (undo_type_ == 2) {
+ i = swc.pt2sec(undo_selpoint_, sec)
+ n = sec.raw.ncol
+ for i=i, n-1 {
+ sec.raw.x[2][i] += undo_z_
+ }
+ sec.raw.getcol(i, rawsel)
+ for i=0, swc.sections.count-1 { swc.sections.object(i).volatile = 0 }
+ sec.volatile = 1
+ for i=0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (object_id(sec.parentsec)) if (sec.parentsec.volatile) {
+ sec.volatile = 1
+ sec.raw.setrow(2, sec.raw.getrow(2).add(undo_z_))
+ }
+ }
+ }
+ undo_type_ = 0
+ redraw()
+}
+
+proc edit1() {local i, z1, z2 localobj sec
+ // z translate to average of adjacent points
+ dummy_ = 0
+ if (selpoint_ >= 0) {
+ i = swc.pt2sec(selpoint_, sec)
+ if (i > 0) {
+ z1 = sec.raw.x[2][i-1]
+ }else{
+ return
+ }
+ if (i < sec.raw.ncol-1) {
+ z2 = sec.raw.x[2][i+1]
+ }else{
+ return
+ }
+ undo_selpoint_ = selpoint_
+ undo_type_ = 1
+ undo_z_ = sec.raw.x[2][i]
+ sec.raw.x[2][i] = (z1 + z2)/2
+ sec.raw.getcol(i, rawsel)
+ }
+ redraw()
+}
+
+proc edit2() {local i, ip, z1, n localobj sec
+ // z-translate rest of tree to parent point
+ dummy_ = 0
+ if (selpoint_ >= 0) {
+ ip = swc.pt2sec(selpoint_, sec)
+ if (ip > 0) {
+ z1 = sec.raw.x[2][ip] - sec.raw.x[2][ip-1]
+ }else{
+ return
+ }
+ undo_selpoint_ = selpoint_
+ undo_type_ = 2
+ undo_z_ = z1
+ n = sec.raw.ncol
+ for i=ip, n-1 {
+ sec.raw.x[2][i] -= z1
+ }
+ sec.raw.getcol(ip, rawsel)
+ for i=0, swc.sections.count-1 { swc.sections.object(i).volatile = 0 }
+ sec.volatile = 1
+ for i=0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (object_id(sec.parentsec)) if (sec.parentsec.volatile) {
+ sec.volatile = 1
+ sec.raw.setrow(2, sec.raw.getrow(2).sub(z1))
+ }
+ }
+ }
+ redraw()
+}
+
+proc edit3() {local i localobj sec
+ dummy_ = 0
+ for i=0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ sec.raw.setrow(2, sec.raw.getrow(2).medfltr)
+ }
+ if (selpoint_ >= 0) {
+ i = swc.pt2sec(selpoint_, sec)
+ sec.raw.getcol(i, rawsel)
+ }
+ redraw()
+}
+
+proc probpointpanel() {
+ problist = new List()
+ problist.browser("Problem points", "s")
+ problist.select_action("probpoint(hoc_ac_)")
+ swc.fillproblist(problist)
+ problist.select(-1)
+}
+
+proc probpoint() {local i
+ if ($1 < 0) {return}
+ sscanf(problist.object($1).s, "%d:", &i)
+ selid_ = i
+ selid(0)
+}
+
+proc readfile() {
+ readfile_ = 0
+ if (numarg() == 0) {
+ file.chooser("r", "Import 3-D Reconstruction File", "*", "Read", "Cancel")
+ if (file.chooser()) {
+ if (!some_format()) {
+ return
+ }
+ }else{
+ return
+ }
+ }else{
+ file = new File($s1)
+ if (!some_format()) {
+ return
+ }
+ }
+ // if new file
+ problist = nil
+ deck.flip_to(-1)
+ build_panel()
+ deck.move_last(0)
+ deck.flip_to(0)
+ init1(swc)
+ init2()
+ doNotify()
+ if (swc.err) {
+ printf("\n")
+ sprint(tstr, "%s: File translation problems. See the messages on the terminal", file.getname)
+ continue_dialog(tstr)
+ if (strcmp(swc.filetype, "Neurolucida V3") == 0) {
+ swc.b2spanel(this)
+ }
+ }
+ deck.remove_last()
+}
+
+func some_format() {local i, a,b,c,d,e,f,g, n
+ if (!file.ropen()) {
+ sprint(tstr, "Can't read %s", file.getname)
+ continue_dialog(tstr)
+ return 0
+ }
+ while (1) {
+ if (file.eof) {
+ file.close
+ sprint(tstr, "Can't figure out file format for %s", file.getname)
+ continue_dialog(tstr)
+ return 0
+ }
+ file.gets(tstr)
+ if (hoc_sf_.head(tstr, "^\\<\\?xml", tstr1) != -1) {
+ if (nrnpython("")) {
+ swc = new Import3d_MorphML() break
+ }else{
+ file.close
+ sprint(tstr, "Can't read MorphML: Python not available.")
+ continue_dialog(tstr)
+ return 0
+ }
+ }
+ n = sscanf(tstr, "%f %f %f %f %f %f %f", &a, &b, &c, &d, &e, &f, &g)
+ if (n == 7) { swc = new Import3d_SWC_read() break }
+ n = sscanf(tstr, "[%d,%d] (%f,%f,%f) %f", &a, &b, &c, &d, &e, &f)
+ if (n == 6) { swc = new Import3d_Neurolucida_read() break }
+ n = sscanf(tstr, "%d %s %d %f %f %f %f", &a, tstr, &b, &c, &d, &e, &f)
+ if (n == 7) { swc = new Import3d_Eutectic_read() break }
+ if (hoc_sf_.tail(tstr, "^[ \t]*", tstr1) != -1) {
+ //unfortunately regexp does not allow an explicit "("
+ hoc_sf_.left(tstr1, 1)
+ if (strcmp(tstr1, "(") == 0) {
+ swc = new Import3d_Neurolucida3() break
+ }
+ }
+ if (hoc_sf_.head(tstr, "^;[ \t]*V3", tstr1) != -1) {
+ swc = new Import3d_Neurolucida3() break
+ }
+ }
+ file.close
+ filename = file.getname
+ swc.input(filename)
+ return 1
+}
+
+proc pl_point() { local i, j, i1 localobj m, m0
+ if (viewsec.count) {m0 = swc.sections.object(0).xyz}
+ for i=0, viewsec.count-1 {
+ viewsec.object(i).pl_point(g)
+ }
+}
+
+proc pl_centroid() {local i
+ for i=0, swc.sections.count-1 {
+ swc.sections.object(i).pl_centroid(g)
+ }
+}
+proc pl_diam() {local i localobj sec
+ for i=0, viewsec.count-1 {
+ viewsec.object(i).pl_diam(g)
+ }
+}
+proc pl() { localobj tobj
+ g.erase_all
+ if (show_diam_) {pl_diam()}
+ pl_centroid()
+ if (show_point_) {pl_point()}
+ if (selpoint_ >= 0) {
+ tobj = m2.mulv(rawsel)
+ g.mark(tobj.x[0], tobj.x[1], "O", 12, 2, 1)
+ swc.label(selpoint_, tstr)
+ g.label(.1, .05, tstr, 2, 1, 0, 0, 1)
+ }
+}
+
+proc redraw() { local i localobj sec
+ if (selpoint_ >= 0) {
+ i = swc.pt2sec(selpoint_, sec)
+ sec.raw.getcol(i, rawsel)
+ }
+ showtype(viewtype_)
+ rot(0,0)
+ pl()
+}
+
+proc showtype() {
+ viewtype_ = $1
+ viewsec.remove_all
+ if ($1 == -10000) {
+ typelabel_ = "View all types"
+ for i=0, swc.sections.count - 1 {
+ viewsec.append(swc.sections.object(i))
+ swc.sections.object(i).centroid_color = 2
+ }
+ }else{
+ sprint(typelabel_, "View type %d", viewtype_)
+ for i=0, swc.sections.count - 1 {
+ if (swc.sections.object(i).type == viewtype_) {
+ viewsec.append(swc.sections.object(i))
+ swc.sections.object(i).centroid_color = 2
+ }else{
+ swc.sections.object(i).centroid_color = 9
+ }
+ }
+ }
+}
+
+proc selpoint_dependent_show() {
+ if (viewtype_ == -20000) {
+ showdistal()
+ }else if (viewtype_ == -30000) {
+ showprox()
+ }else if (viewtype_ == -40000) {
+ showsec()
+ }else if (viewtype_ == -50000) {
+ showroot()
+ }
+}
+
+proc showdistal() {local i localobj sec
+ viewtype_ = -20000
+ typelabel_ = "Show distal (tree) from selected point"
+ viewsec.remove_all
+ for i=0, swc.sections.count - 1 {
+ swc.sections.object(i).centroid_color = 9
+ }
+ if (selpoint_ < 0) { return }
+ swc.pt2sec(selpoint_, sec)
+ // recursion is trivial but I want to avoid the depth so use the
+ // fact that children are after the parent in the sections list
+ sec.centroid_color = 2
+ viewsec.append(sec)
+ for i=0, swc.sections.count - 1 {
+ if (swc.sections.object(i).centroid_color == 2) {
+ break
+ }
+ }
+ for i=i+1, swc.sections.count - 1 {
+ sec = swc.sections.object(i)
+ if (sec.parentsec != nil) if (sec.parentsec.centroid_color == 2) {
+ sec.centroid_color = 2
+ viewsec.append(sec)
+ }
+ }
+}
+
+proc showprox() {localobj sec
+ viewtype_ = -30000
+ typelabel_ = "Show proximal (path to root) from selected point"
+ viewsec.remove_all
+ for i=0, swc.sections.count - 1 {
+ swc.sections.object(i).centroid_color = 9
+ }
+ if (selpoint_ < 0) { return }
+ for (swc.pt2sec(selpoint_, sec); sec != nil; sec = sec.parentsec) {
+ viewsec.append(sec)
+ sec.centroid_color = 2
+ }
+}
+
+proc showsec() {localobj sec
+ viewtype_ = -40000
+ typelabel_ = "Show section containing selected point"
+ viewsec.remove_all
+ for i=0, swc.sections.count - 1 {
+ swc.sections.object(i).centroid_color = 9
+ }
+ if (selpoint_ < 0) { return }
+ swc.pt2sec(selpoint_, sec)
+ if (sec != nil) {
+ viewsec.append(sec)
+ sec.centroid_color = 2
+ }
+}
+
+proc showroot() {localobj sec
+ viewtype_ = -50000
+ typelabel_ = "Show root sections"
+ viewsec.remove_all
+ for i=0, swc.sections.count - 1 {
+ sec = swc.sections.object(i)
+ sec.centroid_color = 9
+ if (sec.parentsec == nil) {
+ sec.centroid_color = 2
+ viewsec.append(sec)
+ }
+ }
+}
+
+proc selpoint1() { // deselection not supported by menu_tool
+ if ($1 == 0) {
+ selpoint_ = -1
+ }
+}
+proc selpoint() {local i, j
+ if ($1 == 2) {
+ nearest_point($2, $3, &i, &j)
+ selpoint_ = swc.sec2pt(i, j)
+ selpoint_dependent_show()
+ swc.sections.object(i).raw.getcol(j, rawsel)
+ selid_ = swc.pt2id(selpoint_)
+ pl()
+ }
+}
+
+proc selid() {local i, j localobj sec
+ selpoint_ = swc.id2pt(selid_)
+ selid_ = swc.pt2id(selpoint_)
+ if (selpoint_ >= 0) {
+ i = swc.pt2sec(selpoint_, sec)
+ sec.raw.getcol(i, rawsel)
+ }
+ selpoint_dependent_show()
+ pl()
+ if ($1 == 1) {
+ swc.label(selpoint_, tstr)
+ print tstr
+ }
+}
+
+proc zoom() {local x1,y1,scale,w,h,x0,y0
+ if ($1 == 2) {
+ i = g.view_info()
+ x = $2
+ y = $3
+ xrel=g.view_info(i, 11, $2)
+ yrel=g.view_info(i, 12, $3)
+ width=g.view_info(i,1)
+ height=g.view_info(i,2)
+ }
+ if ($1 == 1) {
+ x1 = g.view_info(i, 11, $2)
+ y1 = g.view_info(i, 12, $3)
+ y1 = (y1 - yrel) + (x1 - xrel)
+ if(y1 > 2) { y1 = 2 } else if (y1 < -2) { y1 = -2 }
+ scale = 10^(y1)
+ w = width/scale
+ h = height/scale
+ x0 = x - w*xrel
+ y0 = y - h*yrel
+ g.view_size(i, x0, x0+w, y0, y0+h)
+ }
+}
+
+proc translate() {local x0,y0
+ if ($1 == 2) {
+ i = g.view_info()
+ x = g.view_info(i, 5)
+ y = g.view_info(i, 7)
+ xrel=g.view_info(i, 11, $2)
+ yrel=g.view_info(i, 12, $3)
+ width=g.view_info(i,1)
+ height=g.view_info(i,2)
+ }
+ if ($1 == 1) {
+ x1 = g.view_info(i, 11, $2)
+ y1 = g.view_info(i, 12, $3)
+ x0 = x - width*(x1 - xrel)
+ y0 = y - height*(y1 - yrel)
+ g.view_size(i, x0, x0 + width, y0, y0 + height)
+ }
+}
+
+func nearest_point() { local i, j, xmin localobj m, v1
+ // return section index and sectionpoint index in $3 and $4
+ xmin = 1e9
+ for i=0, swc.sections.count-1 {
+ m = swc.sections.object(i).xyz
+ v1 = m.getrow(0).sub($1).pow(2).add(m.getrow(1).sub($2).pow(2))
+ j = v1.min_ind
+ if (v1.x[j] < xmin) {
+ xmin = v1.x[j]
+ $&3 = i
+ $&4 = j
+ }
+ }
+ return xmin
+}
+
+proc rotate() {local x, y, x0, y0, len, a
+ if ($1 == 2) {
+ rotated_ = 1
+ nearest_point($2, $3, &i, &j)
+ swc.sections.object(i).xyz.getcol(j, origin)
+ swc.sections.object(i).raw.getcol(j, raworigin)
+//print i, j origin.printf
+ i = g.view_info()
+ xpix = g.view_info(i,13, $2)
+ ypix = g.view_info(i, 14, $3) // from top
+ left = g.view_info(i, 5)
+ bottom = g.view_info(i, 7)
+ width=g.view_info(i,1)
+ height=g.view_info(i,2)
+ }else{
+ x = g.view_info(i,13, $2) - xpix
+ y = ypix - g.view_info(i, 14, $3)
+ // rotation axis is normal to the line, rotation magnitude
+ // proportional to length of line
+ len = sqrt(x*x + y*y)
+ // rotation axis angle
+ if (len > 0) {
+ a = atan2(x, y)
+ b = len/50
+ }else{
+ a = 0
+ b = 0
+ }
+ rot(a, b)
+ pl()
+ tobj = rotmat.mulv(origin)
+ //tobj.x[0] should be at same place as origin.x[0]
+ x0 = left - origin.x[0] + tobj.x[0]
+ y0 = bottom - origin.x[1] + tobj.x[1]
+ g.view_size(i, x0, x0 + width, y0, y0 + height)
+
+ }
+ if ($1 == 3) {
+ m2.c(rotmatold)
+//rotmatold.printf
+ }
+}
+
+proc rotraw() {local x0, y0
+ width = g.view_info(0, 1)
+ height = g.view_info(0, 2)
+ left = g.view_info(0,5)
+ bottom = g.view_info(0,7)
+ if (rotated_ == 0) { //turn off
+ rotmatold.c(rotsav)
+ tobj = rotmatold.mulv(raworigin)
+ //tobj.x[0] should be at same place as origin.x[0]
+ x0 = left + raworigin.x[0] - tobj.x[0]
+ y0 = bottom + raworigin.x[1] - tobj.x[1]
+ rotmatold.ident
+ }else{ // back to previous rotation
+ rotsav.c(rotmatold)
+ tobj = rotmatold.mulv(raworigin)
+ //tobj.x[0] should be at same place as origin.x[0]
+ x0 = left - raworigin.x[0] + tobj.x[0]
+ y0 = bottom - raworigin.x[1] + tobj.x[1]
+ }
+ rot(0,0)
+ pl()
+ g.view_size(0, x0, x0 + width, y0, y0 + height)
+}
+
+proc rot45() {
+ rot(PI/2, PI/4)
+ rotated_=1
+ m2.c(rotmatold)
+ pl()
+ dummy_ = 0
+}
+
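+// rot(a, b): rotate by angle b about an axis in the view plane at angle a,
+// built by conjugating an x-axis rotation with a z-axis rotation
+// (m2 = Rz*Rx*Rz^T), composed with the accumulated rotmatold, and applied
+// to every section's raw coordinates.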
+proc rot() {local s, c, i localobj sec
+ s = sin($1) c = cos($1)
+ m2.zero
+ m2.x[2][2] = 1
+ m2.x[1][1] = m2.x[0][0] = c
+ m2.x[1][0] = -s
+ m2.x[0][1] = s
+//m2.printf
+ s = sin($2) c = cos($2)
+ rotmat.zero
+ rotmat.x[0][0] = 1
+ rotmat.x[1][1] = rotmat.x[2][2] = c
+ rotmat.x[1][2] = s
+ rotmat.x[2][1] = -s
+//rotmat.printf
+
+ m2.mulm(rotmat).mulm(m2.transpose(m2), rotmat)
+ rotmat.mulm(rotmatold, m2)
+//rotmat.printf
+ for i=0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ sec.rotate(m2)
+ }
+}
+
+proc cbexport() {local i, j, k localobj sec, cell
+ chk_valid()
+ j = 0
+ for i=0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (sec.is_subsidiary) { continue }
+ if (sec.parentsec == nil) {
+ sec.volatile2 = j
+ j += 1
+ }else{
+ sec.volatile2 = sec.parentsec.volatile2
+ }
+ }
+ cell = new List()
+ for k=0, j-1 {
+ cell.remove_all()
+ for i=0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (sec.is_subsidiary) { continue }
+ if (sec.volatile2 == k) {
+ cell.append(sec)
+ }
+ }
+ cbexport1(cell)
+ }
+}
+
+proc sphere_rep() { local i localobj x, y, z, d
+ x = new Vector(3) y = x.c z = x.c d = x.c
+ x.fill($o1.x[0])
+ y.fill($o2.x[0])
+ z.fill($o3.x[0])
+ d.fill($o4.x[0])
+ x.x[0] -= $o4.x[0]/2
+ x.x[2] += $o4.x[0]/2
+ $o1 = x $o2 = y $o3 = z $o4 = d
+}
+
+proc cbexport1() {local i, j, k, min localobj cb, sec, psec, cbsec, slist, m, subsetindex, xx, yy, zz, dd
+ for i=0, $o1.count-1 {
+ sec = $o1.object(i)
+ sec.volatile = i
+ }
+ min = set_nameindex($o1)
+ cb = new CellBuild()
+ cb.topol.names_off = 1
+ cb.topol.circles_off = 1
+ slist = cb.topol.slist
+ slist.remove_all()
+ for i=0, $o1.count-1 {
+ sec = $o1.object(i)
+ psec = nil
+ if (sec.parentsec != nil) {
+ psec = slist.object(sec.parentsec.volatile)
+ }
+ type2name(sec.type, tstr)
+ cbsec = new CellBuildSection(tstr, sec.nameindex, 0, psec, sec.parentx)
+ slist.append(cbsec)
+ m = sec.raw
+ j = sec.first
+ xx = m.getrow(0).c(j)
+ yy = m.getrow(1).c(j)
+ zz = m.getrow(2).c(j)
+ dd = sec.d.c(j)
+ if (sec.iscontour_) {
+ contour2centroid(xx, yy, zz, dd, sec)
+ }
+ if (sec.parentsec == nil && dd.size == 1) {
+ // represent spherical soma as 3 point cylinder
+ // with L=diam
+ sphere_rep(xx, yy, zz, dd)
+ }
+ k = dd.size-1
+ cbsec.position(xx.x[0], yy.x[0], xx.x[k], yy.x[k])
+ cbsec.i3d = k+1
+ cbsec.p3d = new P3D(k + 1)
+ cbsec.p3d.x = xx
+ cbsec.p3d.y = yy
+ cbsec.p3d.z = zz
+ cbsec.p3d.d = dd
+ if (sec.first == 1) {
+ cbsec.logstyle(m.x[0][0], m.x[1][0], m.x[2][0])
+ }
+ cb.all.add(cbsec)
+ }
+ cb.topol.consist()
+ cb.topol.update()
+ cb.subsets.update()
+ subsetindex = types.c.fill(0)
+ k = 0
+ for i=0, types.size-1 {
+ if (types.x[i] > 0) {
+ k += 1 // after all
+ subsetindex.x[i] = k
+ j = i + min
+ if (j == 1) {
+ tstr = "somatic"
+ }else if (j == 2) {
+ tstr = "axonal"
+ }else if (j == 3) {
+ tstr = "basal"
+ }else if (j == 4) {
+ tstr = "apical"
+ }else if (j < 0) {
+ sprint(tstr, "minus_%dset", -j)
+ }else{
+ sprint(tstr, "dendritic_%d", j)
+ }
+ m = new SNList(tstr)
+ cb.subsets.snlist.append(m)
+ }
+ }
+ for i=0, slist.count-1 {
+ sec = $o1.object(i)
+ cbsec = slist.object(i)
+ cb.subsets.snlist.object(subsetindex.x[sec.type-min]).add(cbsec)
+ }
+ //cb.page(2) //unfortunately not able to blacken the radiobutton
+}
+
+func set_nameindex() {local i, min localobj sec
+ min = swc.type.min
+ types = new Vector(swc.type.max - min + 1)
+ for i = 0, $o1.count-1 {
+ sec = $o1.object(i)
+ if (sec.is_subsidiary) { continue }
+ sec.nameindex = types.x[sec.type - min]
+ types.x[sec.type-min] += 1
+ }
+ return min
+}
+
+proc instantiate() {local i, j, min, haspy localobj sec, xx, yy, zz, dd, pyobj
+ chk_valid()
+ haspy = nrnpython("import neuron")
+ if (haspy) {
+ pyobj = new PythonObject()
+ }
+ min = set_nameindex(swc.sections)
+ // create
+ for i = 0, types.size-1 {
+ type2name(i+min, tstr)
+ if (types.x[i] == 1) {
+ sprint(tstr1, "~create %s[1]\n", tstr)
+ execute(tstr1, $o1)
+ }else if (types.x[i] > 1) {
+ sprint(tstr1, "~create %s[%d]\n", tstr, types.x[i])
+ execute(tstr1, $o1)
+ }
+ if ($o1 != nil) { mksubset($o1, i+min, tstr) }
+ }
+ if ($o1 != nil) {execute("forall all.append", $o1) }
+ // connect
+ for i = 0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (sec.is_subsidiary) { continue }
+ name(sec, tstr)
+ if (i == 0) {
+ sprint(tstr1, "access %s", tstr)
+ if ($o1 == nil) {
+ execute(tstr1, $o1)
+ }
+ }
+ if (sec.parentsec != nil) {
+ name(sec.parentsec, tstr1)
+ sprint(tstr1, "%s connect %s(0), %g", tstr1, tstr, sec.parentx)
+ execute(tstr1, $o1)
+ }
+ // 3-d point info
+ if (sec.first == 1) {
+ sprint(tstr1, "%s { pt3dstyle(1, %g, %g, %g) }", tstr, sec.raw.x[0][0], sec.raw.x[1][0], sec.raw.x[2][0])
+ execute(tstr1, $o1)
+ }
+ j = sec.first
+ xx = sec.raw.getrow(0).c(j)
+ yy = sec.raw.getrow(1).c(j)
+ zz = sec.raw.getrow(2).c(j)
+ dd = sec.d.c(j)
+ if (sec.iscontour_) {
+ if (haspy) {
+ pyobj.neuron._declare_contour(sec, tstr)
+ }
+ contour2centroid(xx, yy, zz, dd, sec)
+ }
+ if (dd.size == 1) { sphere_rep(xx, yy, zz, dd) }
+ for j = 0, dd.size-1 {
+ sprint(tstr1, "%s { pt3dadd(%g, %g, %g, %g) }",\
+ tstr,xx.x[j], yy.x[j], zz.x[j], dd.x[j])
+ execute(tstr1, $o1)
+ }
+ }
+}
+
+proc chk_valid() {local i, x, replot localobj sec
+ replot = 0
+ // some validity checks added in response to experienced file errors
+ // sometimes we can work around them
+
+ // two point sections with 0 length, remove, unless root
+ for (i=swc.sections.count-1; i >= 0; i -= 1) {
+ sec = swc.sections.object(i)
+ if (sec.parentsec == nil) { continue }
+ if ((sec.raw.ncol - sec.first) <= 1) {
+			if (!quiet) { // added by Sergey to suppress the warning output
+ printf("One point section %s ending at line %d has been removed\n", sec, swc.iline.x[swc.id2line(sec.id)])
+ }
+ rm0len(i, sec)
+ replot = 1
+ }else if ((sec.raw.ncol - sec.first) <= 2) {
+ if (sec.raw.getcol(sec.first).eq(sec.raw.getcol(sec.first + 1))) {
+ printf("Two point section ending at line %d with 0 length has been removed\n", swc.iline.x[swc.id2line(sec.id)])
+ rm0len(i, sec)
+ replot = 1
+ }
+ }
+ }
+ if (replot && g != nil) {
+ redraw()
+ }
+}
+
+proc rm0len() {local i localobj sec
+ swc.sections.remove($1)
+ for i=$1, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (sec.parentsec == $o2) {
+ sec.parentsec = $o2.parentsec
+ sec.parentx = $o2.parentx
+			if (!quiet) { // added by Sergey to suppress the warning output
+ printf("\tand child %s reattached\n", sec)
+ }
+ }
+ }
+}
+
+proc mksubset() {
+ if ($2 == 1) {
+ tstr1 = "somatic"
+ }else if ($2 == 2) {
+ tstr1 = "axonal"
+ }else if ($2 == 3) {
+ tstr1 = "basal"
+ }else if ($2 == 4) {
+ tstr1 = "apical"
+ }else if ($2 < 0) {
+ sprint(tstr1, "minus_%dset", -$2)
+ }else{
+ sprint(tstr1, "dendritic_%d", $2)
+ }
+ sprint(tstr1, "forsec \"%s\" %s.append", $s3, tstr1)
+ execute(tstr1, $o1)
+}
+
+proc contour2centroid() {local i, j, imax, imin, ok localobj mean, pts, d, max, min, tobj, rad, rad2, side2, pt, major, m, minor
+ if (object_id($o5.contour_list)) {
+ contourstack2centroid($o1, $o2, $o3, $o4, $o5)
+ return
+ }
+ mean = swc.sections.object(0).contourcenter($o1, $o2, $o3)
+ if (g != nil) {
+ g.beginline(6,1)
+ for i=0, $o1.size-1 {
+ g.line($o1.x[i], $o2.x[i])
+ }
+ g.flush()
+ }
+ pts = new Matrix(3, $o1.size)
+ for i=1,3 { pts.setrow(i-1, $oi.c.sub(mean.x[i-1])) }
+ // find the major axis of the ellipsoid that best fits the shape
+ // assuming (falsely in general) that the center is the mean
+
+ m = new Matrix(3,3)
+ for i=0, 2 {
+ for j=i, 2 {
+ m.x[i][j] = pts.getrow(i).mul(pts.getrow(j)).sum
+ m.x[j][i] = m.x[i][j]
+ }
+ }
+ tobj = m.symmeig(m)
+ // major axis is the one with largest eigenvalue
+ major = m.getcol(tobj.max_ind)
+ // minor is normal and in xy plane
+ minor = m.getcol(3-tobj.min_ind-tobj.max_ind)
+ minor.x[2] = 0
+ minor.div(minor.mag)
+if (g != nil) {
+g.beginline(4, 3) g.line(mean.x[0], mean.x[1])
+g.line(mean.x[0] + 20*major.x[0], mean.x[1] + 20*major.x[1]) g.flush
+}
+ d = new Vector(pts.ncol)
+ rad = new Vector(pts.ncol)
+ for i=0, pts.ncol-1 {
+ pt = pts.getcol(i)
+ d.x[i] = pt.dot(major) // position on the line
+ tobj = major.c.mul(d.x[i])
+ rad.x[i] = pt.dot(minor)
+ }
+ imax = d.max_ind
+ d.rotate(-imax)
+ rad.rotate(-imax)
+ imin = d.min_ind
+ side2 = d.c(imin)
+ rad2 = rad.c(imin)
+ d.resize(imin).reverse
+ rad.resize(imin).reverse
+ // now we have the two sides without the min and max points (rad=0)
+ // we hope both sides now monotonically increase, i.e. convex
+ // make it convex
+ for (j = d.size-1; j > 0; j -= 1) {
+ if (d.x[j] <= d.x[j-1]) {
+//printf("removed d %d %g\n", j, d.x[j])
+ d.remove(j)
+ rad.remove(j)
+ if (j != d.size()) { j += 1 }
+ }
+ }
+ for (j = side2.size-1; j > 0; j -= 1) {
+ if (side2.x[j] <= side2.x[j-1]) {
+//printf("removed side2 %d %g\n", j, side2.x[j])
+ side2.remove(j)
+ rad2.remove(j)
+ if (j != side2.size()) { j += 1 }
+ }
+ }
+ // can interpolate so diams on either side of major have same d
+ tobj = d.c.append(side2)
+ tobj.sort
+ i = tobj.x[1] j = tobj.x[tobj.size-2]
+ tobj.indgen(i, j, (j-i)/20)
+ rad.interpolate(tobj, d)
+ rad2.interpolate(tobj,side2)
+ d = tobj
+ pts.resize(3, d.size)
+ $o4.resize(d.size)
+ for i = 0, d.size-1 {
+ pt = major.c.mul(d.x[i]).add(mean)
+ $o4.x[i] = abs(rad.x[i] - rad2.x[i])
+ tobj = pt.c.add(minor.c.mul(rad.x[i]))
+if (g != nil) g.beginline(5,3) g.line(tobj.x[0], tobj.x[1])
+ tobj = pt.c.add(minor.c.mul(rad2.x[i]))
+if (g != nil) g.line(tobj.x[0], tobj.x[1]) g.flush
+// pt.add(minor.c.mul(rad2.x[i])).add(minor.c.mul(rad.x[i]))
+ pts.setcol(i, pt)
+ }
+ // avoid 0 diameter ends
+ $o4.x[0] = ($o4.x[0]+$o4.x[1])/2
+ i = $o4.size-1
+ $o4.x[i] = ($o4.x[i]+$o4.x[i-1])/2
+ for i=1,3 { $oi = pts.getrow(i-1) }
+// print d d.printf print rad rad.printf
+// print side2 side2.printf print rad2 rad2.printf
+}
+
+proc contourstack2centroid() {local i, j, area, d localobj c
+ area = $o5.stk_triang_area()
+ printf("stk_triang_area = %g\n", area)
+ for i=1,4 { $oi.resize(0) }
+ c = $o5.approximate_contour_by_circle(&d)
+ $o4.append(d) for i=1,3 { $oi.append(c.x[i-1]) }
+ for j=0, $o5.contour_list.count-1 {
+ c = $o5.contour_list.object(j).approximate_contour_by_circle(&d)
+ $o4.append(d) for i=1,3 { $oi.append(c.x[i-1]) }
+ }
+}
+
+proc name() {
+ type2name($o1.type, $s2)
+ if ($o1.nameindex > 0) {
+ sprint($s2, "%s[%d]", $s2, $o1.nameindex)
+ }
+}
+
+proc type2name() {
+ if ($1 == 1) {
+ $s2 = "soma"
+ }else if ($1 == 2) {
+ $s2 = "axon"
+ }else if ($1 == 3) {
+ $s2 = "dend"
+ }else if ($1 == 4) {
+ $s2 = "apic"
+ }else if ($1 < 0) {
+ sprint($s2, "minus_%d", -$1)
+ }else{
+ sprint($s2, "dend_%d", $1)
+ }
+}
+endtemplate Import3d_GUI
diff --git a/bmtk-vb/bmtk/simulator/bionet/import3d/import3d_sec.hoc b/bmtk-vb/bmtk/simulator/bionet/import3d/import3d_sec.hoc
new file mode 100644
index 0000000..01b0b2d
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/import3d/import3d_sec.hoc
@@ -0,0 +1,392 @@
+begintemplate Import3d_Section
+// primarily for display; allows the GUI to run without instantiating sections
+// fid refers to the raw index of the point that id refers to.
+// For a root section fid is normally 0. For sections that have
+// parents, fid is normally 1 since the first point is often a copy of
+// the last point of the parent.
+// The variable first=0 means that when diam is shown, there
+// should be a glyph drawn defined by raw indices 0 and 1.
+// if this is a contour it may also contain a list of contours that
+// define a 3-d object
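+// Data layout: raw holds the untransformed 3 x npoint coordinates, xyz the
+// rotated copy used for display, and d the per-point diameters.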
+public raw, xyz, d, id, append, g, mkglyph, rotate, type, centroid_color
+public iscontour_, pid, parentsec, parentx, volatile, nameindex, first, fid
+public contour_list, pl_centroid, pl_diam
+public stk_triang_vec, stk_triang_area, is_subsidiary
+public volatile2, contourcenter, ztrans, approximate_contour_by_circle
+public pl_point, insrt, set_pt, stk_center, accurate_triangle_area
+objref raw, xyz, d, g, parentsec, contour_list, this, stk_triang_vec
+proc init() {
+ is_subsidiary = 0
+ ztrans = 0
+ first = 0
+ fid = 0
+ nameindex=0
+ parentx = 1
+ volatile = 0
+ volatile2 = 0
+ pid = -1
+ iscontour_ = 0
+ type = 0
+ centroid_color = 2
+ id = $1
+ raw = new Matrix(3, $2)
+ xyz = new Matrix(3, $2)
+ d = new Vector($2)
+}
+proc set_pt() {
+ raw.x[0][$1] = $2
+ raw.x[1][$1] = $3
+ raw.x[2][$1] = $4
+ d.x[$1] = $5
+}
+
+proc append() {local i, j, k
+ for i=0, $3-1 {
+ j = $1 + i
+ k = $2 + i
+ set_pt(j, $o4.x[k], $o5.x[k], $o6.x[k], $o7.x[k])
+ }
+}
+
+proc insrt() {local i, nr, nc
+ nr = raw.nrow nc = raw.ncol
+ d.resize(nc+1)
+ raw.resize(nr, nc+1)
+ xyz.resize(nr, nc+1)
+ for (i=nc-1; i >= $1; i -= 1) {
+ raw.setcol(i+1, raw.getcol(i))
+ d.x[i+1] = d.x[i]
+ }
+ set_pt($1, $2, $3, $4, $5)
+}
+
+proc pl_centroid() {local i, n
+ xyz.getrow(1).line($o1, xyz.getrow(0), centroid_color, 1)
+ if (iscontour_) {
+ n = xyz.ncol - 1
+ $o1.beginline(centroid_color, 1)
+ $o1.line(xyz.x[0][0], xyz.x[1][0])
+ $o1.line(xyz.x[0][n], xyz.x[1][n])
+ }
+ if (0) {
+ if (object_id(contour_list)) {
+ for i=0, contour_list.count-1 {
+ contour_list.object(i).pl_centroid($o1)
+ }
+ }
+ }
+}
+
+proc pl_diam() {local i
+ if (!iscontour_) {
+ mkglyph()
+ $o1.glyph(g, 0, 0)
+ }else{
+ if (object_id(contour_list)) {
+ if (!object_id(contour_list.object(0).stk_triang_vec)) {
+ mk_stk_triang_vec(this, contour_list.object(0))
+ for i=1, contour_list.count-1 {
+ mk_stk_triang_vec(contour_list.object(i-1), contour_list.object(i))
+ }
+ }
+ pl_stk_triang($o1, this, contour_list.object(0))
+ for i=1, contour_list.count-1 {
+ pl_stk_triang($o1, contour_list.object(i-1), contour_list.object(i))
+ }
+ }
+ }
+}
+
+proc pl_point() {local i
+ for i=first, xyz.ncol-1 {
+ $o1.mark(xyz.x[0][i], xyz.x[1][i], "s", 5, 3, 1)
+ }
+ if (object_id(parentsec) == 0) {
+ $o1.mark(xyz.x[0][0], xyz.x[1][0], "S", 8, 3, 1)
+ }
+ if (0) {
+ if (object_id(contour_list)) {
+ for i=0, contour_list.count-1 {
+ contour_list.object(i).pl_point($o1)
+ }
+ }
+ }
+}
+
+proc mkglyph() {local i, d1, d2 localobj x, y, norm, x1, y1, i1
+ g = new Glyph()
+ if (xyz.ncol - first < 1) { return }
+ // normal
+ x1 = xyz.getrow(0)
+ y1 = xyz.getrow(1)
+ if (xyz.ncol - first == 1) {
+ // render as spherical
+ g.circle(x1.x[0], y1.x[0], d.x[0]/2)
+ g.fill(1)
+ return
+ }
+ // may or may not want to include parent point in glyph
+ x = x1.c(first).deriv(1,1)
+ y = y1.c(first).deriv(1,1)
+ // point separations
+ norm = x.c.mul(x).add(y.c.mul(y)).sqrt.mul(2) // d is diam, need radius
+ // only want frustra for the non-zero separations
+ i1=norm.c.indvwhere("!=", 0)
+ if (i1.size == 0) {
+// printf("Section with id=%d has 0 length in this projection\n", id)
+ return
+ }
+ norm.index(norm, i1)
+ x.index(x, i1).div(norm)
+ y.index(y, i1).div(norm)
+
+ // but take care of the possible index offset due to missing parent point
+ if (first) { i1.add(first) }
+ i1.append(x1.size-1)
+ x1.index(x1, i1)
+ y1.index(y1, i1)
+
+ for i = 0, x.size-1 {
+ d1 = d.x[i1.x[i]] d2=d.x[i1.x[i]+1]
+ g.path()
+ g.m(x1.x[i]+y.x[i]*d1, y1.x[i]-x.x[i]*d1)
+ g.l(x1.x[i+1]+y.x[i]*d2, y1.x[i+1]-x.x[i]*d2)
+ g.l(x1.x[i+1]-y.x[i]*d2, y1.x[i+1]+x.x[i]*d2)
+ g.l(x1.x[i]-y.x[i]*d1, y1.x[i]+x.x[i]*d1)
+ g.close()
+ g.fill(1)
+ }
+}
+
+proc rotate() {
+ $o1.mulm(raw, xyz)
+ if (1) {
+ if (object_id(contour_list)) {
+ for i=0, contour_list.count-1 {
+ contour_list.object(i).rotate($o1)
+ }
+ }
+ }
+}
+
+
+// a utility function
+obfunc contourcenter() {local i localobj mean, pts, perim, d
+ // convert contour defined by $o1, $o2, $o3 vectors to
+ // 100 uniform points around perimeter
+ // and return the center coordinates as well as the uniform contour
+ // vectors (in $o1, $o2, $o3)
+ pts = new Matrix(3, $o1.size)
+ for i=1,2 { pts.setrow(i-1, $oi) }
+ for i=0,2 {pts.setrow(i, pts.getrow(i).append(pts.x[i][0]).deriv(1,1)) }
+ perim = new Vector(pts.ncol)
+ for i=1, pts.ncol-1 { perim.x[i] = perim.x[i-1] + pts.getcol(i-1).mag }
+ d = new Vector(101)
+ d.indgen(perim.x(perim.size-1)/100)
+ for i=1,3 $oi.interpolate(d, perim)
+ mean = new Vector(3)
+ for i=1, 3 { mean.x[i-1] = $oi.mean }
+ return mean
+}
+
+// return center (Vector.size=3) and average diameter in $&1
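+// The diameter is estimated as mean point radius + perim/(2*PI), i.e. twice
+// the average of two independent radius estimates.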
+obfunc approximate_contour_by_circle() {local i,n, perim localobj center, x, y, z
+ x=raw.getrow(0)
+ y=raw.getrow(1)
+ z=raw.getrow(2)
+ perim = 0
+ n = x.size
+ for i = 0, n-1 {
+ perim += edgelen(raw.getcol(i), raw.getcol((i+1)%n))
+ }
+ center = contourcenter(x, y, z)
+ if (0) {
+ $&1 = perim/PI
+ }else{
+ x.sub(center.x[0]).mul(x)
+ y.sub(center.x[1]).mul(y)
+ z.sub(center.x[2]).mul(z)
+// $&1 = 2*x.add(y).add(z).sqrt.mean
+ // average of radius based on perim and mean radius of all points
+ $&1 = x.add(y).add(z).sqrt.mean + perim/(2*PI)
+ }
+// printf("%g %g %g %g\n", center.x[0], center.x[1], center.x[2], $&1)
+// printf("perimeter approx = %g actual = %g\n", PI*$&1, perim)
+ return center
+}
+
+proc mk_stk_triang_vec() {local i, j, n1, n2, d1, d2 localobj i1, i2, trv
+ trv = new Vector()
+ $o2.stk_triang_vec = trv
+ // contour indices are chosen so points 0 cross 1 of a contour from center
+ // are in +z direction and points 0 between the two contours are
+ // guaranteed to be an edge. An extra index added to end to close the polygon
+ // I suppose this could fail if angle does not increase monotonically
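+	// The while loop below stitches the two contours into a triangle strip,
+	// greedily advancing along whichever contour yields the shorter next
+	// cross edge (compare d1 and d2).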
+ stk_contour_indices($o1, i1, $o1.raw.getcol(0))
+ stk_contour_indices($o2, i2, $o1.raw.getcol(0))
+ i = 0 j = 0
+ n1 = i1.size-1
+ n2 = i2.size-1
+ while(i < n1 || j < n2) {
+ trv.append(i1.x[i], i2.x[j])
+ if (i < n1 && j < n2) {
+ // which next one is shorter
+ d1 = ($o1.raw.x[0][i1.x[i]] - $o2.raw.x[0][i2.x[j+1]])^2 + ($o1.raw.x[1][i1.x[i]] - $o2.raw.x[1][i2.x[j+1]])^2
+ d2 = ($o1.raw.x[0][i1.x[i+1]] - $o2.raw.x[0][i2.x[j]])^2 + ($o1.raw.x[1][i1.x[i+1]] - $o2.raw.x[1][i2.x[j]])^2
+ if (d2 < d1) {
+ i += 1
+ }else{
+ j += 1
+ }
+ }else{
+ if (i < n1) {
+ i += 1
+ }else{
+ j += 1
+ }
+ }
+ }
+ trv.append(i1.x[i], i2.x[j])
+}
+
+proc stk_contour_indices() {local i, d, dmin, imin localobj c, x, y, z
+ $o2 = new Vector($o1.raw.ncol)
+ $o2.indgen()
+ // order the points counterclockwise. ie 0 cross 1 in -z direction
+ x = $o1.raw.getrow(0)
+ y = $o1.raw.getrow(1)
+ z = $o1.raw.getrow(2)
+ c = contourcenter(x, y, z)
+ x = $o1.raw.getcol(0).sub(c)
+ y = $o1.raw.getcol(1).sub(c)
+ if (x.x[0]*y.x[1] - x.x[1]*y.x[0] > 0) {
+ $o2.reverse()
+ }
+
+ // which point is closest to $o3
+ imin = -1
+ dmin = 1e9
+ for i=0, $o2.size - 1 {
+ d = edgelen($o1.raw.getcol($o2.x[i]), $o3)
+ if (d < dmin) {
+ dmin = d
+ imin = i
+ }
+ }
+ $o2.rotate(-imin)
+
+ $o2.append($o2.x[0])
+}
+
+proc pl_stk_triang() {local i, j localobj g, m1, m2, trv
+ g = $o1
+ m1 = $o2.xyz
+ m2 = $o3.xyz
+ trv = $o3.stk_triang_vec
+ for i=0, trv.size-1 {
+ g.beginline(centroid_color, 1)
+ j = trv.x[i]
+ g.line(m1.x[0][j], m1.x[1][j])
+ i += 1
+ j = trv.x[i]
+ g.line(m2.x[0][j], m2.x[1][j])
+ }
+}
+
+func edgelen() {
+ return sqrt($o1.c.sub($o2).sumsq)
+}
+
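+// Sum the areas of the triangle strip spanning two adjacent stack contours,
+// using the numerically robust triangle-area formula below.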
+func stk_triang_area1() {local area, i, i1, i2, j1, j2, a, b, c, na localobj m1, m2, trv
+ area = 0
+ m1 = $o1.raw
+ m2 = $o2.raw
+ trv = $o2.stk_triang_vec
+ i1 = trv.x[0]
+ i2 = trv.x[1]
+ a = edgelen(m1.getcol(i1), m2.getcol(i2))
+ na = 0
+ for i=2, trv.size-1 {
+ j1 = trv.x[i]
+ i += 1
+ j2 = trv.x[i]
+ b = edgelen(m1.getcol(j1), m2.getcol(j2))
+
+ // which contour for side c
+ if (i1 == j1) {
+ c = edgelen(m2.getcol(i2), m2.getcol(j2))
+ }else{
+ c = edgelen(m1.getcol(i1), m1.getcol(j1))
+ }
+
+ area += accurate_triangle_area(a, b, c)
+ na += 1
+ i1 = j1
+ i2 = j2
+ a = b
+ }
+//printf("stk_triang_area1 na=%d npoints=%d\n", na, m1.ncol+m2.ncol)
+ // missing one triangle
+ return area
+}
+
+func stk_triang_area() {local area, i
+ area = stk_triang_area1(this, contour_list.object(0))
+ for i=1, contour_list.count-1 {
+ area += stk_triang_area1(contour_list.object(i-1), contour_list.object(i))
+ }
+ return area
+}
+
+// the center of the centroid of the contour stack
+obfunc stk_center() {local i, j, len, th localobj c, centroid, x, y, z, r, lenvec
+ centroid = new Matrix(3, 1 + contour_list.count)
+ lenvec = new Vector(centroid.ncol) lenvec.resize(1)
+ x = raw.getrow(0)
+ y = raw.getrow(1)
+ z = raw.getrow(2)
+ c = contourcenter(x, y, z)
+ centroid.setcol(0, c)
+ len = 0
+ for i=0, contour_list.count-1 {
+ r = contour_list.object(i).raw
+ x = r.getrow(0)
+ y = r.getrow(1)
+ z = r.getrow(2)
+ c = contourcenter(x, y, z)
+ centroid.setcol(i+1, c)
+
+ len += sqrt(c.sub(centroid.getcol(i)).sumsq)
+ lenvec.append(len)
+ }
+ len = len/2
+ if (len == 0) {
+ c = centroid.getcol(0)
+ return c
+ }
+ i = lenvec.indwhere(">", len)
+ th = (len - lenvec.x[i-1])/(lenvec.x[i] - lenvec.x[i-1])
+ for j=0, 2 {
+ c.x[j] = th*centroid.x[j][i] + (1 - th)*centroid.x[j][i-1]
+ }
+ return c
+}
+
+func accurate_triangle_area() {local x localobj a
+ // from http://http.cs.berkeley.edu/~wkahan/Triangle.pdf
+ // W. Kahan
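+	// With sides sorted ascending (c <= b <= a) this computes
+	// area = sqrt((a+(b+c))*(c-(a-b))*(c+(a-b))*(a+(b-c)))/4,
+	// grouped exactly as written to avoid cancellation for needle-like triangles.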
+ x = float_epsilon
+ float_epsilon = 0
+ a = new Vector(3) a.resize(0)
+ a.append($1, $2, $3).sort
+ if ((a.x[0] - (a.x[2] - a.x[1])) < 0) {
+ float_epsilon = x
+ execerror("accurate_triangle_area:","not a triangle")
+ }
+ float_epsilon = x
+ x = .25*sqrt((a.x[2]+(a.x[1]+a.x[0])) * (a.x[0]-(a.x[2]-a.x[1])) \
+ * (a.x[0]+(a.x[2]-a.x[1])) * (a.x[2]+(a.x[1]-a.x[0])))
+ return x
+}
+
+endtemplate Import3d_Section
diff --git a/bmtk-vb/bmtk/simulator/bionet/import3d/read_morphml.hoc b/bmtk-vb/bmtk/simulator/bionet/import3d/read_morphml.hoc
new file mode 100644
index 0000000..c6801b4
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/import3d/read_morphml.hoc
@@ -0,0 +1,78 @@
+
+begintemplate Import3d_MorphML
+public input, filetype, type, sections, err, parsed
+public pt2id, id2pt, pt2sec, sec2pt, label, id2line
+objref type, sections, this, p, nil
+objref cables, points, cableid2index
+strdef filetype, tstr
+proc init() {
+ nrnpython("from neuron.neuroml.rdxml import rdxml")
+ //print "Import3d_MorphML"
+ filetype = "MorphML"
+ p = new PythonObject()
+}
+proc input() {
+ //print "Import3d_MorphML.input"
+ type = new Vector()
+ sections = new List(1000)
+ err = 0
+ p.rdxml($s1, this)
+}
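+// Callback invoked from the Python rdxml reader when parsing finishes:
+// each cable becomes an Import3d_Section, with parent links restored from
+// the cable ids.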
+proc parsed() {local i, j, ip, jp localobj cab, sec, pt
+ cables = $o1.cables_
+ points = $o1.points_
+ cableid2index = $o1.cableid2index_
+ // ptid2pt = $o1.ptid2pt_
+ //print $o1, cables.__len__()
+ for i=0, cables.__len__() - 1 {
+ cab = cables._[i]
+ sec = new Import3d_Section(cab.first_, cab.pcnt_)
+ sections.append(sec)
+ if (cab.parent_cable_id_ >= 0) {
+ ip = $o1.cableid2index_[cab.parent_cable_id_]
+ sec.parentsec = sections.object(ip)
+ sec.parentx = cab.px_
+ }
+ //print i, cab.id_, cab.name_
+ for j=0, cab.pcnt_ - 1 {
+ jp = cab.first_ + j
+ pt = points._[jp]
+ sec.set_pt(j, pt.x_, pt.y_, pt.z_, pt.d_)
+ }
+ }
+}
+func pt2id() {
+ //print "pt2id ", $1
+ if ($1 < 0) { return 0 }
+ if ($1 >= points.__len__()) { return points.__len__() - 1 }
+ return $1
+}
+func id2pt() {
+ //print "id2pt ", $1
+ return $1
+}
+func pt2sec() {local cid, cindex
+ //print "pt2sec ", $1, " cid=", points._[$1].cid_
+ cid = points._[$1].cid_
+ cindex = cableid2index._[cid]
+ //print " cindex=", cindex, " first=", cables._[cindex].first_
+ $o2 = sections.object(cindex)
+ //printf("pt2sec %s\n", $o2)
+ return $1 - cables._[cindex].first_
+}
+func sec2pt() {local i localobj sec
+ sec = sections.object($1)
+ //print "sec2pnt ", $1, $2, " secid=", sec.id, " cabid=", cables._[$1].id_
+ i = sec.id + $2 - sec.fid
+ return i
+}
+func id2line() {
+ //print "id2line ", $1
+ return $1
+}
+proc label() {localobj pt
+ pt = points._[$1]
+ sprint($s2, "pt[%d] Line %d x=%g y=%g z=%g d=%g", $1, pt.lineno_, pt.x_, pt.y_, pt.z_, pt.d_)
+}
+endtemplate Import3d_MorphML
+
diff --git a/bmtk-vb/bmtk/simulator/bionet/import3d/read_nlcda.hoc b/bmtk-vb/bmtk/simulator/bionet/import3d/read_nlcda.hoc
new file mode 100644
index 0000000..9a8e450
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/import3d/read_nlcda.hoc
@@ -0,0 +1,550 @@
+// Assume that except for soma, the move and line items form a tree
+// where, generally, a move is at the same point of the line to which
+// it is connected. Under this assumption, all major codes except 1 and 2
+// can be ignored.
+// An exception is the [10,5] code for branch point. The next point
+// is generally a line (not a move) with the same x,y,z as the branch point.
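+// Example data line:  [1,1] (1.2,3.4,5.6) 0.8
+// i.e. [major,minor] codes, then (x,y,z), then a final value that parse()
+// doubles (apparently a radius).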
+
+begintemplate Import3d_Neurolucida_read
+public input, pheader
+public type, x, y, z, d, iline, header, point2sec, sections, lines
+public label, id2pt, id2line, pt2id, pt2sec, sec2pt, file, filetype, err
+public points, pointtype, branchpoints, firstpoints
+public helptxt, iline2pt, mark, fillproblist
+external hoc_sf_
+objref major, minor, x, y, z, d, iline, header, lines, iline2sec
+objref type, pointtype, points, iline2pt
+objref file, vectors, sec2point, point2sec, sections
+objref firstpoints, branchpoints
+objref cursec, diam, nil, gm
+objref line_branch_err, parse_err, xyparent_err, xynotnearest_err, noparent_err
+objref line_coincide_err, line_branch_err_pt, somabbox_err
+strdef tstr, line, filetype
+double a[7]
+
+proc init() {
+ filetype = "Neurolucida"
+ vectors = new List()
+ header = new List()
+ lines = new List()
+ gm = new GUIMath()
+}
+
+proc input() {
+ err = 0
+ line_branch_err = new List()
+ parse_err = new List()
+ xyparent_err = new List()
+ xynotnearest_err = new List()
+ noparent_err = new List()
+ line_coincide_err = new List()
+ somabbox_err = new List()
+ line_branch_err_pt = new Vector()
+
+ rdfile($s1)
+ find_parents()
+ repair_diam()
+ connect2soma()
+ if (err) { errout() }
+}
+
+proc repair_diam() {local i localobj sec
+ // I am told, and it seems the case, that
+	// I am told, and it seems to be the case, that
+ // the last point of the previous branch. For this reason
+ // we set the diameter of the first point to the diameter
+ // of the second point in the section
+ for i=0, sections.count-1 {
+ sec = sections.object(i)
+ if (sec.parentsec != nil) {
+ if (sec.first < sec.d.size-1){
+ sec.d.x[sec.first] = sec.d.x[sec.first + 1]
+ }
+ }
+ }
+}
+
+proc rdfile() {local i, j
+ file = new File($s1)
+ // count lines for vector allocation space (not really necessary)
+ if (!file.ropen()) {
+ err = 1
+ printf("could not open %s\n", $s1)
+ }
+ for (i = 0; !file.eof(); i += 1) {
+ file.gets(line)
+ }
+ file.close()
+// printf("%s has %d lines\n", $s1, i)
+ alloc(i, major, minor, x, y, z, d, iline, pointtype, points)
+ diam = d
+ file.ropen()
+ for (i = 1; !file.eof(); i += 1) {
+ file.gets(line)
+ parse(i, line)
+ }
+ file.close()
+ iline2pt = new Vector(iline.x[iline.size-1])
+ j = 0
+ for i=0, points.size-2 {
+ while(j <= iline.x[points.x[i]]) {
+ iline2pt.x[j] = i
+ j += 1
+ }
+ }
+ for j=j, iline2pt.size-1 {
+ iline2pt.x[j] = points.size-1
+ }
+}
+
+proc alloc() { local i // $oi.size = 0 but enough space for $1 elements
+ for i = 2, numarg() {
+ $oi = new Vector($1)
+ $oi.resize(0)
+ vectors.append($oi)
+ }
+}
+
+func dist() {local x1, y1, z1
+ x1 = ($1 - x.x[$4])
+ y1 = ($2 - y.x[$4])
+ z1 = ($3 - z.x[$4])
+ return sqrt(x1*x1 + y1*y1 + z1*z1)
+}
+
+func xydist() {local x1, y1
+ x1 = (x.x[$1] - x.x[$2])
+ y1 = (y.x[$1] - y.x[$2])
+ return sqrt(x1*x1 + y1*y1)
+}
+
+func xysame() {
+ if ($1 == x.x[$3]) {
+ if ($2 == y.x[$3]) {
+ return 1
+ }
+ }
+ return 0
+}
+
+proc parse() {local i, n, m
+ n = sscanf($s2, "[%d,%d] (%f,%f,%f) %f", &a[0], &a[1], &a[2],\
+ &a[3], &a[4], &a[5])
+ hoc_sf_.left($s2, hoc_sf_.len($s2)-1)
+ if (n == 6) {
+ a[5] *= 2
+ iline_ = major.size
+ if (a[0] == 1) { // line
+ m = major.x[iline_ - 1]
+ if (m == 10 && minor.x[iline_-1] == 5) {
+ pointtype.append(0)
+ points.append(iline_)
+ if (!xysame(a[2], a[3], iline_-1)) {
+ err = 1
+ line_branch_err_pt.append(points.size-1)
+sprint(tstr, "%d: %s separated by %g from branch",\
+$1, $s2, dist(a[2], a[3], a[4], iline_-1))
+line_branch_err.append(new String(tstr))
+ }
+ }else if (m == 1 || m == 2) {
+ pointtype.append(1)
+ points.append(iline_)
+ }else{
+ pointtype.append(1)
+ points.append(iline_)
+ }
+ }else if (a[0] == 2) { // move
+ pointtype.append(0)
+ points.append(iline_)
+ }else if (a[0] == 10 && a[1] == 5) { // branch
+ pointtype.append(2)
+ points.append(iline_)
+ }else{
+ }
+ for i=0, 5 {
+ vectors.object(i).append(a[i])
+ }
+ iline.append($1) // for error messages
+ lines.append(new String($s2))
+ } else if (n == 0) { // comment
+ header.append(new String($s2))
+ } else {
+ err = 1
+ sprint(tstr, "%d: %s parse failure after item %d", $1, $s2, n)
+ parse_err.append(new String(tstr))
+ }
+}
+
+proc mark() {local i, n, a,b,c,d,e,f
+ print $o1, $2, iline, lines
+ i = iline.indwhere("==",$2)
+ printf("%d,%d: %s\n", i, iline.x[i], lines.object(i).s)
+ n = sscanf(lines.object(i).s, "[%d,%d] (%f,%f,%f) %f", &a,&b,&c,\
+ &d,&e,&f)
+ if (n == 6) {
+ print a,b,c,d,e,f
+ $o1.mark(c,d,"S",12,4,1)
+ }
+}
+
+proc pheader() {local i
+ for i=0, header.count-1 {
+ printf("%s", header.object(i).s)
+ }
+}
+
+proc find_parents() {local i, j, m, ip, jp, jpmin, d, dmin, xi,yi,zi, bp, ip1
+ // we need to associate all pointtype=0 with a branch point (except the
+ // ones conceptually connected to the soma
+ // assume the pid is earlier than the pointtype=0
+ point2sec = points.c.fill(-1)
+ branchpoints = pointtype.c.indvwhere("==", 2)
+ firstpoints = pointtype.c.indvwhere("==", 0)
+ sections = new List()
+ type = firstpoints.c.fill(0)
+ for i=0, firstpoints.size-1 {
+ ip = points.x[firstpoints.x[i]]
+ newsec(i)
+ type.x[i] = cursec.type
+ xi = x.x[ip] yi = y.x[ip] zi = z.x[ip]
+ dmin = 1e9
+ jpmin = -1
+ m = minor.x[ip]
+ if (m == 41) { // soma start (contour
+ continue
+/* some files use these as branch beginnings so check this after seeing if
+there are coincident points.
+ }else if (m == 1) { // dendrite start
+ continue
+ }else if (m == 21) { // axon start
+ continue
+ }else if (m == 61) { // apical dendrite start
+ continue
+*/
+ }
+ if (line_branch_err_pt.size) {
+ j = line_branch_err_pt.x[0]
+ if (ip == points.x[j]) {
+ physcon(i, ip, ip-1, j-1)
+ line_branch_err_pt.remove(0)
+ continue
+ }
+ }
+ for j=0, branchpoints.size-1 {
+ jp = points.x[branchpoints.x[j]]
+ if (ip <= jp) { break }
+ d = dist(xi, yi, zi, jp)
+ if (d < dmin) {
+ bp = branchpoints.x[j]
+ dmin = d
+ jpmin = jp
+ }
+ }
+ if (dmin <= 0) {
+ cursec.parentsec = sections.object(point2sec.x[bp])
+ }else if (m == 1) { // dendrite start
+ continue
+ }else if (m == 21) { // axon start
+ continue
+ }else if (m == 61) { // apical dendrite start
+ continue
+ }else{
+ err = 1
+sprint(tstr, "%d: %s branch at line %d is %.4g away",\
+iline.x[ip], lines.object(ip).s, iline.x[jpmin], dmin)
+ d = xydist(ip, jpmin)
+ if (d <= 0) { // overlay branch point in xy plane?
+ xyparent_err.append(new String(tstr))
+ physcon(i, ip, jpmin, bp)
+ }else if (ip > 0) {
+ // sometime it coincides with a previous LineTo
+ ip1 = firstpoints.x[i]-1
+ d = dist(xi, yi, zi, points.x[ip1])
+ if (d <= 0) {
+sprint(tstr, "%s\n but coincides with line %d", tstr, iline.x[points.x[ip1]])
+ line_coincide_err.append(new String(tstr))
+ cursec.parentsec = sections.object(point2sec.x[ip1])
+ }else if (try_xy_coincide(i, ip)){
+ xynotnearest_err.append(new String(tstr))
+ }else{
+ noparent_err.append(new String(tstr))
+ }
+ }
+ }
+ }
+}
+
+func try_xy_coincide() {local j, jp, d
+ // sometimes it coincides in the xy plane with a branch point
+ // even though it is not the nearest point and therefore we
+ // assume that is the parent point
+ for j=0, branchpoints.size-1 {
+ jp = points.x[branchpoints.x[j]]
+ if ($2 <= jp) { break }
+ d = xydist($2, jp)
+ if (d <= 0) {
+sprint(tstr, "%s\n but coincides with branch point at line %d", tstr, iline.x[jp])
+ bp = branchpoints.x[j]
+ physcon($1, $2, jp, bp)
+ return 1
+ }
+ }
+ return 0
+}
+
+proc physcon() {
+ cursec.parentsec = sections.object(point2sec.x[$4])
+ cursec.insrt(0, x.x[$3], y.x[$3], z.x[$3], d.x[$2])
+ cursec.id -= 1
+}
+
+proc newsec() {local i, ip, n, m, first, isec
+ first = firstpoints.x[$1]
+ ip = points.x[first]
+ if ($1 < firstpoints.size-1) {
+ n = firstpoints.x[$1+1] - first
+ }else{
+ n = points.size - first
+ }
+ cursec = new Import3d_Section(first, n)
+ isec = sections.count
+ sections.append(cursec)
+ for i = 0, n-1 {
+ cursec.append(i, points.x[i+first], 1, x, y, z, d)
+ point2sec.x[i+first] = isec
+ }
+ m = minor.x[ip]
+ if (m == 1 || m == 2) { // dendrite
+ cursec.type = 3
+ }else if (m == 21 || m == 22) { //axon
+ cursec.type = 2
+ }else if (m == 41 || m == 42) { // soma
+ cursec.type = 1
+ cursec.iscontour_ = 1
+ }else if (m == 61 || m == 62) { // apdendrite
+ cursec.type = 4
+ }else{
+ err = 1
+printf("%s line %d: don't know section type: %s\n",\
+ file.getname, iline.x[ip], lines.object(ip).s)
+ }
+}
+
+proc connect2soma() {local i, ip, j, jp, bp, jpmin, dmin, d, xmin, xmax, ymin, ymax localobj soma, sec, xc, yc, zc, c, psec, r
+	// find the centroid of the soma outline contour and connect all dangling
+	// dendrites to it if they fall inside the soma's (slightly expanded) bounding box
+ for i=0, sections.count-1 {
+ sec = sections.object(i)
+ if (sec.type == 1 && sec.iscontour_ == 1) {
+ soma = sec
+ sections.remove(i)
+ sections.insrt(0, soma)
+ break
+ }
+ }
+ if (soma == nil) { return }
+ xc = soma.raw.getrow(0)
+ yc = soma.raw.getrow(1)
+ zc = soma.raw.getrow(2)
+ xmin = xc.min-.5 xmax = xc.max + .5
+ ymin = yc.min-.5 ymax = yc.max + .5
+ c = soma.contourcenter(xc, yc, zc)
+ for i=0, sections.count-1 {
+ sec = sections.object(i)
+ if (sec.parentsec == nil && sec != soma) {
+ if (gm.inside(sec.raw.x[0][0], sec.raw.x[1][0], xmin, ymin, xmax, ymax)) {
+ sec.parentsec = soma
+ sec.parentx = .5
+ sec.insrt(0, c.x[0], c.x[1], c.x[2], .01)
+ sec.id -= 1
+ sec.first = 1
+ }else{
+ // is same as end point of earlier section?
+ ip = points.x[sec2pt(i, 0)]
+ d = 1e9
+ for j=0, i-1 {
+ psec = sections.object(j)
+ jp = psec.d.size-1
+ r = psec.raw
+ d = dist(r.x[0][jp], r.x[1][jp], r.x[2][jp], ip)
+ if (d == 0) {
+ sec.parentsec = psec
+ break
+ }
+ }
+ if (d == 0) { continue }
+ ip = points.x[sec2pt(i, 0)]
+ dmin = dist(c.x[0], c.x[1], c.x[2], ip)
+ jpmin = -1
+ for j=0, branchpoints.size-1 {
+ jp = points.x[branchpoints.x[j]]
+ if (ip <= jp) { break }
+ d = dist(x.x[ip], y.x[ip], z.x[ip], jp)
+ if (d < dmin) {
+ bp = branchpoints.x[j]
+ dmin = d
+ jpmin = jp
+ }
+ }
+ err = 1
+sprint(tstr, "%d: %s is outside soma, logically connect to", iline.x[ip], lines.object(ip).s)
+ if (jpmin == -1) {
+ sprint(tstr, "%s soma", tstr)
+ sec.parentsec = soma
+ sec.insrt(0, c.x[0], c.x[1], c.x[2], .01)
+ sec.id -= 1
+ }else{
+ jp = jpmin
+ sprint(tstr, "%s %d", tstr, iline.x[jp])
+ sec.parentsec = sections.object(point2sec.x[bp])
+ sec.insrt(0, x.x[jp], y.x[jp], z.x[jp], .01)
+ sec.id -= 1
+ }
+ sec.first = 1
+ somabbox_err.append(new String(tstr))
+ }
+ }
+ }
+}
+
+// note selpoint defined in swc_gui.hoc as sec.id + j
+// selpoint is the points index
+// i.e. the first points of the sections are firstpoints
+proc label() {local i
+ i = points.x[$1]
+ sprint($s2, "Line %d: %s", iline.x[i], lines.object(i).s)
+}
+func id2pt() {
+ if ($1 < 0) { return -1 }
+ if ($1 >= iline2pt.size) { return iline2pt.x[iline2pt.size-1]}
+ return iline2pt.x[$1]
+}
+func id2line() { return points.x[$1] }
+func pt2id() {
+ if ($1 < 0) {return -1}
+ return iline.x[points.x[$1]]
+}
+func pt2sec() {local i, j
+ i = firstpoints.indwhere(">", $1)
+ if (i == -1) {
+ i = firstpoints.size
+ }
+ $o2 = sections.object(i-1)
+ j = $1 - $o2.id
+ return j
+}
+func sec2pt() {
+//print "sec2pt ", $1, $2, sections.object($1).id
+ return sections.object($1).id + $2
+}
+
+proc helptxt() {
+ xpanel("Neurolucida file filter characteristics")
+xlabel(" The only lines utilized are [1,x], [2,x], and [5,10]. i.e , LineTo,")
+xlabel("MoveTo, and Branch lines. ")
+xlabel(" Sections generally consist of MoveTo followed by sequence of LineTo,")
+xlabel("and possibly ending with Branch. Intervening lines of other major types")
+xlabel("are ignored. ")
+xlabel(" The type of the section (dendrite, axon, soma outline, or apical) is")
+xlabel("determined by the minor code of the first point in the branch. ")
+xlabel(" Coincidence of the first x,y,z point of a section with the last")
+xlabel("(branch) point of some section defines a connection between child and")
+xlabel("parent section. However most files contain errors and the following")
+xlabel("heuristics are applied to the first points of problem sections when the")
+xlabel("parent is not obvious. EACH PROBLEM POINT SHOULD BE EXAMINED to")
+xlabel("determine if the correction is suitable. ")
+xlabel(" 1) The first point after a Branch point is a MoveTo which is")
+xlabel("coincident in the xy plane but not in the z axis. A physical connection")
+xlabel("is made with the diam of the MoveTo. ")
+xlabel(" 2) The nearest branch point is coincident in the xy plane. A physical")
+xlabel("connection is made with the diam of the MoveTo.")
+xlabel(" 3) There is no coincident branchpoint in the xy plane but the MoveTo")
+xlabel("is 3-d coincident with the preceding LineTo point. A logical connection")
+xlabel("is made to the section containing the LineTo point.")
+xlabel(" 4) There is an xy plane coincident branch point but it is not the")
+xlabel("nearest in a 3-d sense. A physical connection is made to the section")
+xlabel("containing the xy plane coincident point. ")
+xlabel(" 5) The first point of the branch is not a soma, dendrite, axon, or")
+xlabel("apical start point and there is no xy plane coincident branch point. ")
+xlabel("The branch remains unattached (but see heuristic 6). ")
+xlabel(" 6) All unattached branches within 0.5 microns of the soma contour")
+xlabel("bounding box are logically connected to the soma contour section. ")
+xlabel("I am told, and it seems to be the case, that the first point in a")
+xlabel("branch always has a diameter value of the last point in the previous")
+xlabel("branch. For this reason we set the first point to the diameter of")
+xlabel("of the second point in each section that has a parent branch.")
+xlabel("If this is not the right thing to do then comment out the call to")
+xlabel("repair_diam() in the input() procedure of read_nlcda.hoc")
+ xpanel(1)
+}
+
+proc errout() {local i
+ printf("\n%s problems and default fixes\n\n", file.getname)
+ if (parse_err.count) {
+ printf(" Following lines could not be parsed\n")
+ for i=0, parse_err.count-1 {
+ printf(" %s\n", parse_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (line_branch_err.count) {
+printf(" LINETO follows branch and does not coincide in the xy plane.\n")
+printf(" Make a physical connection using the LINETO diameter.\n")
+ for i = 0, line_branch_err.count-1 {
+ printf(" %s\n", line_branch_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (xyparent_err.count) {
+ printf(" Nearest branch point is coincident in xy plane.\n Make a physical connection with diam of the MOVETO\n")
+ for i=0, xyparent_err.count-1 {
+ printf(" %s\n", xyparent_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (line_coincide_err.count) {
+ printf(" No coincident branchpoint in xy plane but 3-d coincident to previous LINETO.\n")
+ printf(" point. Make a logical connection to the section containing that LINETO\n")
+ for i=0, line_coincide_err.count-1 {
+ printf(" %s\n", line_coincide_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (xynotnearest_err.count) {
+ printf(" The xy plane coincident branch point is not the nearest in the 3-d sense.\n")
+ printf(" However we connect physically to the indicated xy coincident branch point\n")
+ for i=0, xynotnearest_err.count-1 {
+ printf(" %s\n", xynotnearest_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (noparent_err.count) {
+ printf(" Cannot figure out which is the parent\n")
+ printf(" No coincident (even in xy plane) branch point.\n")
+ for i=0, noparent_err.count-1 {
+ printf(" %s\n", noparent_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (somabbox_err.count) {
+ printf(" Unconnected branch is more than .5 microns outside the soma bounding box.\n")
+ printf(" Connect logically to nearest branch point\n")
+ for i=0, somabbox_err.count-1 {
+ printf(" %s\n", somabbox_err.object(i).s)
+ }
+ printf("\n")
+ }
+}
+
+proc fillproblist() {
+ fillproblist1($o1, parse_err, line_branch_err, xyparent_err, line_coincide_err, xynotnearest_err, noparent_err, somabbox_err)
+}
+proc fillproblist1() { local i, j
+ for i=2, numarg() {
+ for j=0, $oi.count-1 {
+ $o1.append($oi.object(j))
+ }
+ }
+}
+
+endtemplate Import3d_Neurolucida_read
diff --git a/bmtk-vb/bmtk/simulator/bionet/import3d/read_nlcda3.hoc b/bmtk-vb/bmtk/simulator/bionet/import3d/read_nlcda3.hoc
new file mode 100644
index 0000000..0402dbb
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/import3d/read_nlcda3.hoc
@@ -0,0 +1,1194 @@
+// Read a Neurolucida V3 text file, i.e. a file whose header contains
+// "; V3 text file written for MicroBrightField products."
+// The format is given by a context free grammar that would be easy
+// to handle with lex/yacc but we can do reasonably well using recursive descent
+// that more or less matches each production rules for the grammar.
+// Presently we only handle contours and trees, with spines ignored.
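+//
+// Illustrative sketch (not taken from any real data file) of the nested
+// parenthesized form this parser consumes -- a CellBody contour followed
+// by a dendrite tree with one bifurcation:
+//	("CellBody"
+//	  (CellBody)
+//	  (0 0 0 5) (5 0 0 5) (5 5 0 5)
+//	)
+//	((Dendrite)
+//	  (0 0 0 2) (10 0 0 2)
+//	  (
+//	    (10 5 0 1) (15 10 0 1) Normal
+//	    |
+//	    (10 -5 0 1) (15 -10 0 1) Normal
+//	  )
+//	)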
+
+begintemplate Branch2SomaInfo
+// info to carry out decision about which to connect to for
+// possible root branch mistakes
+// may have to split the parent
+public sec, sindex, pbranch, ipoint, d2p, d2s, connected2p
+objref sec, pbranch
+proc init() {
+ sec = $o1
+ pbranch = $o2
+ sindex = $3
+ d2p = $4
+ d2s = $5
+ ipoint = $6
+ connected2p = 0
+}
+endtemplate Branch2SomaInfo
+
+begintemplate Import3d_LexToken
+public token, x, s, itok, iline, clone
+strdef s
+token = 0
+x = 0
+itok = 0
+iline = 0
+obfunc clone() { localobj r
+ r = new Import3d_LexToken()
+ r.s = s
+ r.token = token
+ r.x = x
+ r.itok = itok
+ r.iline = iline
+ return r
+}
+endtemplate Import3d_LexToken
+
+begintemplate Import3d_Neurolucida3
+public type
+public filetype, input, file, sections
+public label, id2pt, id2line, pt2id, pt2sec, sec2pt, helptxt, mark, err, b2spanel
+public x, y, z, d, iline, lines, quiet
+external hoc_sf_
+objref type, firstpoints, gm, plist
+objref current, look_ahead, look_ahead2
+objref file, tokens, sections, cursec, parentsec, nil
+objref x, y, z, d, iline, lines
+objref somas, centers, b2serr, b2sinfo
+strdef line, tstr, tstr2, filetype, fline
+
+proc init() {
+ quiet = 0
+ debug_on = 0
+ gm = new GUIMath()
+ filetype = "Neurolucida V3"
+ current = new Import3d_LexToken()
+ look_ahead = new Import3d_LexToken()
+ look_ahead2 = new Import3d_LexToken()
+ eof=0
+ number=1 leftpar=2 rightpar=3 comma=4 bar=5
+ set=6 rgb=7 string=8 label_=9 err_=10
+ leftsp=11 rightsp=12
+ tokens = new List()
+ tokensappend("eof", "number", "leftpar", "rightpar", "comma", "bar")
+ tokensappend("set", "rgb", "string", "label", "err")
+ tokensappend("leftsp", "rightsp")
+ plist = new List()
+}
+proc tokensappend() {local i
+ for i=1, numarg() {
+ tokens.append(new String($si))
+ }
+}
+
+proc input() {
+ b2serr = new List()
+ b2sinfo = new List()
+ nspine = 0
+ err = 0
+ type = new Vector()
+ sections = new List(1000)
+ alloc(25000, x, y, z, d, iline)
+ lines = new List(25000)
+ itoken = 0
+ depth = 0
+ rdfile($s1)
+ firstpoints = new Vector(sections.count)
+ set_firstpoints()
+ connect2soma()
+ if (err) { errout() }
+}
+
+proc set_firstpoints() {local i
+ firstpoints.resize(sections.count)
+ for i=0, sections.count-1 {
+ firstpoints.x[i] = sections.object(i).id
+ }
+}
+
+proc alloc() {local i
+ for i=2, numarg() {
+ $oi = new Vector($1)
+ $oi.resize(0)
+ }
+}
+proc connect2soma() {local i, j, d, dmin localobj sec, roots, xx
+ // first make sure all somas are at the beginning
+ centers = new List()
+ j = 0 // next soma index
+ somas = new List()
+ roots = new List()
+ for i=0, sections.count-1 {
+ sec = sections.object(i)
+ if (sec.iscontour_) {
+ if (i > j) {
+ sections.remove(i)
+ sections.insrt(j, sec)
+ }
+ somas.append(sec)
+ j += 1
+ }
+ }
+ // mark the soma contours that are part of a
+ // contour stack and link them into a list
+ // that is in the main contour section.
+ // we do not remove them from the sections since
+ // we want to be able to select their points
+ soma_contour_stack()
+ for i=0, sections.count-1 {
+ sec = sections.object(i)
+ if (!sec.iscontour_) if (sec.parentsec == nil) {
+ roots.append(sec)
+ }
+ }
+ if (somas.count == 0) { return }
+	// note that j is the number of somas
+ for i = 0, somas.count-1 {
+ connect2soma_2(somas.object(i), roots)
+ }
+ for i=0, roots.count-1 {
+ sec = roots.object(i)
+ xx = sec.raw.getcol(0)
+ dmin = 1e9
+ for j=0, centers.count-1 {
+ d = xx.c.sub(centers.object(j)).mag
+ if (d < dmin) {
+ imin = j
+ dmin = d
+ }
+ }
+ err = 1
+ xx = centers.object(imin)
+ sprint(tstr, "\nMain branch starting at line %d is outside the soma bounding boxes", pt2id(sec.id))
+ b2serr.append(new String(tstr))
+ sprint(tstr, " Making a logical connection to center of nearest soma")
+ b2serr.append(new String(tstr))
+ sec.parentsec = somas.object(imin)
+ sec.parentx = .5
+ sec.insrt(0, xx.x[0], xx.x[1], xx.x[2], .01)
+ sec.first = 1
+ sec.fid = 1
+ opt_connect(sec, imin, dmin)
+ }
+}
+
+proc soma_contour_stack() {local i, j localobj bb1, bb2, first, next
+ // if soma contour bounding boxes overlap, treat as single soma
+ if (somas.count == 0) return
+ first = somas.object(0)
+ bb1 = bounding_box(first)
+ j = 0
+ for i = 1, somas.count-1 {
+ j += 1
+ next = somas.object(j)
+ bb2 = bounding_box(next)
+ if (xy_intersect(bb1, bb2)) {
+ if (!object_id(first.contour_list)) {
+ first.contour_list = new List()
+ }
+ first.contour_list.append(next)
+ next.is_subsidiary = 1
+ somas.remove(j)
+ j -= 1
+ }else{
+ first = next
+ }
+ bb1 = bb2
+ }
+ for i=0, somas.count-1 {
+ somastack_makes_sense(somas.object(i))
+ somastack_process(somas.object(i))
+ }
+}
+
+obfunc bounding_box() {localobj bb
+ bb = new Vector(6)
+ bb.x[0] = $o1.raw.getrow(0).min
+ bb.x[1] = $o1.raw.getrow(1).min
+ bb.x[2] = $o1.raw.getrow(2).min
+ bb.x[3] = $o1.raw.getrow(0).max
+ bb.x[4] = $o1.raw.getrow(1).max
+ bb.x[5] = $o1.raw.getrow(2).max
+ return bb
+}
+
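+// return 1 if the xy projections of two bounding boxes (vectors as
+// returned by bounding_box) overlap, 0 otherwise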
+func xy_intersect() {local i
+ for i = 0, 1 {
+if ($o1.x[i] > $o2.x[3+i] || $o2.x[i] > $o1.x[3+i]) { return 0 }
+ }
+ return 1
+}
+
+proc somastack_makes_sense() {local i, j, z, z2, dz, dz2 localobj sec
+ if (!object_id($o1.contour_list)) { return }
+ // the soma stack must be monotonic in the z axis and all points
+ // on a contour must have same z value.
+ z = $o1.raw.x[2][0]
+ for i = 1, $o1.raw.ncol-1 if (z != $o1.raw.x[2][i]) {
+ sprint(tstr, "Soma stack contour %s does not have constant z value.", $o1)
+ b2serr.append(new String(tstr))
+		b2serr.append(new String("	Soma area calculation may be seriously in error."))
+ return
+ }
+ dz = 0
+ for j=0, $o1.contour_list.count-1 {
+ sec = $o1.contour_list.object(j)
+ z2 = sec.raw.x[2][0]
+ dz2 = z2 - z
+ if (dz2 == 0) {
+			sprint(tstr, "Adjacent contour %d of soma stack %s has the same z coordinate as the previous.", j, $o1)
+ b2serr.append(new String(tstr))
+ return
+ }else if (dz2 > 0) {
+ dz2 = 1
+ }else{
+ dz2 = -1
+ }
+ if (dz == 0) {
+ dz = dz2
+ }else if (dz != dz2) {
+ sprint(tstr, "Contour %d of the Soma stack %s is not monotonic in z.", j, $o1)
+ b2serr.append(new String(tstr))
+ b2serr.append(new String(" Manually edit the neurolucida file and reorder or eliminate some contours."))
+ b2serr.append(new String(" Presently the soma surface is nonsense."))
+ return
+ }
+ z = z2
+ for i = 1, sec.raw.ncol-1 if (z != sec.raw.x[2][i]) {
+ sprint(tstr, "contour %d of the Soma stack %s does not have constant z value.", j, $o1)
+ b2serr.append(new String(tstr))
+			b2serr.append(new String("	Soma area calculation may be seriously in error."))
+ return
+ }
+ }
+}
+
+proc somastack_process() {local i, n, n1 localobj pts, m, center, pv
+ if (!object_id($o1.contour_list)) { return }
+ printf("somastack_process %d\n", $o1.contour_list.count + 1)
+	// The stack defines a volume. Determine the principal axes
+	// and slice the volume along the major axis, approximating
+	// each slice by a circle and shifting the circle to be
+	// along the major axis. So the set of soma contours ends
+	// up being one straight cylindrically symmetric soma centroid.
+	// Note then that curved carrots don't look quite right but
+	// straight carrots do.
+
+ // for each contour use 100 points equally spaced.
+ // we should, but do not, make the stack equally spaced.
+ // then all the points are used to find the principle axes
+ // this pretty much follows the corresponding analysis in
+ // Import3d_GUI
+ // Heck. Let's just use all the contour points and approximate
+ // the thing as an ellipsoid
+
+ // copy all the centroids into one matrix
+ // size of matrix
+ n = $o1.raw.nrow
+ for i=0, $o1.contour_list.count-1 { n += $o1.contour_list.object(i).raw.nrow}
+ pts = new Matrix(3, n)
+ n = 0
+ n1 = $o1.raw.nrow
+ $o1.raw.bcopy(0, 0, 3, n1, 0, n, pts)
+ n = n1
+ for i=0, $o1.contour_list.count-1 {
+ n1 = $o1.contour_list.object(i).raw.nrow
+ $o1.contour_list.object(i).raw.bcopy(0, 0, 3, n1, 0, n, pts)
+ n += n1
+ }
+ center = new Vector(3)
+ for i=0, 2 { center.x[i] = pts.getrow(i).mean }
+ printf("center\n") center.printf
+
+	// principal axes: eigenvectors of the 3x3 second-moment matrix of the centered points
+ m = new Matrix(3,3)
+ for i=0, 2 { pts.setrow(i, pts.getrow(i).sub(center.x[i])) }
+ for i=0, 2 {
+ for j=i, 2 {
+ m.x[i][j] = pts.getrow(i).mul(pts.getrow(j)).sum
+ m.x[j][i] = m.x[i][j]
+ }
+ }
+ pv = m.symmeig(m)
+	printf("Principal values\n") pv.printf()
+	printf("Principal axes\n") m.printf()
+}
+
+proc stk_bbox() {local i, j localobj bbs, bbc
+ bbs = bounding_box($o1)
+ for i=0, $o1.contour_list.count-1 {
+ bbc = bounding_box($o1.contour_list.o(i))
+ for j=0, 2 {
+ if (bbs.x[j] > bbc.x[j]) bbs.x[j] = bbc.x[j]
+ if (bbs.x[j+3] < bbc.x[j+3]) bbs.x[j+3] = bbc.x[j+3]
+ }
+ }
+ $&2 = bbs.x[0] $&3 = bbs.x[3] $&4 = bbs.x[1] $&5 = bbs.x[4]
+}
+
+proc connect2soma_2() {local i, xmin, xmax, ymin, ymax localobj sec, xc, yc, zc, center
+ // find centroid of soma if outline and connect all dangling
+ // dendrites to that if inside the contour
+ if (object_id($o1.contour_list)) {
+ center = $o1.stk_center()
+ stk_bbox($o1, &xmin, &xmax, &ymin, &ymax)
+ }else{
+ xc = $o1.raw.getrow(0)
+ yc = $o1.raw.getrow(1)
+ zc = $o1.raw.getrow(2)
+ xmin = xc.min-.5 xmax = xc.max + .5
+ ymin = yc.min-.5 ymax = yc.max + .5
+ center = $o1.contourcenter(xc, yc, zc)
+ }
+ centers.append(center)
+
+ for (i=$o2.count-1; i >= 0; i -= 1) {
+ sec = $o2.object(i)
+ if (gm.inside(sec.raw.x[0][0], sec.raw.x[1][0], xmin, ymin, xmax, ymax)) {
+ sec.parentsec = $o1
+ sec.parentx = .5
+ sec.insrt(0, center.x[0], center.x[1], center.x[2], .01)
+ sec.first = 1
+ sec.fid = 1
+ $o2.remove(i)
+ }
+ }
+}
+
+proc opt_connect() {local i, j, d, dmin, imin, n, ip localobj psec, xx
+ dmin = 1e9
+ xx = $o1.raw.getcol(1)
+ for i=0, sections.count - 1 {
+ psec = sections.object(i)
+ if (psec == $o1) { break }
+ n = psec.raw.ncol
+ for j=0, n-1 {
+ d = xx.c.sub(psec.raw.getcol(j)).set(2,0).mag
+ if (d < dmin) {
+ dmin = d
+ imin = i
+ ip = j
+ }
+ }
+ }
+ if (dmin == 1e9) { return }
+ psec = sections.object(imin)
+// if (dmin < psec.d.x[psec.d.size-1]) {
+ if (dmin < $3) {
+ b2sinfo.append(new Branch2SomaInfo($o1, psec, $2, dmin, $3, ip))
+ }
+}
+
+proc b2spanel() {local i localobj b2s
+ if (b2sinfo.count == 0) { return }
+ xpanel("Possible root branch errors")
+ xlabel("Default logical connection to nearest soma.")
+ xlabel("Check to physically connect to closest parent")
+ xlabel(" in the xy plane.")
+ xlabel(" (Note: may split the parent into two sections)")
+ for i=0, b2sinfo.count -1 {
+ b2s = b2sinfo.object(i)
+sprint(tstr, "Line #%d connect to #%d %g (um) away", pt2id(sec2pto(b2s.sec, 1)), \
+pt2id(sec2pto(b2s.pbranch, b2s.ipoint)), b2s.d2p)
+sprint(tstr2, "b2soption_act(%d, \"%s\")", i, $o1)
+ xcheckbox(tstr, &b2s.connected2p(), tstr2)
+ }
+ xpanel()
+}
+
+proc b2soption_act() {local i localobj b2s, sec, parent, soma, xx
+ b2s = b2sinfo.object($1)
+ sec = b2s.sec
+ soma = somas.object(b2s.sindex)
+ parent = b2s.pbranch
+ if (sec.parentsec == soma) { // connect to parent
+ if (b2s.ipoint != parent.raw.ncol-1) { // need to split
+ b2soption_split(b2s)
+ parent = b2s.pbranch
+ set_firstpoints()
+ }
+ xx = parent.raw.getcol(b2s.ipoint)
+ sec.parentsec = parent
+ sec.parentx = 1
+ sec.raw.setcol(0, xx)
+ sec.d.x[0] = sec.d.x[1]
+ sec.first = 0
+ sec.fid = 1
+ }else{ // connect to soma
+ xx = centers.object(b2s.sindex)
+ sec.parentsec = soma
+ sec.parentx = .5
+ sec.raw.setcol(0, xx)
+ sec.d.x[0] = .01
+ sec.first = 1
+ sec.fid = 1
+ }
+ sprint(tstr, "%s.redraw()", $s2)
+ execute(tstr)
+}
+
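+// Split $o1.pbranch at point index $o1.ipoint so the optional physical
+// connection can be made at a section endpoint.  The proximal piece
+// becomes a new section inserted before p in the sections list, p is
+// reparented to it, and $o1.pbranch is updated to the new piece.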
+proc b2soption_split() {local i, n, id, ip localobj p, newsec, tobj
+ p = $o1.pbranch
+ ip = $o1.ipoint
+ id = sec2pto(p, ip)
+ n = p.raw.ncol
+ newsec = new Import3d_Section(p.id, ip+1)
+ p.id = id
+
+ tobj = p.raw.c
+ tobj.bcopy(0,0,3,ip+1,newsec.raw)
+ p.raw.resize(3, n - ip)
+ p.xyz.resize(3, n - ip)
+ tobj.bcopy(0, ip, 3, n - ip, p.raw)
+
+ tobj = p.d.c
+ newsec.d.copy(tobj, 0, ip)
+ p.d.resize(n - ip)
+ p.d.copy(tobj, ip, n-1)
+
+ newsec.parentsec = p.parentsec
+ p.parentsec = newsec
+ newsec.parentx = p.parentx
+ p.parentx = 1
+ newsec.type = p.type
+ newsec.first = p.first
+ newsec.fid = p.fid
+ p.first = 0
+ p.fid = 0
+ newsec.type = p.type
+ $o1.pbranch = newsec
+ $o1.ipoint = newsec.d.size-1
+ // now adjust any screwed up b2sinfo items that also reference p
+ for i=0, b2sinfo.count-1 {
+ tobj = b2sinfo.object(i)
+ if (tobj == $o1) { continue }
+ if (tobj.pbranch == p) {
+ if (tobj.ipoint <= ip) { // on newsec
+ tobj.pbranch = newsec
+ }else{ // still on p
+ tobj.ipoint -= ip
+ }
+ }
+ }
+ sections.insrt(sections.index(p), newsec)
+}
+
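+// lex($o1) fills the Import3d_LexToken $o1 with the next token,
+// consuming matched text from the front of `line` and refilling it
+// from the file as needed.  It returns the token code (eof, number,
+// leftpar, ...).  For example, an input line "(1.5, 2)" is delivered
+// as the token stream: leftpar, number(1.5), comma, number(2), rightpar.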
+func lex() {local n
+ $o1.x = 0
+ $o1.s = ""
+ while (hoc_sf_.len(line) <= 1 || sscanf(line, " ;%[^@]", line) == 1) {
+ if (!getline(fline)) {
+ $o1.token = eof
+ itoken += 1
+ $o1.itok = itoken
+ $o1.iline = iline_
+ return eof
+ }
+ line = fline
+ hoc_sf_.left(fline, hoc_sf_.len(fline)-1)
+ }
+ if (sscanf(line, " %lf%[^@]", &$o1.x, line) == 2) {
+ $o1.token = number
+ }else if (sscanf(line, " (%[^@]", line) == 1) {
+ $o1.token = leftpar
+ }else if (sscanf(line, " )%[^@]", line) == 1) {
+ $o1.token = rightpar
+ }else if (sscanf(line, " ,%[^@]", line) == 1) {
+ $o1.token = comma
+ }else if (sscanf(line, " |%[^@]", line) == 1) {
+ $o1.token = bar
+ }else if (sscanf(line, " <%[^@]", line) == 1) {
+ $o1.token = leftsp
+ }else if (sscanf(line, " >%[^@]", line) == 1) {
+ $o1.token = rightsp
+ }else if (sscanf(line, " set %[^@]", line) == 1) {
+ $o1.token = set
+ }else if (sscanf(line, " Set %[^@]", line) == 1) {
+ $o1.token = set
+ }else if (sscanf(line, " SET %[^@]", line) == 1) {
+ $o1.token = set
+ }else if (sscanf(line, " RGB %[^@]", line) == 1) {
+ $o1.token = rgb
+ }else if ((n = sscanf(line, " \"%[^\"]\"%[^@]", $o1.s, line)) > 0) {
+ // not allowing quotes in quote
+ $o1.token = string
+ if (n == 1) {
+			printf("Lexical error: no closing '\"' in string. The entire line %d is\n", iline_)
+ printf("|%s|\n", fline)
+ line = ""
+ $o1.token = err_
+ }
+ }else if (sscanf(line, " %[A-Za-z0-9_]%[^@]", $o1.s, line) == 2) {
+ $o1.token = label_
+ }else{
+ $o1.token = err_
+ }
+ itoken += 1
+ $o1.itok = itoken
+ $o1.iline = iline_
+ return $o1.token
+}
+
+func getline() {
+ if (file.eof) {
+ if (!quiet) {
+ printf("\r%d lines read\n", iline_)
+ }
+ return 0
+ }
+ file.gets($s1)
+ iline_ += 1
+// printf("%d: %s", iline_, $s1)
+ if ((iline_%1000) == 0) {
+ if (!quiet) {
+ printf("\r%d lines read", iline_)
+ }
+ }
+ return 1
+}
+
+proc rdfile() {local i
+ iline_ = 0
+ file = new File($s1)
+ if (!file.ropen()) {
+ err = 1
+ printf("could not open %s\n", $s1)
+ }
+ for (i=0; !file.eof(); i += 1) {
+ file.gets(line)
+ }
+ alloc(i, x, y, z, d, iline)
+ file.close
+ lines = new List(25000)
+ line=""
+ if (!quiet) {
+ printf("\n")
+ }
+ file.ropen()
+ p_file()
+ file.close
+}
+
+objref rollback
+
+proc save_for_rollback() {
+ if (object_id(rollback)) {
+ printf("rollback in use\n")
+ p_err()
+ }
+ rollback = new List()
+ rollback.append(current.clone())
+ rollback.append(look_ahead.clone())
+ rollback.append(look_ahead2.clone())
+ use_rollback_ = 0
+}
+proc use_rollback() {
+ use_rollback_ = 1
+ current = rollback.o(0) rollback.remove(0)
+ look_ahead = rollback.o(0) rollback.remove(0)
+ look_ahead2 = rollback.o(0) rollback.remove(0)
+ if (rollback.count == 0) {clear_rollback()}
+}
+proc clear_rollback() {localobj nil
+ rollback = nil
+ use_rollback_ = 0
+}
+
+proc read_next_token() {
+ if (use_rollback_) {
+ current = look_ahead
+ look_ahead = look_ahead2
+ look_ahead2 = rollback.o(0)
+ rollback.remove(0)
+ if (rollback.count == 0) {
+ clear_rollback()
+ }
+ }else{
+ read_next_token_lex()
+ if (object_id(rollback)){
+ rollback.append(look_ahead2.clone())
+ }
+ }
+}
+proc read_next_token_lex() {localobj tobj
+ tobj = current
+ current = look_ahead
+ look_ahead = look_ahead2
+ look_ahead2 = tobj
+ if (look_ahead.token != eof) {
+ lex(look_ahead2)
+ }else{
+ look_ahead2.token = eof
+ }
+// printf("current token=%s x=%g s=%s\n", tokens.object(current.token).s, current.x, current.s)
+}
+
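+// return 1 if the first point of the branch starting at point index $1
+// does not coincide with the last point of parentsec, in which case
+// newsec() must prepend a copy of that parent point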
+func need_extra() {local i, n localobj m
+ if (parentsec == nil) { return 0 }
+ m = parentsec.raw
+ n = m.ncol-1
+ if ( m.x[0][n] == x.x[$1]) {
+ if ( m.x[1][n] == y.x[$1]) {
+ if ( m.x[2][n] == z.x[$1]) {
+ return 0
+ }
+ }
+ }
+ return 1
+}
+proc newsec() {local i, n, first, n1 localobj m
+ first = 0
+ n = $2 - $1
+ if (need_extra($1)) {
+ cursec = new Import3d_Section($1, n+1)
+ first = 1
+ cursec.fid = 1
+ m = parentsec.raw
+ n1 = m.ncol-1
+ cursec.set_pt(0, m.x[0][n1], m.x[1][n1], m.x[2][n1], d.x[$1])
+ }else{
+ cursec = new Import3d_Section($1, n)
+ }
+ cursec.type = sectype
+ type.append(sectype)
+ sections.append(cursec)
+ cursec.append(first, $1, n, x, y, z, d)
+}
+proc set_sectype() {localobj tobj
+ sectype = 0
+ if (plist.count) {
+ tobj = plist.object(plist.count-1)
+ if (strcmp(tobj.s, "Axon") == 0) {
+ sectype = 2
+ }else if (strcmp(tobj.s, "Dendrite") == 0) {
+ sectype = 3
+ }else if (strcmp(tobj.s, "Apical") == 0) {
+ sectype = 4
+ }
+ }
+}
+
+proc label() {
+ sprint($s2, "Line %d: %s", iline.x[$1], lines.object($1).s)
+}
+func id2pt() {local i
+ i = iline.indwhere(">=", $1)
+ if (i < 0) { i = iline.size-1 }
+ return i
+}
+func id2line() { return $1 }
+func pt2id() {local i
+ i = $1
+	if (i < 0) { i = 0 }
+ if (i >= iline.size) { i = iline.size-1 }
+ return iline.x[i]
+}
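+// map a global point index $1 to its section (returned in $o2) and
+// return the local point index within it; fid compensates for sections
+// whose first point was copied from the parent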
+func pt2sec() {local i, j
+ i = firstpoints.indwhere(">", $1)
+ if (i == -1) {
+ i = firstpoints.size
+ }
+ $o2 = sections.object(i-1)
+ j = $1 - $o2.id + $o2.fid
+ return j
+}
+func sec2pt() {local i localobj sec
+ sec = sections.object($1)
+ i = sec.id + $2 - sec.fid
+ return i
+}
+func sec2pto() {local i localobj sec
+ sec = $o1
+ i = sec.id + $2 - sec.fid
+ return i
+}
+proc mark() {local i
+ print $o1, $2, iline, lines
+ i = iline.indwhere("==", $2)
+ if (i != -1) {
+ printf("%d,%d,%d (%g,%g): %s\n", $2, iline.x[i], i, x.x[i], y.x[i], lines.object(i).s)
+ $o1.mark(x.x[i], y.x[i], "S",12,4,1)
+ }
+}
+
+proc helptxt() {
+ xpanel("Neurolucida V3 file filter characteristics")
+xlabel("The elaborate file format is handled by a reasonably complete")
+xlabel("recursive descent parser that more or less matches the production")
+xlabel("rules for the grammar. However, at present, only contours and trees")
+xlabel("are given any semantic actions (in particular, spines are ignored).")
+ xpanel(1)
+}
+
+proc chk() {
+ if (current.token != $1) { p_err() }
+}
+proc demand() {
+ read_next_token()
+ chk($1)
+}
+proc pcur() {
+ printf("itok=%d on line %d token=%s x=%g s=%s\n", current.itok, current.iline, tokens.object(current.token).s, current.x, current.s)
+}
+proc plook() {
+// printf("lookahead: itok=%d token=%s x=%g s=%s\n", look_ahead.itok, tokens.object(look_ahead.token).s, look_ahead.x, look_ahead.s)
+}
+proc enter() {local i
+ if (debug_on == 0) {return}
+ for i=1, depth {printf(" ")}
+ printf("enter %s: ", $s1)
+ pcur()
+ depth += 1
+}
+proc leave() {local i
+ if (debug_on == 0) {return}
+ depth -= 1
+ for i=1, depth {printf(" ")}
+ printf("leave %s: ", $s1)
+ pcur()
+}
+// p stands for production if needed to avoid conflict with variable
+proc p_file() {
+ look_ahead2.token = eof
+ look_ahead.token = eof
+ if (lex(current) != eof) {
+ if (lex(look_ahead) != eof) {
+ lex(look_ahead2)
+ }
+ }
+ enter("p_file")
+ objects()
+ leave("p_file")
+}
+proc objects() {
+ enter("objects")
+ object()
+ while(1) {
+ optionalcomma()
+ if (current.token != leftpar) {
+ break
+ }
+ object()
+ }
+ leave("objects")
+}
+proc object() {local i
+ i = current.itok
+ enter("object")
+ if (current.token == leftpar) {
+ plook()
+ if (look_ahead.token == string) {
+ contour()
+ }else if (look_ahead.token == label_) {
+ marker_or_property()
+ }else if (look_ahead.token == leftpar) {
+ tree_or_text()
+ }else if (look_ahead.token == set) {
+ p_set()
+ }else{
+ p_err()
+ }
+ }else{
+ p_err()
+ }
+ leave("object")
+ if (i == current.itok) {
+ print "internal error: ", "object consumed no tokens"
+ stop
+ }
+}
+proc marker_or_property() {
+ enter("marker_or_property")
+ if (look_ahead2.token == leftpar) {
+ marker()
+ }else{
+ property()
+ }
+ leave("marker_or_property")
+}
+proc tree_or_text() {
+ // the tree and text productions are poorly conceived since they
+ // match each other for arbitrarily long sequences of Properties tokens.
+ // And after the properties they both have a Point.
+ // For now just assume it is a tree.
+ // It will be painful to consume the [ '(' Properties Point ] here
+ // and then disambiguate between Tree or Text and then more
+ // often than not, start the tree production after having already
+ // read the first point (Branch currently assumes it is supposed
+ // to read the first point of the tree.)
+ enter("tree_or_text")
+ save_for_rollback()
+ if (text()) {
+ clear_rollback()
+ }else{
+ use_rollback()
+ tree()
+ }
+ leave("tree_or_text")
+}
+proc properties() {
+ enter("properties")
+ plist.remove_all()
+ if (current.token == leftpar) {
+ if(look_ahead.token == label_ || look_ahead.token == set) {
+ property_or_set()
+ while (1) {
+ optionalcomma()
+if (current.token != leftpar || (look_ahead.token != label_ && look_ahead.token != set)) {
+ break
+ }
+ property_or_set()
+ }
+ }
+ }
+ leave("properties")
+}
+proc property_or_set() {
+ if (look_ahead.token == label_) {
+ property()
+ }else{
+ p_set()
+ }
+}
+proc property() {
+ enter("property")
+ chk(leftpar)
+ demand(label_)
+ plist.append(new String(current.s))
+ read_next_token()
+ optionalvalues()
+ chk(rightpar)
+ read_next_token()
+ leave("property")
+}
+proc optionalvalues() {local c
+ enter("optionalvalues")
+ c = current.token
+ if (c == number || c == string || c == label_ || c == rgb) {
+ values()
+ }
+ leave("optionalvalues")
+}
+proc values() {local c
+ enter("values")
+ value()
+ while (1) {
+ c = current.token
+ if (c != number && c != string && c != label_ && c != rgb) {
+ break
+ }
+ value()
+ }
+ leave("values")
+}
+proc value() {local c
+ enter("value")
+ c = current.token
+ if (c == number) {
+ }else if (c == string) {
+ }else if (c == label_) {
+ }else if (c == rgb) {
+ demand(leftpar)
+ demand(number)
+ read_next_token()
+ optionalcomma()
+ chk(number)
+ read_next_token()
+ optionalcomma()
+ chk(number)
+ demand(rightpar)
+ }else{
+ p_err()
+ }
+ read_next_token()
+ leave("value")
+}
+proc p_set() {
+ // presently, I am imagining that we ignore sets
+ // and I hope we never see objects() in them.
+ enter("p_set")
+ chk(leftpar)
+ demand(set)
+ demand(string)
+ read_next_token()
+ if (current.token != rightpar) {
+ objects()
+ }
+ chk(rightpar)
+ read_next_token()
+ leave("p_set")
+}
+proc contour() {local begin, end, keep, il
+ enter("contour")
+ chk(leftpar)
+ begin = x.size
+ keep = 0
+ demand(string)
+ if (strcmp(current.s, "CellBody") == 0) { keep = 1 }
+ if (strcmp(current.s, "Cell Body") == 0) { keep = 1 }
+ il = current.iline
+ read_next_token()
+ contourinfo()
+ if (keep) {
+ end = x.size
+ if (end - begin > 2) {
+ sectype = 1
+ newsec(begin, end)
+ cursec.iscontour_ = 1
+ }else{
+sprint(tstr, "CellBody contour has fewer than three points at line %d. Ignoring.", il)
+ b2serr.append(new String(tstr))
+ }
+ }
+ chk(rightpar)
+ read_next_token()
+ leave("contour")
+}
+proc contourinfo() {
+ enter("contourinfo")
+ properties()
+ points()
+ morepoints()
+ leave("contourinfo")
+}
+proc morepoints() {
+ enter("morepoints")
+ optmarkerlist()
+ leave("morepoints")
+}
+proc optmarkerlist() {
+ enter("optmarkerlist")
+ leave("optmarkerlist")
+}
+proc markerlist() {local pcnt
+ enter("markerlist")
+ chk(leftpar)
+ pcnt = 1
+ // not handling markers. when pcnt goes to 0 then leave
+ while (pcnt != 0) {
+ read_next_token()
+ if (current.token == rightpar) {
+ pcnt -= 1
+ }else if (current.token == leftpar) {
+ pcnt += 1
+ }
+ }
+ read_next_token()
+ leave("markerlist")
+}
+proc tree() {
+ enter("tree")
+ parentsec = nil
+ chk(leftpar)
+ read_next_token()
+ properties()
+ set_sectype()
+ branch()
+ chk(rightpar)
+ read_next_token()
+ parentsec = nil
+ leave("tree")
+}
+proc branch() {local begin, end localobj psav
+ enter("branch")
+ psav = parentsec
+ begin = x.size
+ treepoints()
+ end = x.size
+ newsec(begin, end)
+ cursec.parentsec = parentsec
+ parentsec = cursec
+ branchend()
+ parentsec = psav
+ leave("branch")
+}
+proc treepoints() {
+ enter("treepoints")
+ treepoint()
+ while (1) {
+ optionalcomma()
+ if (current.token != leftpar || look_ahead.token != number) {
+ break
+ }
+ treepoint()
+ }
+ leave("treepoints")
+}
+proc treepoint() {
+ enter("treepoint")
+ point()
+ if (current.token == leftsp) {
+ spines()
+ }
+ leave("treepoint")
+}
+proc spines() {
+ enter("spines")
+ spine()
+ while(current.token == leftsp) {
+ spine()
+ }
+ leave("spines")
+}
+proc spine() {
+ enter("spine")
+ chk(leftsp) read_next_token()
+ nspine += 1 err = 1
+// properties() points()
+ while (current.token != rightsp) {
+ read_next_token()
+ }
+ chk(rightsp) read_next_token()
+ leave("spine")
+}
+proc branchend() {
+ enter("branchend")
+ optionalcomma()
+ if (current.token == leftpar) {
+ while (look_ahead.token == label_) {
+ markerlist()
+ }
+ }
+ optionalcomma()
+ if (current.token == leftpar || current.token == label_) {
+ node()
+ }
+ leave("branchend")
+}
+proc node() {
+ enter("node")
+ if (current.token == leftpar) {
+ read_next_token() split()
+ chk(rightpar) read_next_token()
+ }else if (current.token == label_) {
+ read_next_token()
+ }else{
+ p_err()
+ }
+ leave("node")
+}
+proc split() {
+ enter("split")
+ branch()
+ while (current.token == bar) {
+ read_next_token()
+ branch()
+ }
+ leave("split")
+}
+proc marker() {
+ enter("marker")
+ chk(leftpar)
+ demand(label_)
+ read_next_token()
+ properties() points()
+ chk(rightpar) read_next_token()
+ leave("marker")
+}
+func text() {
+ // if text fails then it may be a tree
+ enter("text")
+ chk(leftpar) read_next_token()
+ properties() point()
+ if (current.token != string) {
+ leave("text invalid --- expect string")
+ return 0
+ }
+ chk(string)
+// demand(rightpar)
+ read_next_token()
+ if (current.token != rightpar) {
+ leave("text invalid --- expect rightpar")
+ return 0
+ }
+ chk(rightpar)
+ read_next_token()
+ leave("text")
+ return 1
+}
+proc points() {
+ enter("points")
+ point()
+ while (1) {
+ optionalcomma()
+ if (current.token != leftpar) {
+ break
+ }
+ point()
+ }
+ leave("points")
+}
+proc point() {
+ enter("point")
+ chk(leftpar)
+ demand(number)
+ xval = current.x
+ iline.append(iline_) lines.append(new String(fline))
+ read_next_token() optionalcomma()
+ chk(number)
+ yval = current.x
+ zval = dval = 0
+ read_next_token() optz()
+ x.append(xval) y.append(yval) z.append(zval) d.append(dval)
+ chk(rightpar) read_next_token()
+//printf("%g %g %g %g\n", xval, yval, zval, dval)
+ leave("point")
+}
+proc optz() {
+ enter("optz")
+ optionalcomma()
+ if (current.token == number) {
+ zval = current.x
+ read_next_token()
+ optmodifier()
+ }
+ leave("optz")
+}
+proc optmodifier() {
+ enter("optmodifier")
+ optionalcomma()
+ if (current.token == number) {
+ dval = current.x
+ read_next_token()
+ optionalcomma()
+ if (current.token == label_) {
+ read_next_token()
+ }
+ optbezier()
+ }
+ leave("optmodifier")
+}
+proc optbezier() {
+ enter("optbezier")
+ optionalcomma()
+ if (current.token == leftpar) {
+ demand(number)
+ read_next_token()
+ optionalcomma() chk(number) read_next_token()
+ optionalcomma() chk(number) read_next_token()
+ optionalcomma() chk(number) demand(rightpar)
+ read_next_token()
+ }
+ leave("optbezier")
+}
+proc optionalcomma() {
+ enter("optionalcomma")
+ if (current.token == comma) {
+ read_next_token()
+ }
+ leave("optionalcomma")
+}
+proc p_err() {
+ printf("\nparse error\n")
+ pcur()
+ printf("line %d: %s\n", iline_, fline)
+ stop
+}
+proc errout() {local i
+ if (quiet) { return }
+ printf("\n%s problems\n\n", file.getname)
+ if (nspine) {
+ printf("Ignored %d spines\n", nspine)
+ }
+ for i=0, b2serr.count-1 {
+ printf("%s\n", b2serr.object(i).s)
+ }
+}
+endtemplate Import3d_Neurolucida3
diff --git a/bmtk-vb/bmtk/simulator/bionet/import3d/read_nts.hoc b/bmtk-vb/bmtk/simulator/bionet/import3d/read_nts.hoc
new file mode 100644
index 0000000..c58e9d6
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/import3d/read_nts.hoc
@@ -0,0 +1,331 @@
+// translation of ntscable's read_nts.c file for importing
+// Eutectic files. After reading and parsing lines, the logic
+// follows that in read_nlcda.hoc
+
+begintemplate Import3d_Eutectic_read
+public filetype, sections, input, type, file, err
+public label, id2pt, id2line, pt2id, pt2sec, sec2pt, mark
+external hoc_sf_
+public id, ptype, tag, x, y, z, d, iline, pointtype, points, type
+public firstpoints, lastpoints
+objref sections, file, stack, cursec, firstpoints, lastpoints, gm
+objref id, ptype, tag, x, y, z, d, iline, pointtype, points, type
+objref iline2pt, vectors, header, lines, diam, parse_err, nil, soma
+strdef tstr, tstr1, point_type_names, filetype, line
+
+proc init() {
+ filetype = "Eutectic"
+ vectors = new List()
+ header = new List()
+ lines = new List()
+ gm = new GUIMath()
+ MTO = 0
+ TTO = 3
+ BTO = 6
+ CP = 9+1
+ FS = 12+1
+ SB = 15+1
+ BP = 18+1
+ NE = 21+1
+ ES = 24+1
+ MAE = 27
+ TAE = 30
+ BAE = 33
+ SOS = 36
+ SCP = 39
+ SOE = 42
+ OS = 45+1
+ OCP = 48
+ OE = 51+1
+ DS = 54+1
+ DCP = 57
+ DE = 60+1
+ point_type_names = \
+"MTOTTOBTO CP FS SB BP NE ESMAETAEBAESOSSCPSOE OSOCP OE DSDCP DE"
+// note: numbering for two-character items is 1 more than in read_nts.c
+// since the leading space is not included in the first character
+}
+
+proc input() {local i
+ nspine = 0
+ err = 0
+ parse_err = new List()
+ sections = new List()
+ stack = new List()
+ lastpoints = new Vector()
+ firstpoints = new Vector()
+
+ rdfile($s1)
+ parse2()
+ type = new Vector(sections.count)
+ for i=0, sections.count-1 {
+ type.x[i] = tag.x[sections.object(i).id]
+ }
+ connect2soma()
+ if (err) { errout() }
+}
+
+proc rdfile() {local i, j
+ file = new File($s1)
+ // count lines for vector allocation space (not really necessary)
+ if (!file.ropen()) {
+ err = 1
+ printf("could not open %s\n", $s1)
+ }
+ for (i = 0; !file.eof(); i += 1) {
+ file.gets(line)
+ }
+ file.close()
+// printf("%s has %d lines\n", $s1, i)
+ alloc(i, id, ptype, tag, x, y, z, d, iline, pointtype, points)
+ diam = d
+ file.ropen()
+ for (i = 1; !file.eof(); i += 1) {
+ file.gets(line)
+ parse(i, line)
+ }
+ file.close()
+}
+
+proc alloc() { local i // $oi.size = 0 but enough space for $1 elements
+ for i = 2, numarg() {
+ $oi = new Vector($1)
+ $oi.resize(0)
+ vectors.append($oi)
+ }
+}
+
+proc parse() {local a1, a2, a3, a4, a5, a6, a7
+ n = sscanf($s2, "%d %s %d %f %f %f %f", &a1, tstr, &a3, &a4, &a5, &a6, &a7)
+ hoc_sf_.left($s2, hoc_sf_.len($s2)-1)
+ if (n <= 0) {
+ header.append(new String($s2))
+ return
+ }
+ if (n != 7) {
+ err = 1
+ sprint(tstr, "%d: %s parse failure after item %d", $1, $s2, n)
+ parse_err.append(new String(tstr))
+ return
+ }
+ a2 = hoc_sf_.head(point_type_names, tstr, tstr1)
+// print tstr, " ", a2
+	// first points of branches (before physical connection) are 1
+	// continuation points are 2
+	// branch points are 3
+	// end points are 4
+ // a branch point can also be a first point
+ // so easiest to accumulate them here
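+	// e.g. the pointtype sequence 1 2 3 2 4 2 4 yields three sections
+	// (points 0-2, 3-4, 5-6); parse2 below attaches the second and
+	// third as daughters of the first at the point typed 3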
+ if (a2 == MTO) {
+ last = 1
+ firstpoints.append(id.size)
+ }else if (a2 == BP ){
+ if (last == 3 || last == 4){
+ firstpoints.append(id.size)
+ }
+ last = 3
+ }else if (a2 == FS || a2 == SB || a2 == CP){
+ if (a2 == SB) { err = 1 nspine += 1 }
+ if (last == 3 || last == 4){
+ firstpoints.append(id.size)
+ last = 1
+ }else{
+ last = 2
+ }
+ }else if (a2 == NE || a2 == ES || a2 == MAE || a2 == TAE || a2 == BAE){
+ if (last == 3 || last == 4){
+ firstpoints.append(id.size)
+ }
+ last = 4
+ }else if (a2 == SOS){
+ last = 10
+ }else if (a2 == SCP){
+ last = 10
+ }else if (a2 == SOE){
+ last = 10
+ }else if (a2 == OS){
+ return
+ }else if (a2 == DS){
+ return
+	}else if (a2 == DCP || a2 == OCP){
+ return
+ }else if (a2 == DE || a2 == OE){
+ return
+ }else{
+ return
+ }
+ pointtype.append(last)
+ points.append(a1)
+ id.append(a1)
+ ptype.append(a2)
+ tag.append(a3)
+ x.append(a4)
+ y.append(a5)
+ z.append(a6)
+ d.append(a7)
+ iline.append($1)
+ lines.append(new String($s2))
+}
+proc parse2() {local i, j, k localobj parent
+ i = ptype.indwhere("==", SOS)
+ j = ptype.indwhere("==", SOE)
+ if (i > -1 && j > i) {
+ mksec(i, j, nil)
+ cursec.iscontour_ = 1
+// cursec.type=1
+ soma = cursec
+ }
+ for i=0, firstpoints.size-1 {
+ j = firstpoints.x[i]
+ for (k=j; pointtype.x[k] <= 2; k += 1) {
+ }
+ parent = pop()
+ if (parent != nil) {
+ if (parent.volatile < 1) {
+ push(parent)
+ parent.volatile += 1
+ }
+ }
+ mksec(j, k, parent)
+//printf("%s %d %d: %s | %s\n", cursec, j, k, lines.object(j).s, lines.object(k).s)
+ cursec.parentsec = parent
+// logic_connect(cursec, parent)
+ if (pointtype.x[k] == 3) {
+ push(cursec)
+ }
+ }
+ if (stack.count > 0) {
+ err = 1
+ }
+}
+
+proc push() {
+ stack.append($o1)
+}
+obfunc pop() {localobj p
+ if (stack.count > 0) {
+ p = stack.object(stack.count-1)
+ stack.remove(stack.count-1)
+ }else{
+ p = nil
+ }
+ return p
+}
+
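+// mksec(first, last, parent): build an Import3d_Section from points
+// $1..$2.  When a parent exists, prepend the parent's last xyz position
+// (with the diameter of this section's own second point) so the child
+// is physically connected to its parent.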
+proc mksec() {local i, x1, y1, z1, d1
+ if ($o3 == nil) {
+ cursec = new Import3d_Section($1, $2-$1+1)
+ cursec.append(0, $1, $2-$1+1, x, y, z, d)
+ }else{
+ cursec = new Import3d_Section($1, $2-$1+2)
+ cursec.append(1, $1, $2-$1+1, x, y, z, d)
+ cursec.first = 0 // physical connection
+ i = $o3.raw.ncol-1
+ x1 = $o3.raw.x[0][i]
+ y1 = $o3.raw.x[1][i]
+ z1 = $o3.raw.x[2][i]
+ //d1 = $o3.d.x[i]
+ cursec.set_pt(0, x1, y1, z1, cursec.d.x[1])
+ cursec.fid = 1
+ }
+ cursec.volatile = 0
+ cursec.type = tag.x[$1]
+ sections.append(cursec)
+ lastpoints.append($2)
+}
+
+proc logic_connect() {local i, x1, y1, z1, d1
+ if ($o2 == nil) { return }
+ i = $o2.raw.ncol-1
+ x1 = $o2.raw.x[0][i]
+ y1 = $o2.raw.x[1][i]
+ z1 = $o2.raw.x[2][i]
+ d1 = $o2.d.x[i]
+ $o1.insrt(0, x1, y1, z1, $o1.d.x[0])
+ $o1.first = 1
+}
+
+proc connect2soma() {local i, ip, j, jp, bp, jpmin, dmin, d, xmin, xmax, ymin, ymax localobj sec, xc, yc, zc, c
+ // find centroid of soma if outline and connect all dangling
+ // dendrites to that if inside the contour
+ if (soma == nil) { return }
+ xc = soma.raw.getrow(0)
+ yc = soma.raw.getrow(1)
+ zc = soma.raw.getrow(2)
+ xmin = xc.min-.5 xmax = xc.max + .5
+ ymin = yc.min-.5 ymax = yc.max + .5
+ c = soma.contourcenter(xc, yc, zc)
+ for i=0, sections.count-1 {
+ sec = sections.object(i)
+ if (sec.parentsec == nil && sec != soma) {
+ if (gm.inside(sec.raw.x[0][0], sec.raw.x[1][0], xmin, ymin, xmax, ymax)) {
+ sec.parentsec = soma
+ sec.parentx = .5
+ sec.insrt(0, c.x[0], c.x[1], c.x[2], .01)
+ sec.first = 1
+ sec.fid = 1
+ }
+ }
+ }
+}
+
+proc label(){
+ sprint($s2, "Line %d: %s", iline.x[$1], lines.object($1).s)
+}
+func id2pt() {
+ i = id.indwhere(">=", $1)
+//print "id2pt ", $1, i, id.x[i]
+ return i
+}
+func id2line() { return points.x[$1] }
+func pt2id() {local i
+//print "pt2id ", $1, id.x[$1]
+ return id.x[$1]
+}
+func pt2sec(){local i, j
+ i = lastpoints.indwhere(">=", $1)
+ if (i == -1) {
+ i = lastpoints.size-1
+ }
+ $o2 = sections.object(i)
+ j = $1 - $o2.id + $o2.fid
+//print "pt2sec ", $1, $o2, $o2.id, j
+ return j
+}
+func sec2pt(){local i localobj sec
+ sec = sections.object($1)
+ i = sec.id + $2 - sec.fid
+//print "sec2pt ", $1, $2, sec.id, sec.first, i
+ return i
+}
+
+proc mark() {local i, a,b,c,d,e,f
+ print $o1, $2, iline, lines
+ i = id.indwhere("==",$2)
+ printf("%d,%d,%d: %s\n", i, id.x[i], iline.x[i], lines.object(i).s)
+ n = sscanf(lines.object(i).s, "%d %s %d %f %f %f %f", &a, tstr, &b, &c, &d, &e, &f)
+ if (n == 7) {
+ print a," ",tstr," ",b,c,d,e,f
+ $o1.mark(c,d,"S",12,4,1)
+ }
+}
+
+proc errout() {local i
+ printf("\n%s problems and default fixes\n\n", file.getname)
+ if (parse_err.count) {
+ printf(" Following lines could not be parsed\n")
+ for i=0, parse_err.count-1 {
+ printf(" %s\n", parse_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (stack.count > 0) {
+ printf(" stack.count = %d\n", stack.count)
+ }
+ if (nspine > 0) {
+		printf("	Ignored %d spines\n", nspine)
+ }
+}
+
+endtemplate Import3d_Eutectic_read
diff --git a/bmtk-vb/bmtk/simulator/bionet/import3d/read_swc.hoc b/bmtk-vb/bmtk/simulator/bionet/import3d/read_swc.hoc
new file mode 100644
index 0000000..2dddd72
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/import3d/read_swc.hoc
@@ -0,0 +1,428 @@
+// read swc file, create and verify that it is a single tree,
+// and identify the lists of unbranched points.
+
+begintemplate Import3d_SWC_read
+public input, pheader, instantiate
+public id, type, x, y, z, d, pid, iline, header, point2sec, sections, lines
+public idoffset, label, id2pt, id2line, pt2id, pt2sec, sec2pt, file, mark
+public filetype, err, helptxt
+public quiet
+external hoc_sf_
+objref id, type, x, y, z, d, pid, iline, header, lines
+objref file, vectors, sec2point, point2sec, sections
+objref connect2prox
+strdef tstr, line, filetype
+double a[7]
+objref id2index_
+
+// id and pid contain the raw id values (1st and 7th values on each line)
+// from the file. After the file is read id2index(id.x[i]) == i
+// Note that the only requirement for a valid swc file is the tree
+// topology condition pid.x[i] < id.x[i]
+
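+// Illustrative sketch (made-up values) of the 7-column lines that
+// parse() below accepts -- id type x y z radius pid:
+//	1 1 0 0  0 5 -1
+//	2 3 0 5  0 1  1
+//	3 3 0 10 0 1  2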
+
+proc init() {
+ quiet = 0
+ filetype = "SWC"
+ vectors = new List()
+ header = new List()
+ lines = new List()
+}
+
+func id2index() {
+ return id2index_.x[$1]
+}
+func pix2ix() {local pid_
+ pid_ = pid.x[$1]
+ if (pid_ < 0) { return -1 }
+ return id2index_.x[pid_]
+}
+
+proc input() {
+ err = 0
+ rdfile($s1)
+ check_pid() // and also creates id2index_
+ sectionify() // create point2sec index map
+ mksections() // Import3dSection list
+// instantiate()
+}
+
+proc rdfile() {local i
+ file = new File($s1)
+ // count lines for vector allocation space (not really necessary)
+ if (!file.ropen()) {
+ err = 1
+ printf("could not open %s\n", $s1)
+ }
+ for (i = 0; !file.eof(); i += 1) {
+ file.gets(line)
+ }
+ file.close()
+// printf("%s has %d lines\n", $s1, i)
+ alloc(i, id, type, x, y, z, d, pid, iline)
+ file.ropen()
+ for (i = 1; !file.eof(); i += 1) {
+ file.gets(line)
+ parse(i, line)
+ }
+ file.close()
+}
+
+proc alloc() { local i // $oi.size = 0 but enough space for $1 elements
+ for i = 2, numarg() {
+ $oi = new Vector($1)
+ $oi.resize(0)
+ vectors.append($oi)
+ }
+}
+
+proc parse() {local i, n
+ n = sscanf($s2, "%f %f %f %f %f %f %f", &a[0], &a[1], &a[2],\
+ &a[3], &a[4], &a[5], &a[6])
+ if (n == 7) {
+ a[5] *= 2 // radius to diameter
+ for i=0, 6 {
+ vectors.object(i).append(a[i])
+ }
+ iline.append($1) // for error messages
+ hoc_sf_.left($s2, hoc_sf_.len($s2)-1)
+ lines.append(new String($s2))
+ } else if (hoc_sf_.head($s2, "#", tstr) == 0) { // comment
+ header.append(new String($s2))
+ } else {
+ err = 1
+ printf("error %s line %d: could not parse: %s", file.getname, $1, $s2)
+// Note: only swcdata/n120.swc and swcdata/n423.swc last lines are invalid
+ }
+}
+
+proc pheader() {local i
+ for i=0, header.count-1 {
+ printf("%s", header.object(i).s)
+ }
+}
+
+proc shift_id() { local i, ierr, imin
+ // Note: swcdata/*.swc have sequential id's
+ // shift id and pid so that id.x[0] == 0. Then verify that
+ // id.x[i] == i
+ if (id.size > 0) {
+ imin = id.min_ind
+ idoffset = id.x[imin]
+ // is the first one the smallest?
+ if (id.x[0] != idoffset) {
+ err = 1
+printf("error %s lines %d and %d: id's %d and %d are not sequential\n", \
+ file.getname, iline.x[0], iline.x[imin], \
+ id.x[0], idoffset)
+ }
+ id.sub(idoffset)
+ pid.sub(idoffset)
+ }
+ ierr = 0
+ for i=0, id.size-1 {
+ if (id.x[i] != i ) {
+ err = 1
+printf("error %s line %d: id's shifted by %d are not sequential: id.x[%d] != %g\n", \
+ file.getname, iline.x[i], idoffset, i, id.x[i])
+ ierr += 1
+ }
+ if (ierr > 5) { break }
+ }
+}
+
+proc check_pid() {local i, ierr, needsort localobj tobj
+	// if all pid.x[i] < id.x[i] then we must have 1 or more trees with no loops
+ // Note: swcdata/*.swc conforms.
+ needsort = 0
+ ierr = 0
+ for i=0, id.size-1 {
+ if (i > 0) if (id.x[i] <= id.x[i-1]) { needsort = 1 }
+ if (pid.x[i] >= id.x[i]) {
+ err = 1
+printf("error %s line %d: index %d pid=%d is not less than id=%d\n",\
+ file.getname, iline.x[i], i, pid.x[i], id.x[i])
+ }
+ }
+ if (needsort) { // sort in id order
+ tobj = id.sortindex()
+ id.sortindex(id, tobj)
+ pid.sortindex(pid, tobj)
+ x.sortindex(x, tobj)
+ y.sortindex(y, tobj)
+ z.sortindex(z, tobj)
+		d.sortindex(d, tobj)
+ iline.sortindex(iline, tobj)
+ }
+ // the number of trees is just the number of pid's < 0
+ // Note: swcdata/*.swc have only one tree
+ tobj = new Vector()
+ tobj.indvwhere(pid, "<", 0)
+ if (tobj.size > 1) {
+ err = 1
+
+ if (!quiet) {// added by Sergey to suppress the warning output
+
+printf("warning %s: more than one tree:\n", file.getname)
+ printf(" root at line:")
+ for i=0, tobj.size-1 {
+ printf(" %d,", iline.x[tobj.x[i]])
+ }
+ printf(" \n")
+ }// end of quiet
+ }
+ // check for duplicate id
+ for i=1, id.size-1 if (id.x[i] == id.x[i-1]) {
+ err = 1
+printf("error %s: duplicate id:\n", file.getname)
+printf(" %d: %s\n", iline.x[i-1], lines.o(iline.x[i-1]).s)
+printf(" %d: %s\n", iline.x[i], lines.o(iline.x[i]).s)
+ }
+ // create the id2index_ map
+ id2index_ = new Vector(id.max()+1)
+ id2index_.fill(-1)
+ for i=0, id.size-1 {
+ id2index_.x[id.x[i]] = i
+ }
+}
+
+proc sectionify() {local i, si localobj tobj
+ // create point2sec map and sections list
+ // point2sec gives immediate knowledge of the section a point is in
+ // sections list is for display purposes
+ if (id.size < 2) { return }
+
+ // tobj stores the number of child nodes with pid equal to i
+ // actually every non-contiguous child adds 1.01 and a contiguous
+ // child adds 1
+ mark_branch(tobj)
+
+ point2sec = new Vector(id.size)
+	// the first point is in the root; if it is the only point it will
+	// be interpreted as spherical.
+ point2sec.x[0] = 0
+ si = 0
+ for i=1, id.size-1 {
+ if (tobj.x[pix2ix(i)] > 1 || connect2prox.x[i]) {
+ si += 1
+ }
+ point2sec.x[i] = si
+ }
+ sec2point = new Vector(si)
+ tobj.x[0] = 1
+ sec2point.indvwhere(tobj, "!=", 1)
+ // sec2point.x[i] is the last point of section i
+ // 0 is the first point of section 0
+ // sec2point.x[i-1]+1 is the first point of section i
+}
+
+proc mark_branch() { local i, p
+ //$o1 is used to store the number of child nodes with pid equal to i
+ // actually add a bit more than 1
+ // if noncontiguous child and 1 if contiguous child
+ // this is the basic computation that defines sections, i.e.
+ // contiguous 1's with perhaps a final 0 (a leaf)
+ // As usual, the only ambiguity will be how to treat the soma
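+	// e.g. a point with one contiguous child scores 1 (its section
+	// continues); two children score >= 2 (a branch point); a single
+	// contiguous child of a different type scores 1.01, which ends
+	// the section without being a true branch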
+
+ // Another wrinkle is that we do not want any sections that
+ // have multiple point types. E.g. point type 1 is often
+ // associated with the soma. Therefore we identify
+ // point type changes with branch points.
+
+ // however warn if the first two points do not have the same type
+ // if single point soma set the spherical_soma flag
+ if ( type.x[0] != type.x[1]) {
+ err = 1
+ if (0 && !quiet) {
+printf("\nNotice: %s:\nThe first two points have different types (%d and %d) but\n a single point NEURON section is not allowed.\n Interpreting the point as the center of a sphere of\n radius %g at location (%g, %g, %g)\n Will represent as 3-point cylinder with L=diam and with all\n children kept at their 1st point positions and connected\n with wire to middle point.\n If this is an incorrect guess, then change the file.\n"\
+, file.getname, type.x[0], type.x[1], d.x[0]/2, x.x[0], y.x[0], z.x[0])
+ }
+ }
+
+ // another wrinkle is that when a dendrite connects to the soma
+ // by a wire,
+ // another branch may connect to the first point of that dendrite
+ // In this case (to avoid single point sections)
+ // that branch should be connected to position 0
+ // of the dendrite (and the first point of that branch should be
+ // the same position as the first point of the dendrite.
+ // use connect2prox to indicate the parent point is not
+ // the distal end but the proximal end of the parent section
+ connect2prox = new Vector(id.size)
+
+ $o1 = new Vector(id.size)
+ for i=0, id.size-1 {
+ p = pix2ix(i)
+ if (p >= 0) {
+ $o1.x[p] += 1
+ if ( p != i-1) {
+ $o1.x[p] += .01
+// i is noncontiguous with its parent; if the parent is not soma but
+// the parent's parent is soma, then append i to connect2prox
+if (p > 1) if (type.x[p] != 1 && type.x[pix2ix(p)] == 1) {
+ connect2prox.x[i] = 1
+ $o1.x[p] = 1 // p not treated as a 1pt section
+ err = 1
+ if (0 && !quiet) {
+printf("\nNotice: %s:\n %d parent is %d which is the proximal point of a section\n connected by a wire to the soma.\n The dendrite is being connected to\n the proximal end of the parent dendrite.\n If this is an incorrect guess, then change the file.\n"\
+, file.getname, id.x[i], id.x[p])
+ }
+}
+
+ }
+ if (type.x[p] != type.x[i]) {
+ // increment enough to get past 1
+ // so force end of section but
+ // not really a branch
+ $o1.x[p] += .01
+ }
+ }
+ }
+}
+
+proc mksections() {local i, j, isec, first localobj sec, psec, pts
+ sections = new List()
+ isec = 0
+ first = 0
+ for i=0, id.size-1 {
+ if (point2sec.x[i] > isec) {
+ mksection(isec, first, i)
+ isec += 1
+ first = i
+ }
+ }
+ mksection(isec, first, i)
+}
+
+proc mksection() { local i, isec, first localobj sec
+ isec = $1 first=$2 i=$3
+ if (isec > 0) {// branches have pid as first point
+ sec = new Import3d_Section(first, i-first+1)
+ pt2sec(pix2ix(first), sec.parentsec)
+ // but if the parent is the root and the branch has more than
+ // one point, then connect to center of root with wire
+ if (point2sec.x[pix2ix(first)] == 0 && i > 1) {
+ sec.parentx = 0.5
+ sec.first = 1
+ }else{
+ if (pix2ix(first) == 0) { sec.parentx = 0 }
+ }
+ sec.append(0, pix2ix(first), 1, x, y, z, d)
+ sec.append(1, first, i-first, x, y, z, d)
+ }else{// pid not first point in root section
+ sec = new Import3d_Section(first, i-first)
+ sec.append(0, first, i-first, x, y, z, d)
+ }
+ sec.type = type.x[first]
+ sections.append(sec)
+ if (object_id(sec.parentsec)) {
+ if (sec.parentsec.type == 1 && sec.type != 1) {
+ sec.d.x[0] = sec.d.x[1]
+ }
+ }
+ if (connect2prox.x[first]) {
+ sec.pid = sec.parentsec.id
+ sec.parentx = 0
+ }
+}
+
+func same() {
+ if ($2 < 0) return 0
+ if (x.x[$1] == x.x[$2]) {
+ if (y.x[$1] == y.x[$2]) {
+// if (z.x[$1] == z.x[$2]) {
+ return 1
+// }
+ }
+ }
+ return 0
+}
+
+proc instantiate() {local i, isec, psec, pp, si, px
+ if (id.size < 2) { return }
+
+ sprint(tstr, "~create K[%d]", sec2point.size)
+ execute(tstr)
+
+ // connect
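+	// e.g. for a child section 3 whose parent point lies in section 1
+	// this builds and executes:  K[1] connect K[3](0), (1)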
+ for i = 2, id.size-1 {
+ if (point2sec.x[pix2ix(i)] == point2sec.x[i]) { continue }
+ if (pix2ix(i) == 0) { px = 0 } else { px = 1 }
+ sprint(tstr, "K[%d] connect K[%d](0), (%g)", \
+ point2sec.x[pix2ix(i)], point2sec.x[i], px)
+ execute(tstr)
+ }
+
+ // 3-d point info
+ // needs some thought with regard to interior duplicate
+ // points, and whether it is appropriate to make the first
+ // point in the section the same location and diam as the
+ // pid point
+ isec = 0
+ for i=0, id.size-1 {
+ if (point2sec.x[i] > isec ) { // in next section
+ ptadd(pix2ix(i), point2sec.x[i])
+ }
+ isec = point2sec.x[i]
+ ptadd(i, isec)
+ }
+}
+
+proc ptadd() {
+ sprint(tstr, "K[%d] { pt3dadd(%g, %g, %g, %g) }", \
+ $2, x.x[$1], y.x[$1], z.x[$1], d.x[$1])
+ execute(tstr)
+}
+
+proc label() {
+ sprint($s2, "Line %d: %s", iline.x[$1], lines.object($1).s)
+}
+func id2pt() {local i
+ if ($1 < 0) {
+ $1 = 0
+ }else if ( $1 > id2index_.size-1) {
+ $1 = id2index_.size-1
+ }
+ return id2index($1)
+}
+func id2line() { return $1 }
+func pt2id() { return id.x[$1] }
+func pt2sec() { local i,j //from selpoint
+ i = point2sec.x[$1]
+ $o2 = sections.object(i)
+ j = $1 - $o2.id
+ if (i > 0) { j += 1 }
+ return j
+}
+func sec2pt() {local i
+ i = sections.object($1).id + $2
+ if ($1 > 0) {
+ i -= 1
+ }
+ return i
+}
+proc mark() {local i
+ print $o1, $2, iline, lines
+ i = id2index($2)
+ printf("%d %d %g %g: %s\n", i, iline.x[i], x.x[i], y.x[i], lines.object(i).s)
+ $o1.mark(x.x[i], y.x[i], "S", 12, 4, 1)
+}
+
+proc helptxt() {
+ xpanel("SWC file filter characteristics")
+xlabel(" Sections consist of unbranched sequences of points having")
+xlabel("the same type. All sections connect from 0 to 1")
+xlabel("(except those connecting to the first point")
+xlabel("of the root section connect from 0 to 0).")
+xlabel("With one exception, all child sections have as their first pt3d")
+xlabel("point a copy of the parent point and the diameter of that first")
+xlabel("point is the diameter of the parent point")
+xlabel(" The exception, so that the error in area is not so")
+xlabel("egregious, is that dendrite branches that connect to the soma")
+xlabel("get a copy of the parent point as their first pt3d point but")
+xlabel("the diameter of that point is the diameter of the second point")
+xlabel(" The root section does not contain an extra parent point.")
+ xpanel(0)
+}
+endtemplate Import3d_SWC_read
diff --git a/bmtk-vb/bmtk/simulator/bionet/io_tools.py b/bmtk-vb/bmtk/simulator/bionet/io_tools.py
new file mode 100644
index 0000000..2ae6289
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/io_tools.py
@@ -0,0 +1,38 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from neuron import h
+
+from bmtk.simulator.core.io_tools import IOUtils
+
+pc = h.ParallelContext()
+MPI_Rank = int(pc.id())
+MPI_Size = int(pc.nhost())
+
+
+class NEURONIOUtils(IOUtils):
+ def __init__(self):
+ super(NEURONIOUtils, self).__init__()
+ self.mpi_rank = MPI_Rank
+ self.mpi_size = MPI_Size
+
+io = NEURONIOUtils()
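+
+# Usage sketch (illustrative, not part of the module): other bionet code
+# imports this singleton rather than constructing its own, e.g.
+#     from bmtk.simulator.bionet.io_tools import io
+#     io.log_info('running on rank %d of %d' % (io.mpi_rank, io.mpi_size))
+# log_info is assumed to come from the IOUtils base class.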
diff --git a/bmtk-vb/bmtk/simulator/bionet/io_tools.pyc b/bmtk-vb/bmtk/simulator/bionet/io_tools.pyc
new file mode 100644
index 0000000..33f4fcd
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/io_tools.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/__init__.py b/bmtk-vb/bmtk/simulator/bionet/modules/__init__.py
new file mode 100644
index 0000000..7bb45dc
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/modules/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .ecp import EcpMod
+from .record_cellvars import MembraneReport, SomaReport, SectionReport
+from .record_spikes import SpikesMod
+from .xstim import XStimMod
+from .save_synapses import SaveSynapses
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/__init__.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/__init__.pyc
new file mode 100644
index 0000000..750b8ea
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..716edfd
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/ecp.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/ecp.cpython-37.pyc
new file mode 100644
index 0000000..b4a1961
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/ecp.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/record_cellvars.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/record_cellvars.cpython-37.pyc
new file mode 100644
index 0000000..2ff2541
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/record_cellvars.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/record_spikes.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/record_spikes.cpython-37.pyc
new file mode 100644
index 0000000..0330fa3
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/record_spikes.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/save_synapses.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/save_synapses.cpython-37.pyc
new file mode 100644
index 0000000..5c98520
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/save_synapses.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/sim_module.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/sim_module.cpython-37.pyc
new file mode 100644
index 0000000..8fbd3ba
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/sim_module.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/xstim.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/xstim.cpython-37.pyc
new file mode 100644
index 0000000..273b5a3
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/xstim.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/xstim_waveforms.cpython-37.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/xstim_waveforms.cpython-37.pyc
new file mode 100644
index 0000000..22d74a6
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/__pycache__/xstim_waveforms.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/ecp.py b/bmtk-vb/bmtk/simulator/bionet/modules/ecp.py
new file mode 100644
index 0000000..3ea1059
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/modules/ecp.py
@@ -0,0 +1,275 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import h5py
+import math
+import pandas as pd
+from neuron import h
+import numpy as np
+
+from bmtk.simulator.bionet.modules.sim_module import SimulatorMod
+from bmtk.utils.sonata.utils import add_hdf5_magic, add_hdf5_version
+
+
+pc = h.ParallelContext()
+MPI_RANK = int(pc.id())
+N_HOSTS = int(pc.nhost())
+
+
+class EcpMod(SimulatorMod):
+ def __init__(self, tmp_dir, file_name, electrode_positions, contributions_dir, cells=[], variable_name='v',
+ electrode_channels=None):
+ self._ecp_output = file_name if os.path.isabs(file_name) else os.path.join(tmp_dir, file_name)
+ self._positions_file = electrode_positions
+ self._tmp_outputdir = tmp_dir
+ self._contributions_dir = contributions_dir if os.path.isabs(contributions_dir) else os.path.join(tmp_dir, contributions_dir)
+ self._cells = cells
+ self._rel = None
+ self._fih1 = None
+ self._rel_nsites = 0
+ self._block_size = 0
+ # self._biophys_gids = []
+ self._saved_gids = {}
+ self._nsteps = 0
+
+ self._tstep = 0 # cumulative time step
+ # self._rel_time = 0 #
+ self._block_step = 0 # time step within the given block of time
+ self._tstep_start_block = 0
+ self._data_block = None
+ self._cell_var_files = {}
+
+ self._tmp_ecp_file = self._get_tmp_fname(MPI_RANK)
+ self._tmp_ecp_handle = None
+ # self._tmp_ecp_dataset = None
+
+ self._local_gids = []
+
+ def _get_tmp_fname(self, rank):
+ return os.path.join(self._tmp_outputdir, 'tmp_{}_ecp.h5'.format(rank))
+
+ def _create_ecp_file(self, sim):
+ dt = sim.dt
+ tstop = sim.tstop
+ self._nsteps = int(round(tstop/dt))
+
+ # create file to temporary store ecp data on each rank
+ self._tmp_ecp_handle = h5py.File(self._tmp_ecp_file, 'a')
+ self._tmp_ecp_handle.create_dataset('data', (self._nsteps, self._rel_nsites), maxshape=(None, self._rel_nsites),
+ chunks=True)
+
+ # only the primary node will need to save the final ecp
+ if MPI_RANK == 0:
+ with h5py.File(self._ecp_output, 'w') as f5:
+ add_hdf5_magic(f5)
+ add_hdf5_version(f5)
+ f5.create_dataset('data', (self._nsteps, self._rel_nsites), maxshape=(None, self._rel_nsites),
+ chunks=True)
+ f5.attrs['dt'] = dt
+ f5.attrs['tstart'] = 0.0
+ f5.attrs['tstop'] = tstop
+
+ # Save channels. Currently we record from all channels; we may want to be more selective in the future.
+ f5.create_dataset('channel_id', data=np.arange(self._rel.nsites))
+
+ pc.barrier()
+
+ def _create_cell_file(self, gid):
+ file_name = os.path.join(self._contributions_dir, '{}.h5'.format(int(gid)))
+ file_h5 = h5py.File(file_name, 'a')
+ self._cell_var_files[gid] = file_h5
+ file_h5.create_dataset('data', (self._nsteps, self._rel_nsites), maxshape=(None, self._rel_nsites), chunks=True)
+ # self._cell_var_files[gid] = file_h5['ecp']
+
+ def _calculate_ecp(self, sim):
+ self._rel = RecXElectrode(self._positions_file)
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+ #cell = sim.net.get_local_cell(gid)
+ # cell = sim.net.cells[gid]
+ self._rel.calc_transfer_resistance(gid, cell.get_seg_coords())
+
+ self._rel_nsites = self._rel.nsites
+ sim.h.cvode.use_fast_imem(1) # make i_membrane_ a range variable
+
+ def set_pointers():
+ for gid, cell in sim.net.get_local_cells().items():
+ #for gid, cell in sim.net.local_cells.items():
+ # for gid, cell in sim.net.cells.items():
+ cell.set_im_ptr()
+ self._fih1 = sim.h.FInitializeHandler(0, set_pointers)
+
+ def _save_block(self, interval):
+ """Add """
+ itstart, itend = interval
+ self._tmp_ecp_handle['data'][itstart:itend, :] += self._data_block[0:(itend - itstart), :]
+ self._tmp_ecp_handle.flush()
+ self._data_block[:] = 0.0
+
+ def _save_ecp(self, sim):
+ """Save ECP from each rank to disk into a single file"""
+ block_size = sim.nsteps_block
+ nblocks, remain = divmod(self._nsteps, block_size)
+ ivals = [i*block_size for i in range(nblocks+1)]
+ if remain != 0:
+ ivals.append(self._nsteps)
+
+ for rank in range(N_HOSTS): # iterate over the ranks
+ if rank == MPI_RANK: # wait until finished with a particular rank
+ with h5py.File(self._ecp_output, 'a') as ecp_f5:
+ for i in range(len(ivals)-1):
+ ecp_f5['data'][ivals[i]:ivals[i+1], :] += self._tmp_ecp_handle['data'][ivals[i]:ivals[i+1], :]
+
+ pc.barrier()
+
+ def _save_cell_vars(self, interval):
+ itstart, itend = interval
+
+ for gid, data in self._saved_gids.items():
+ h5_file = self._cell_var_files[gid]
+ h5_file['data'][itstart:itend, :] = data[0:(itend-itstart), :]
+ h5_file.flush()
+ data[:] = 0.0
+
+ def _delete_tmp_files(self):
+ if os.path.exists(self._tmp_ecp_file):
+ os.remove(self._tmp_ecp_file)
+
+ def initialize(self, sim):
+ if self._contributions_dir and (not os.path.exists(self._contributions_dir)) and MPI_RANK == 0:
+ os.makedirs(self._contributions_dir)
+ pc.barrier()
+
+ self._block_size = sim.nsteps_block
+
+ # Get list of gids being recorded
+ selected_gids = set(sim.net.get_node_set(self._cells).gids())
+ self._local_gids = list(set(sim.biophysical_gids) & selected_gids)
+
+ self._calculate_ecp(sim)
+ self._create_ecp_file(sim)
+
+ # ecp data
+ self._data_block = np.zeros((self._block_size, self._rel_nsites))
+
+ # create list of all cells whose ecp values will be saved separately
+ self._saved_gids = {gid: np.empty((self._block_size, self._rel_nsites))
+ for gid in self._local_gids}
+ for gid in self._saved_gids.keys():
+ self._create_cell_file(gid)
+
+ pc.barrier()
+
+ def step(self, sim, tstep):
+ for gid in self._local_gids: # compute ecp only from the biophysical cells
+ cell = sim.net.get_cell_gid(gid)
+ #cell = sim.net.get_local_cell(gid)
+ # cell = sim.net.cells[gid]
+ im = cell.get_im()
+ tr = self._rel.get_transfer_resistance(gid)
+ ecp = np.dot(tr, im)
+
+ if gid in self._saved_gids.keys():
+ # save individual contribution
+ self._saved_gids[gid][self._block_step, :] = ecp
+
+ # add to total ecp contribution
+ self._data_block[self._block_step, :] += ecp
+
+ self._block_step += 1
+
+ def block(self, sim, block_interval):
+ self._save_block(block_interval)
+ # self._save_ecp(block_interval)
+ self._save_cell_vars(block_interval)
+
+ self._block_step = 0
+ self._tstep_start_block = self._tstep
+
+ def finalize(self, sim):
+ if self._block_step > 0:
+ # just in case the simulation doesn't end on a block step
+ self.block(sim, (sim.n_steps - self._block_step, sim.n_steps))
+
+ self._save_ecp(sim)
+ self._delete_tmp_files()
+ pc.barrier()
+
+
+class RecXElectrode(object):
+ """Extracellular electrode
+
+ """
+
+ def __init__(self, positions):
+ """Create an array"""
+ # self.conf = conf
+ electrode_file = positions # self.conf["recXelectrode"]["positions"]
+
+ # convert coordinates to an ndarray; the first index is xyz and the second is the channel number
+ el_df = pd.read_csv(electrode_file, sep=' ')
+ self.pos = el_df[['x_pos', 'y_pos', 'z_pos']].T.values
+ #self.pos = el_df.as_matrix(columns=['x_pos', 'y_pos', 'z_pos']).T
+ self.nsites = self.pos.shape[1]
+ # self.conf['run']['nsites'] = self.nsites # add to the config
+ self.transfer_resistances = {} # V_e = transfer_resistance*Im
+
+ def drift(self):
+ # will include function to model electrode drift
+ pass
+
+ def get_transfer_resistance(self, gid):
+ return self.transfer_resistances[gid]
+
+ def calc_transfer_resistance(self, gid, seg_coords):
+ """Precompute mapping from segment to electrode locations"""
+ sigma = 0.3 # mS/mm
+
+ r05 = (seg_coords['p0'] + seg_coords['p1']) / 2
+ dl = seg_coords['p1'] - seg_coords['p0']
+
+ nseg = r05.shape[1]
+
+ tr = np.zeros((self.nsites, nseg))
+
+ for j in range(self.nsites): # calculate mapping for each site on the electrode
+ rel = np.expand_dims(self.pos[:, j], axis=1) # coordinates of a j-th site on the electrode
+ rel_05 = rel - r05 # distance between electrode and segment centers
+
+ # compute dot product column-wise; the resulting array has as many columns as the original
+ r2 = np.einsum('ij,ij->j', rel_05, rel_05)
+
+ # same column-wise dot product, this time against the segment direction vectors
+ rlldl = np.einsum('ij,ij->j', rel_05, dl)
+ dlmag = np.linalg.norm(dl, axis=0) # length of each segment
+ rll = abs(rlldl / dlmag) # component of r parallel to the segment axis; it must always be positive
+ rT2 = r2 - rll ** 2 # square of perpendicular component
+ up = rll + dlmag / 2
+ low = rll - dlmag / 2
+ num = up + np.sqrt(up ** 2 + rT2)
+ den = low + np.sqrt(low ** 2 + rT2)
+ tr[j, :] = np.log(num / den) / dlmag # units of 1/um; use with im_ (total seg current)
+ np.copyto(tr[j, :], 0, where=(dlmag == 0)) # zero out stub segments
+
+ tr *= 1 / (4 * math.pi * sigma)
+ self.transfer_resistances[gid] = tr
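+
+
+# ---------------------------------------------------------------------------
+# Illustrative sketch (not part of the original module): the line-source
+# approximation used in calc_transfer_resistance(), reduced to one segment and
+# one electrode site. The inputs are hypothetical toy values; the arithmetic
+# mirrors the per-site loop above.
+# ---------------------------------------------------------------------------
+if __name__ == '__main__':
+    p0 = np.array([0.0, 0.0, 0.0])     # segment start (um)
+    p1 = np.array([0.0, 10.0, 0.0])    # segment end (um)
+    site = np.array([20.0, 5.0, 0.0])  # electrode site (um)
+    sigma = 0.3                        # extracellular conductivity, mS/mm
+
+    r05 = (p0 + p1) / 2                # segment midpoint
+    dl = p1 - p0                       # segment direction vector
+    rel_05 = site - r05
+    dlmag = np.linalg.norm(dl)
+    rll = abs(np.dot(rel_05, dl) / dlmag)    # component parallel to the segment
+    rT2 = np.dot(rel_05, rel_05) - rll ** 2  # perpendicular component squared
+    up, low = rll + dlmag / 2, rll - dlmag / 2
+    tr = np.log((up + np.sqrt(up ** 2 + rT2)) / (low + np.sqrt(low ** 2 + rT2))) / dlmag
+    tr *= 1 / (4 * math.pi * sigma)
+    print('transfer resistance of a 10 um segment, 20 um away: {}'.format(tr))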
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/ecp.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/ecp.pyc
new file mode 100644
index 0000000..d3fd9d9
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/ecp.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/record_cellvars.py b/bmtk-vb/bmtk/simulator/bionet/modules/record_cellvars.py
new file mode 100644
index 0000000..91a0ace
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/modules/record_cellvars.py
@@ -0,0 +1,212 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import h5py
+import numpy as np
+from neuron import h
+
+from bmtk.simulator.bionet.modules.sim_module import SimulatorMod
+from bmtk.simulator.bionet.io_tools import io
+
+from bmtk.utils.io import cell_vars
+try:
+ # Check to see if h5py is built to run in parallel
+ if h5py.get_config().mpi:
+ MembraneRecorder = cell_vars.CellVarRecorderParallel
+ else:
+ MembraneRecorder = cell_vars.CellVarRecorder
+
+except Exception as e:
+ MembraneRecorder = cell_vars.CellVarRecorder
+
+MembraneRecorder._io = io
+
+pc = h.ParallelContext()
+MPI_RANK = int(pc.id())
+N_HOSTS = int(pc.nhost())
+
+
+def first_element(lst):
+ return lst[0]
+
+
+transforms_table = {
+ 'first_element': first_element,
+}
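+
+# Illustrative sketch (not part of the original module): how a `transform`
+# entry is resolved. A report configured with transform={'ica': 'first_element'}
+# ('ica' being a hypothetical variable name) looks up the callable here and
+# applies it to each value before recording:
+#     fnc = transforms_table['first_element']
+#     fnc([0.1, 0.2, 0.3])  # -> 0.1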
+
+
+class MembraneReport(SimulatorMod):
+ def __init__(self, tmp_dir, file_name, variable_name, cells, sections='all', buffer_data=True, transform={}):
+ """Module used for saving NEURON cell properities at each given step of the simulation.
+
+ :param tmp_dir:
+ :param file_name: name of h5 file to save variable.
+ :param variables: list of cell variables to record
+ :param gids: list of gids to to record
+ :param sections:
+ :param buffer_data: Set to true then data will be saved to memory until written to disk during each block, reqs.
+ more memory but faster. Set to false and data will be written to disk on each step (default: True)
+ """
+ self._all_variables = list(variable_name)
+ self._variables = list(variable_name)
+ self._transforms = {}
+ # self._special_variables = []
+ for var_name, fnc_name in transform.items():
+ if fnc_name is None or len(fnc_name) == 0:
+ # no transform specified for this variable; leave it in the plain-variables list
+ continue
+
+ fnc = transforms_table[fnc_name]
+ self._transforms[var_name] = fnc
+ self._variables.remove(var_name)
+
+ self._tmp_dir = tmp_dir
+
+ self._file_name = file_name if os.path.isabs(file_name) else os.path.join(tmp_dir, file_name)
+ self._all_gids = cells
+ self._local_gids = []
+ self._sections = sections
+
+ self._var_recorder = MembraneRecorder(self._file_name, self._tmp_dir, self._all_variables,
+ buffer_data=buffer_data, mpi_rank=MPI_RANK, mpi_size=N_HOSTS)
+
+ self._gid_list = [] # list of all gids that will have their variables saved
+ self._data_block = {} # table of variable data indexed by [gid][variable]
+ self._block_step = 0 # time step within a given block
+
+ def _get_gids(self, sim):
+ # get list of gids to save. Will only work for biophysical cells saved on the current MPI rank
+ selected_gids = set(sim.net.get_node_set(self._all_gids).gids())
+ self._local_gids = list(set(sim.biophysical_gids) & selected_gids)
+
+ def _save_sim_data(self, sim):
+ self._var_recorder.tstart = 0.0
+ self._var_recorder.tstop = sim.tstop
+ self._var_recorder.dt = sim.dt
+
+ def initialize(self, sim):
+ self._get_gids(sim)
+ self._save_sim_data(sim)
+
+ # TODO: get section by name and/or list of section ids
+ # Build segment/section list
+ sec_list = []
+ seg_list = []
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+ cell.store_segments()
+ for sec_id, sec in enumerate(cell.get_sections()):
+ for seg in sec:
+ # TODO: Make sure the seg has the recorded variable(s)
+ sec_list.append(sec_id)
+ seg_list.append(seg.x)
+
+ # sec_list = [cell.get_sections_id().index(sec) for sec in cell.get_sections()]
+ # seg_list = [seg.x for seg in cell.get_segments()]
+
+ self._var_recorder.add_cell(gid, sec_list, seg_list)
+
+ self._var_recorder.initialize(sim.n_steps, sim.nsteps_block)
+
+ def step(self, sim, tstep):
+ # save all necessary cells/variables at the current time-step into memory
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+
+ for var_name in self._variables:
+ seg_vals = [getattr(seg, var_name) for seg in cell.get_segments()]
+ self._var_recorder.record_cell(gid, var_name, seg_vals, tstep)
+
+ for var_name, fnc in self._transforms.items():
+ seg_vals = [fnc(getattr(seg, var_name)) for seg in cell.get_segments()]
+ self._var_recorder.record_cell(gid, var_name, seg_vals, tstep)
+
+ self._block_step += 1
+
+ def block(self, sim, block_interval):
+ # write variables in memory to file
+ self._var_recorder.flush()
+
+ def finalize(self, sim):
+ # TODO: Build in mpi signaling into var_recorder
+ pc.barrier()
+ self._var_recorder.close()
+
+ pc.barrier()
+ self._var_recorder.merge()
+
+
+class SomaReport(MembraneReport):
+ """Special case for when only needing to save the soma variable"""
+ def __init__(self, tmp_dir, file_name, variable_name, cells, sections='soma', buffer_data=True, transform={}):
+ super(SomaReport, self).__init__(tmp_dir=tmp_dir, file_name=file_name, variable_name=variable_name, cells=cells,
+ sections=sections, buffer_data=buffer_data, transform=transform)
+
+ def initialize(self, sim):
+ self._get_gids(sim)
+ self._save_sim_data(sim)
+
+ for gid in self._local_gids:
+ self._var_recorder.add_cell(gid, [0], [0.5])
+ self._var_recorder.initialize(sim.n_steps, sim.nsteps_block)
+
+ def step(self, sim, tstep, rel_time=0.0):
+ # save all necessary cells/variables at the current time-step into memory
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+ for var_name in self._variables:
+ var_val = getattr(cell.hobj.soma[0](0.5), var_name)
+ self._var_recorder.record_cell(gid, var_name, [var_val], tstep)
+
+ for var_name, fnc in self._transforms.items():
+ var_val = getattr(cell.hobj.soma[0](0.5), var_name)
+ new_val = fnc(var_val)
+ self._var_recorder.record_cell(gid, var_name, [new_val], tstep)
+
+ self._block_step += 1
+
+
+class SectionReport(MembraneReport):
+ """For variables like im which have one value per section, not segment"""
+
+ def initialize(self, sim):
+ self._get_gids(sim)
+ self._save_sim_data(sim)
+
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+ sec_list = range(len(cell.get_sections()))
+ self._var_recorder.add_cell(gid, sec_list, sec_list)
+
+ self._var_recorder.initialize(sim.n_steps, sim.nsteps_block)
+
+ def step(self, sim, tstep):
+ for gid in self._local_gids:
+ for var in self._variables:
+ cell = sim.net.get_cell_gid(gid)
+ if var == 'im':
+ vals = cell.get_im()
+ elif var == 'v':
+ vals = np.array([sec.v for sec in cell.get_sections()])
+ else:
+ continue # unsupported section-level variable; skip rather than record stale values
+ self._var_recorder.record_cell(gid, var, vals, tstep)
+
+ self._block_step += 1
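+
+
+# Illustrative sketch (not part of the original module): report which recorder
+# implementation this environment selects (mirrors the h5py MPI check at the
+# top of this module).
+if __name__ == '__main__':
+    print('parallel h5py build: {}'.format(h5py.get_config().mpi))
+    print('selected recorder: {}'.format(MembraneRecorder.__name__))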
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/record_cellvars.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/record_cellvars.pyc
new file mode 100644
index 0000000..21c71d9
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/record_cellvars.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/record_netcons.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/record_netcons.pyc
new file mode 100644
index 0000000..d724a65
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/record_netcons.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/record_spikes.py b/bmtk-vb/bmtk/simulator/bionet/modules/record_spikes.py
new file mode 100644
index 0000000..4c8751b
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/modules/record_spikes.py
@@ -0,0 +1,94 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import csv
+import h5py
+import numpy as np
+from bmtk.simulator.bionet.modules.sim_module import SimulatorMod
+from bmtk.utils.io.spike_trains import SpikeTrainWriter
+
+from neuron import h
+
+
+pc = h.ParallelContext()
+MPI_RANK = int(pc.id())
+N_HOSTS = int(pc.nhost())
+
+
+class SpikesMod(SimulatorMod):
+ """Module use for saving spikes
+
+ """
+
+ def __init__(self, tmp_dir, spikes_file_csv=None, spikes_file=None, spikes_file_nwb=None, spikes_sort_order=None):
+ # TODO: Have option to turn off caching spikes to csv.
+ def _file_path(file_name):
+ if file_name is None:
+ return None
+ return file_name if os.path.isabs(file_name) else os.path.join(tmp_dir, file_name)
+
+ self._csv_fname = _file_path(spikes_file_csv)
+ self._save_csv = spikes_file_csv is not None
+
+ self._h5_fname = _file_path(spikes_file)
+ self._save_h5 = spikes_file is not None
+
+ self._nwb_fname = _file_path(spikes_file_nwb)
+ self._save_nwb = spikes_file_nwb is not None
+
+ self._tmpdir = tmp_dir
+ self._sort_order = spikes_sort_order
+
+ self._spike_writer = SpikeTrainWriter(tmp_dir=tmp_dir, mpi_rank=MPI_RANK, mpi_size=N_HOSTS)
+
+ def initialize(self, sim):
+ # TODO: since it's possible that other modules may need to access spikes, set_spikes_recording() should
+ # probably be called in the simulator itself.
+ sim.set_spikes_recording()
+
+ def block(self, sim, block_interval):
+ # take spikes from Simulator spikes vector and save to the tmp file
+ for gid, tVec in sim.spikes_table.items():
+ for t in tVec:
+ self._spike_writer.add_spike(time=t, gid=gid)
+
+ pc.barrier() # wait until all ranks have been saved
+ sim.set_spikes_recording() # reset recording vector
+
+ def finalize(self, sim):
+ self._spike_writer.flush()
+ pc.barrier()
+
+ if self._save_csv:
+ self._spike_writer.to_csv(self._csv_fname, sort_order=self._sort_order)
+ pc.barrier()
+
+ if self._save_h5:
+ self._spike_writer.to_hdf5(self._h5_fname, sort_order=self._sort_order)
+ pc.barrier()
+
+ if self._save_nwb:
+ self._spike_writer.to_nwb(self._nwb_fname, sort_order=self._sort_order)
+ pc.barrier()
+
+ self._spike_writer.close()
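+
+
+# ---------------------------------------------------------------------------
+# Illustrative usage sketch (not part of the original module), exercising only
+# the SpikeTrainWriter calls used above; the file name and gids are
+# hypothetical.
+# ---------------------------------------------------------------------------
+if __name__ == '__main__':
+    writer = SpikeTrainWriter(tmp_dir='.', mpi_rank=MPI_RANK, mpi_size=N_HOSTS)
+    for t, gid in [(1.5, 0), (2.0, 1), (2.5, 0)]:
+        writer.add_spike(time=t, gid=gid)
+    writer.flush()
+    writer.to_csv('example_spikes.csv', sort_order=None)
+    writer.close()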
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/record_spikes.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/record_spikes.pyc
new file mode 100644
index 0000000..dcd53a8
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/record_spikes.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/save_synapses.py b/bmtk-vb/bmtk/simulator/bionet/modules/save_synapses.py
new file mode 100644
index 0000000..396aa7d
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/modules/save_synapses.py
@@ -0,0 +1,235 @@
+import os
+import csv
+import h5py
+import numpy as np
+from neuron import h
+
+from .sim_module import SimulatorMod
+from bmtk.simulator.bionet.biocell import BioCell
+from bmtk.simulator.bionet.io_tools import io
+from bmtk.simulator.bionet.pointprocesscell import PointProcessCell
+
+
+pc = h.ParallelContext()
+MPI_RANK = int(pc.id())
+N_HOSTS = int(pc.nhost())
+
+
+class SaveSynapses(SimulatorMod):
+ def __init__(self, network_dir, single_file=False, **params):
+ self._network_dir = network_dir
+ self._virt_lookup = {}
+ self._gid_lookup = {}
+ self._sec_lookup = {}
+ if not os.path.exists(network_dir):
+ os.makedirs(network_dir)
+
+ if N_HOSTS > 1:
+ io.log_exception('save_synapses module is not currently supported with MPI')
+
+ self._syn_writer = ConnectionWriter(network_dir)
+
+ def _print_nc(self, nc, src_nid, trg_nid, cell, src_pop, trg_pop, edge_type_id):
+ if isinstance(cell, BioCell):
+ sec_x = nc.postloc()
+ sec = h.cas()
+ sec_id = self._sec_lookup[cell.gid][sec] #cell.get_section_id(sec)
+ h.pop_section()
+ self._syn_writer.add_bio_conn(edge_type_id, src_nid, src_pop, trg_nid, trg_pop, nc.weight[0], sec_id, sec_x)
+ # print '{} ({}) <-- {} ({}), {}, {}, {}, {}'.format(trg_nid, trg_pop, src_nid, src_pop, nc.weight[0], nc.delay, sec_id, sec_x)
+
+ else:
+ self._syn_writer.add_point_conn(edge_type_id, src_nid, src_pop, trg_nid, trg_pop, nc.weight[0])
+ #print '{} ({}) <-- {} ({}), {}, {}'.format(trg_nid, trg_pop, src_nid, src_pop, nc.weight[0], nc.delay)
+
+
+ def initialize(self, sim):
+ io.log_info('Saving network connections. This may take a while.')
+
+ # Need a way to look up virtual nodes from nc.pre()
+ for pop_name, nodes_table in sim.net._virtual_nodes.items():
+ for node_id, virt_node in nodes_table.items():
+ self._virt_lookup[virt_node.hobj] = (pop_name, node_id)
+
+ # Need to figure out node_id and pop_name from nc.srcgid()
+ for node_pop in sim.net.node_populations:
+ pop_name = node_pop.name
+ for node in node_pop[0::1]:
+ if node.model_type != 'virtual':
+ self._gid_lookup[node.gid] = (pop_name, node.node_id)
+
+ for gid, cell in sim.net.get_local_cells().items():
+ trg_pop, trg_id = self._gid_lookup[gid]
+ if isinstance(cell, BioCell):
+ #from pprint import pprint
+ #pprint({i: s_name for i, s_name in enumerate(cell.get_sections())})
+ #exit()
+ # sections = cell._syn_seg_ix
+ self._sec_lookup[gid] = {sec_name: sec_id for sec_id, sec_name in enumerate(cell.get_sections_id())}
+
+ else:
+ sections = [-1]*len(cell.netcons)
+
+ for nc, edge_type_id in zip(cell.netcons, cell._edge_type_ids):
+ src_gid = int(nc.srcgid())
+ if src_gid == -1:
+ # source is a virtual node
+ src_pop, src_id = self._virt_lookup[nc.pre()]
+ else:
+ src_pop, src_id = self._gid_lookup[src_gid]
+
+ self._print_nc(nc, src_id, trg_id, cell, src_pop, trg_pop, edge_type_id)
+
+ self._syn_writer.close()
+ io.log_info(' Done saving network connections.')
+
+
+class ConnectionWriter(object):
+ class H5Index(object):
+ def __init__(self, network_dir, src_pop, trg_pop):
+ # TODO: Merge with NetworkBuilder code for building SONATA files
+ self._nsyns = 0
+ self._n_biosyns = 0
+ self._n_pointsyns = 0
+ self._block_size = 5
+
+ self._pop_name = '{}_{}'.format(src_pop, trg_pop)
+ self._h5_file = h5py.File(os.path.join(network_dir, '{}_edges.h5'.format(self._pop_name)), 'w')
+ self._pop_root = self._h5_file.create_group('/edges/{}'.format(self._pop_name))
+ self._pop_root.create_dataset('edge_group_id', (self._block_size, ), dtype=np.uint16,
+ chunks=(self._block_size, ), maxshape=(None, ))
+ self._pop_root.create_dataset('source_node_id', (self._block_size, ), dtype=np.uint64,
+ chunks=(self._block_size, ), maxshape=(None, ))
+ self._pop_root['source_node_id'].attrs['node_population'] = src_pop
+ self._pop_root.create_dataset('target_node_id', (self._block_size, ), dtype=np.uint64,
+ chunks=(self._block_size, ), maxshape=(None, ))
+ self._pop_root['target_node_id'].attrs['node_population'] = trg_pop
+ self._pop_root.create_dataset('edge_type_id', (self._block_size, ), dtype=np.uint32,
+ chunks=(self._block_size, ), maxshape=(None, ))
+ self._pop_root.create_dataset('0/syn_weight', (self._block_size, ), dtype=np.float64,
+ chunks=(self._block_size, ), maxshape=(None, ))
+ self._pop_root.create_dataset('0/sec_id', (self._block_size, ), dtype=np.uint64,
+ chunks=(self._block_size, ), maxshape=(None, ))
+ self._pop_root.create_dataset('0/sec_x', (self._block_size, ), chunks=(self._block_size, ),
+ maxshape=(None, ), dtype=np.float64)
+ self._pop_root.create_dataset('1/syn_weight', (self._block_size, ), dtype=np.float64,
+ chunks=(self._block_size, ), maxshape=(None, ))
+
+ def _add_conn(self, edge_type_id, src_id, trg_id, grp_id):
+ self._pop_root['edge_type_id'][self._nsyns] = edge_type_id
+ self._pop_root['source_node_id'][self._nsyns] = src_id
+ self._pop_root['target_node_id'][self._nsyns] = trg_id
+ self._pop_root['edge_group_id'][self._nsyns] = grp_id
+
+ self._nsyns += 1
+ if self._nsyns % self._block_size == 0:
+ self._pop_root['edge_type_id'].resize((self._nsyns + self._block_size,))
+ self._pop_root['source_node_id'].resize((self._nsyns + self._block_size, ))
+ self._pop_root['target_node_id'].resize((self._nsyns + self._block_size, ))
+ self._pop_root['edge_group_id'].resize((self._nsyns + self._block_size, ))
+
+ def add_bio_conn(self, edge_type_id, src_id, trg_id, syn_weight, sec_id, sec_x):
+ self._add_conn(edge_type_id, src_id, trg_id, 0)
+ self._pop_root['0/syn_weight'][self._n_biosyns] = syn_weight
+ self._pop_root['0/sec_id'][self._n_biosyns] = sec_id
+ self._pop_root['0/sec_x'][self._n_biosyns] = sec_x
+
+ self._n_biosyns += 1
+ if self._n_biosyns % self._block_size == 0:
+ self._pop_root['0/syn_weight'].resize((self._n_biosyns + self._block_size, ))
+ self._pop_root['0/sec_id'].resize((self._n_biosyns + self._block_size, ))
+ self._pop_root['0/sec_x'].resize((self._n_biosyns + self._block_size, ))
+
+ def add_point_conn(self, edge_type_id, src_id, trg_id, syn_weight):
+ self._add_conn(edge_type_id, src_id, trg_id, 1)
+ self._pop_root['1/syn_weight'][self._n_pointsyns] = syn_weight
+
+ self._n_pointsyns += 1
+ if self._n_pointsyns % self._block_size == 0:
+ self._pop_root['1/syn_weight'].resize((self._n_pointsyns + self._block_size, ))
+
+ def clean_ends(self):
+ self._pop_root['source_node_id'].resize((self._nsyns,))
+ self._pop_root['target_node_id'].resize((self._nsyns,))
+ self._pop_root['edge_group_id'].resize((self._nsyns,))
+ self._pop_root['edge_type_id'].resize((self._nsyns,))
+
+ self._pop_root['0/syn_weight'].resize((self._n_biosyns,))
+ self._pop_root['0/sec_id'].resize((self._n_biosyns,))
+ self._pop_root['0/sec_x'].resize((self._n_biosyns,))
+
+ self._pop_root['1/syn_weight'].resize((self._n_pointsyns,))
+
+ eg_ds = self._pop_root.create_dataset('edge_group_index', (self._nsyns, ), dtype=np.uint64)
+ bio_count, point_count = 0, 0
+ for idx, grp_id in enumerate(self._pop_root['edge_group_id']):
+ if grp_id == 0:
+ eg_ds[idx] = bio_count
+ bio_count += 1
+ elif grp_id == 1:
+ eg_ds[idx] = point_count
+ point_count += 1
+
+ self._create_index('target')
+
+ def _create_index(self, index_type='target'):
+ if index_type == 'target':
+ edge_nodes = np.array(self._pop_root['target_node_id'], dtype=np.int64)
+ output_grp = self._pop_root.create_group('indicies/target_to_source')
+ elif index_type == 'source':
+ edge_nodes = np.array(self._pop_root['source_node_id'], dtype=np.int64)
+ output_grp = self._pop_root.create_group('indicies/source_to_target')
+
+ edge_nodes = np.append(edge_nodes, [-1])
+ n_targets = np.max(edge_nodes)
+ ranges_list = [[] for _ in range(n_targets + 1)]
+
+ n_ranges = 0
+ begin_index = 0
+ cur_trg = edge_nodes[begin_index]
+ for end_index, trg_gid in enumerate(edge_nodes):
+ if cur_trg != trg_gid:
+ ranges_list[cur_trg].append((begin_index, end_index))
+ cur_trg = int(trg_gid)
+ begin_index = end_index
+ n_ranges += 1
+
+ node_id_to_range = np.zeros((n_targets + 1, 2))
+ range_to_edge_id = np.zeros((n_ranges, 2))
+ range_index = 0
+ for node_index, trg_ranges in enumerate(ranges_list):
+ if len(trg_ranges) > 0:
+ node_id_to_range[node_index, 0] = range_index
+ for r in trg_ranges:
+ range_to_edge_id[range_index, :] = r
+ range_index += 1
+ node_id_to_range[node_index, 1] = range_index
+
+ output_grp.create_dataset('range_to_edge_id', data=range_to_edge_id, dtype='uint64')
+ output_grp.create_dataset('node_id_to_range', data=node_id_to_range, dtype='uint64')
+
+ def __init__(self, network_dir):
+ self._network_dir = network_dir
+ self._pop_groups = {}
+
+ def _group_key(self, src_pop, trg_pop):
+ return (src_pop, trg_pop)
+
+ def _get_edge_group(self, src_pop, trg_pop):
+ grp_key = self._group_key(src_pop, trg_pop)
+ if grp_key not in self._pop_groups:
+ self._pop_groups[grp_key] = self.H5Index(self._network_dir, src_pop, trg_pop)
+
+ return self._pop_groups[grp_key]
+
+ def add_bio_conn(self, edge_type_id, src_id, src_pop, trg_id, trg_pop, syn_weight, sec_id, sec_x):
+ h5_grp = self._get_edge_group(src_pop, trg_pop)
+ h5_grp.add_bio_conn(edge_type_id, src_id, trg_id, syn_weight, sec_id, sec_x)
+
+ def add_point_conn(self, edge_type_id, src_id, src_pop, trg_id, trg_pop, syn_weight):
+ h5_grp = self._get_edge_group(src_pop, trg_pop)
+ h5_grp.add_point_conn(edge_type_id, src_id, trg_id, syn_weight)
+
+ def close(self):
+ for _, h5index in self._pop_groups.items():
+ h5index.clean_ends()
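+
+
+# ---------------------------------------------------------------------------
+# Illustrative sketch (not part of the original module): the CSR-style lookup
+# built by H5Index._create_index(), run on a toy, already-grouped target
+# column. node_id_to_range[n] would give the [start, stop) rows of
+# range_to_edge_id whose (begin, end) pairs cover node n's edges.
+# ---------------------------------------------------------------------------
+if __name__ == '__main__':
+    targets = np.array([0, 0, 2, 2, 2, 3], dtype=np.int64)
+    edge_nodes = np.append(targets, [-1])   # -1 sentinel flushes the last range
+    ranges_list = [[] for _ in range(int(targets.max()) + 1)]
+    begin_index, cur_trg = 0, int(edge_nodes[0])
+    for end_index, trg in enumerate(edge_nodes):
+        if cur_trg != trg:
+            ranges_list[cur_trg].append((begin_index, end_index))
+            cur_trg, begin_index = int(trg), end_index
+    print('{}'.format(ranges_list))  # [[(0, 2)], [], [(2, 5)], [(5, 6)]]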
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/save_synapses.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/save_synapses.pyc
new file mode 100644
index 0000000..2e7d394
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/save_synapses.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/sim_module.py b/bmtk-vb/bmtk/simulator/bionet/modules/sim_module.py
new file mode 100644
index 0000000..f04e469
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/modules/sim_module.py
@@ -0,0 +1,72 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+class SimulatorMod(object):
+ """Class for writing custom bionet functions that will be called during the simulation. To use overwrite one or
+ more of the following methods in a subclass, and bionet will call the function at the appropiate time.
+
+ To call during a simulation:
+ ...
+ sim = Simulation(...)
+ mymod = MyModule(...)
+ sim.add_mod(mymod)
+ sim.run()
+
+ """
+
+ def initialize(self, sim):
+ """Will be called once at the beginning of the simulation run, after the network and simulation parameters have
+ all been finalized.
+
+ :param sim: Simulation object
+ """
+ pass
+
+ def step(self, sim, tstep):
+ """Called on every single time step (dt).
+
+ The step method is used for anything that should be recorded or changed continuously. dt is determined during
+ the setup, and the sim parameter can be used to access simulation, network and individual cell properties.
+
+ :param sim: Simulation object.
+ :param tstep: The discrete time-step
+ """
+ pass
+
+ def block(self, sim, block_interval):
+ """This method is called once after every block of time, as specified by the configuration.
+
+ Unlike the step method, which is called on every time-step, the block method will typically be called only a
+ few times over the entire simulation. The block method is preferable for disk access and saving, summing up
+ existing data, or similar functionality.
+
+ :param sim: Simulation object
+ :param block_interval: The time interval (tstep_start, tstep_end) for which the block is being called on.
+ """
+ pass
+
+ def finalize(self, sim):
+ """Call once at the very end of the simulation.
+
+ :param sim: Simulation object
+ """
+ pass
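+
+
+# Illustrative sketch (not part of the original module): a minimal custom
+# module. During sim.run(), bionet calls initialize() once, step() on every
+# time step, block() after each block of steps, and finalize() at the end.
+class StepCounter(SimulatorMod):
+    def initialize(self, sim):
+        self.n_steps_seen = 0
+
+    def step(self, sim, tstep):
+        self.n_steps_seen += 1
+
+    def finalize(self, sim):
+        print('simulated {} time steps'.format(self.n_steps_seen))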
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/sim_module.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/sim_module.pyc
new file mode 100644
index 0000000..ce6ce8e
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/sim_module.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/xstim.py b/bmtk-vb/bmtk/simulator/bionet/modules/xstim.py
new file mode 100644
index 0000000..f2192ff
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/modules/xstim.py
@@ -0,0 +1,163 @@
+import os
+import math
+import pandas as pd
+import numpy as np
+import six
+from neuron import h
+
+from bmtk.simulator.bionet.modules.sim_module import SimulatorMod
+from bmtk.simulator.bionet.modules.xstim_waveforms import stimx_waveform_factory
+from bmtk.simulator.bionet.utils import rotation_matrix
+from bmtk.simulator.bionet.io_tools import io
+
+
+class XStimMod(SimulatorMod):
+ def __init__(self, positions_file, waveform, mesh_files_dir=None, cells=None, set_nrn_mechanisms=True,
+ node_set=None):
+ self._positions_file = positions_file
+ self._mesh_files_dir = mesh_files_dir if mesh_files_dir is not None \
+ else os.path.dirname(os.path.realpath(self._positions_file))
+
+ self._waveform = waveform # TODO: Check if waveform is a file or dict and load it appropriately
+
+ self._set_nrn_mechanisms = set_nrn_mechanisms
+ self._electrode = None
+ self._cells = cells
+ self._local_gids = []
+ self._fih = None
+
+ #def __set_extracellular_mechanism(self):
+ # for gid in self._local_gids:
+
+ def initialize(self, sim):
+ if self._cells is None:
+ # if specific gids not listed just get all biophysically detailed cells on this rank
+ self._local_gids = sim.biophysical_gids
+ else:
+ # get subset of selected gids only on this rank
+ self._local_gids = list(set(sim.local_gids) & set(self._cells))
+
+ self._electrode = StimXElectrode(self._positions_file, self._waveform, self._mesh_files_dir, sim.dt)
+ for gid in self._local_gids:
+ # cell = sim.net.get_local_cell(gid)
+ cell = sim.net.get_cell_gid(gid)
+ cell.setup_xstim(self._set_nrn_mechanisms)
+ self._electrode.set_transfer_resistance(gid, cell.get_seg_coords())
+
+ def set_pointers():
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+ #cell = sim.net.get_local_cell(gid)
+ cell.set_ptr2e_extracellular()
+
+ self._fih = sim.h.FInitializeHandler(0, set_pointers)
+
+ def step(self, sim, tstep):
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+ # Use tstep + 1 to match existing isee-engine results. This makes the stimulus begin one step earlier
+ # than it would with tstep alone.
+ self._electrode.calculate_waveforms(tstep+1)
+ vext_vec = self._electrode.get_vext(gid)
+ cell.set_e_extracellular(vext_vec)
+
+
+class StimXElectrode(object):
+ """
+ Extracellular Stimulating electrode
+ """
+ def __init__(self, positions_file, waveform, mesh_files_dir, dt):
+ self._dt = dt
+ self._mesh_files_dir = mesh_files_dir
+
+ stimelectrode_position_df = pd.read_csv(positions_file, sep=' ')
+
+ self.elmesh_files = stimelectrode_position_df['electrode_mesh_file']
+ self.elpos = stimelectrode_position_df[['pos_x', 'pos_y', 'pos_z']].T.values
+ self.elrot = stimelectrode_position_df[['rotation_x', 'rotation_y', 'rotation_z']].values
+ self.elnsites = self.elpos.shape[1] # Number of electrodes in electrode file
+ self.waveform = stimx_waveform_factory(waveform)
+
+ self.trans_X = {} # mapping segment coordinates
+ self.waveform_amplitude = []
+ self.el_mesh = {}
+ self.el_mesh_size = []
+
+ self.read_electrode_mesh()
+ self.rotate_the_electrodes()
+ self.place_the_electrodes()
+
+ def read_electrode_mesh(self):
+ el_counter = 0
+ for mesh_file in self.elmesh_files:
+ file_path = mesh_file if os.path.isabs(mesh_file) else os.path.join(self._mesh_files_dir, mesh_file)
+ mesh = pd.read_csv(file_path, sep=" ")
+ mesh_size = mesh.shape[0]
+ self.el_mesh_size.append(mesh_size)
+
+ self.el_mesh[el_counter] = np.zeros((3, mesh_size))
+ self.el_mesh[el_counter][0] = mesh['x_pos']
+ self.el_mesh[el_counter][1] = mesh['y_pos']
+ self.el_mesh[el_counter][2] = mesh['z_pos']
+ el_counter += 1
+
+ def place_the_electrodes(self):
+
+ transfer_vector = np.zeros((self.elnsites, 3))
+
+ for el in range(self.elnsites):
+ mesh_mean = np.mean(self.el_mesh[el], axis=1)
+ transfer_vector[el] = self.elpos[:, el] - mesh_mean[:]
+
+ for el in range(self.elnsites):
+ new_mesh = self.el_mesh[el].T + transfer_vector[el]
+ self.el_mesh[el] = new_mesh.T
+
+ def rotate_the_electrodes(self):
+ for el in range(self.elnsites):
+ phi_x = self.elrot[el][0]
+ phi_y = self.elrot[el][1]
+ phi_z = self.elrot[el][2]
+
+ rot_x = rotation_matrix([1, 0, 0], phi_x)
+ rot_y = rotation_matrix([0, 1, 0], phi_y)
+ rot_z = rotation_matrix([0, 0, 1], phi_z)
+ rot_xy = rot_x.dot(rot_y)
+ rot_xyz = rot_xy.dot(rot_z)
+ new_mesh = np.dot(rot_xyz, self.el_mesh[el])
+ self.el_mesh[el] = new_mesh
+
+ def set_transfer_resistance(self, gid, seg_coords):
+
+ rho = 300.0 # ohm cm
+ r05 = seg_coords['p05']
+ nseg = r05.shape[1]
+ cell_map = np.zeros((self.elnsites, nseg))
+ for el in six.moves.range(self.elnsites):
+
+ mesh_size = self.el_mesh_size[el]
+
+ for k in range(mesh_size):
+
+ rel = np.expand_dims(self.el_mesh[el][:, k], axis=1)
+ rel_05 = rel - r05
+ r2 = np.einsum('ij,ij->j', rel_05, rel_05)
+ r = np.sqrt(r2)
+ if not all(i >= 10 for i in r):
+ io.log_exception('External electrode is too close')
+ cell_map[el, :] += 1. / r
+
+ cell_map *= (rho / (4 * math.pi)) * 0.01
+ self.trans_X[gid] = cell_map
+
+ def calculate_waveforms(self, tstep):
+ simulation_time = self._dt * tstep
+ # copies waveform elnsites times (homogeneous)
+ self.waveform_amplitude = np.zeros(self.elnsites) + self.waveform.calculate(simulation_time)
+
+ def get_vext(self, gid):
+ waveform_per_mesh = np.divide(self.waveform_amplitude, self.el_mesh_size)
+ v_extracellular = np.dot(waveform_per_mesh, self.trans_X[gid]) * 1E6
+ vext_vec = h.Vector(v_extracellular)
+
+ return vext_vec
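+
+
+# ---------------------------------------------------------------------------
+# Illustrative sketch (not part of the original module): the scaling performed
+# by calculate_waveforms()/get_vext(), with toy numbers and no NEURON objects
+# (get_vext() additionally wraps the result in an h.Vector). Two electrodes
+# with 4 and 8 mesh points, one cell with 3 segments.
+# ---------------------------------------------------------------------------
+if __name__ == '__main__':
+    amplitude = 0.5                               # waveform.calculate(t)
+    el_mesh_size = np.array([4, 8])
+    waveform_amplitude = np.zeros(2) + amplitude  # homogeneous copies per electrode
+    waveform_per_mesh = np.divide(waveform_amplitude, el_mesh_size)
+    trans_X = np.ones((2, 3)) * 0.01              # toy transfer matrix for one gid
+    v_extracellular = np.dot(waveform_per_mesh, trans_X) * 1E6
+    print('per-segment vext: {}'.format(v_extracellular))  # [1875. 1875. 1875.]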
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/xstim.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/xstim.pyc
new file mode 100644
index 0000000..72dd0f7
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/xstim.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/xstim_waveforms.py b/bmtk-vb/bmtk/simulator/bionet/modules/xstim_waveforms.py
new file mode 100644
index 0000000..86e204d
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/modules/xstim_waveforms.py
@@ -0,0 +1,127 @@
+import os
+import numpy as np
+import pandas as pd
+import json
+from six import string_types
+
+from bmtk.simulator.bionet.io_tools import io
+
+class BaseWaveform(object):
+ """Abstraction of waveform class to ensure calculate method is implemented"""
+ def calculate(self, simulation_time):
+ raise NotImplementedError("Implement specific waveform calculation")
+
+
+class BaseWaveformType(object):
+ """Specific waveform type"""
+ def __init__(self, waveform_config):
+ self.amp = float(waveform_config["amp"]) # units? mA?
+ self.delay = float(waveform_config["del"]) # ms
+ self.duration = float(waveform_config["dur"]) # ms
+
+ def is_active(self, simulation_time):
+ stop_time = self.delay + self.duration
+ return self.delay < simulation_time < stop_time
+
+
+class WaveformTypeDC(BaseWaveformType, BaseWaveform):
+ """DC (step) waveform"""
+ def __init__(self, waveform_config):
+ super(WaveformTypeDC, self).__init__(waveform_config)
+
+ def calculate(self, t): # TODO better name
+ if self.is_active(t):
+ return self.amp
+ else:
+ return 0
+
+
+class WaveformTypeSin(BaseWaveformType, BaseWaveform):
+ """Sinusoidal waveform"""
+ def __init__(self, waveform_config):
+ super(WaveformTypeSin, self).__init__(waveform_config)
+ self.freq = float(waveform_config["freq"]) # Hz
+ self.phase_offset = float(waveform_config.get("phase", np.pi)) # radians, optional
+ self.amp_offset = float(waveform_config.get("offset", 0)) # units? mA? optional
+
+ def calculate(self, t): # TODO better name
+ if self.is_active(t):
+ f = self.freq / 1000. # convert Hz to cycles/ms, since t is in ms
+ a = self.amp
+ return a * np.sin(2 * np.pi * f * t + self.phase_offset) + self.amp_offset
+ else:
+ return 0
+
+
+class WaveformCustom(BaseWaveform):
+ """Custom waveform defined by csv file"""
+ def __init__(self, waveform_file):
+ self.definition = pd.read_csv(waveform_file, sep='\t')
+
+ def calculate(self, t):
+ return np.interp(t, self.definition["time"], self.definition["amplitude"])
+
+
+class ComplexWaveform(BaseWaveform):
+ """Superposition of simple waveforms"""
+ def __init__(self, el_collection):
+ self.electrodes = el_collection
+
+ def calculate(self, t):
+ val = 0
+ for el in self.electrodes:
+ val += el.calculate(t)
+
+ return val
+
+
+# mapping from 'shape' code to subclass, always lowercase
+shape_classes = {
+ 'dc': WaveformTypeDC,
+ 'sin': WaveformTypeSin,
+}
+
+
+def stimx_waveform_factory(waveform):
+ """
+ Factory to create correct waveform class based on conf.
+ Supports json config in conf as well as string pointer to a file.
+ :rtype: BaseWaveformType
+ """
+ if isinstance(waveform, string_types):
+ # if waveform is a str or unicode, assume it is the name of a file in stim_dir
+ # waveform_conf = str(waveform_conf) # make consistent
+ file_ext = os.path.splitext(waveform)[1].lower() # splitext returns (root, ext); ext keeps the dot
+ if file_ext == '.csv':
+ return WaveformCustom(waveform)
+
+ elif file_ext == '.json':
+ with open(waveform, 'r') as f:
+ waveform = json.load(f)
+ else:
+ io.log_warning('Unknown filetype for waveform')
+
+ shape_key = waveform["shape"].lower()
+
+ if shape_key not in shape_classes:
+ io.log_warning("Waveform shape not known") # throw error?
+
+ Constructor = shape_classes[shape_key]
+ return Constructor(waveform)
+
+
+def iclamp_waveform_factory(conf):
+ """
+ Factory to create correct waveform class based on conf.
+ Supports json config in conf as well as string pointer to a file.
+ :rtype: BaseWaveformType
+ """
+ iclamp_waveform_conf = conf["iclamp"]
+
+ shape_key = iclamp_waveform_conf["shape"].lower()
+
+ if shape_key not in shape_classes:
+ io.log_warning('iclamp waveform shape not known') # throw error?
+
+ Constructor = shape_classes[shape_key]
+ return Constructor(iclamp_waveform_conf)
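+
+
+# ---------------------------------------------------------------------------
+# Illustrative sketch (not part of the original module): building waveforms
+# from inline dict configs (no file involved) and sampling them at a few
+# times. The parameter values are hypothetical.
+# ---------------------------------------------------------------------------
+if __name__ == '__main__':
+    dc = stimx_waveform_factory({'shape': 'dc', 'amp': 0.5, 'del': 10.0, 'dur': 100.0})
+    sin = stimx_waveform_factory({'shape': 'sin', 'amp': 0.5, 'del': 10.0,
+                                  'dur': 100.0, 'freq': 8.0})
+    for t in (5.0, 50.0, 200.0):  # ms; only 50.0 falls inside the active window
+        print('t={} ms: dc={}, sin={}'.format(t, dc.calculate(t), sin.calculate(t)))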
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/bionet/modules/xstim_waveforms.pyc b/bmtk-vb/bmtk/simulator/bionet/modules/xstim_waveforms.pyc
new file mode 100644
index 0000000..560ade8
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/modules/xstim_waveforms.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/morphology.py b/bmtk-vb/bmtk/simulator/bionet/morphology.py
new file mode 100644
index 0000000..b0085fc
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/morphology.py
@@ -0,0 +1,245 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+from neuron import h
+
+
+pc = h.ParallelContext() # object to access MPI methods
+
+
+class Morphology(object):
+ """Methods for processing morphological data"""
+ def __init__(self, hobj):
+ """reuse hoc object from one of the cells which share the same morphology/model"""
+ self.hobj = hobj
+ self.sec_type_swc = {'soma': 1, 'somatic': 1, # convert section name and section list names
+ 'axon': 2, 'axonal': 2, # into a consistent swc notation
+ 'dend': 3, 'basal': 3,
+ 'apic': 4, 'apical': 4}
+ self.nseg = self.get_nseg()
+ self._segments = {}
+
+ def get_nseg(self):
+ nseg = 0
+ for sec in self.hobj.all:
+ nseg += sec.nseg # get the total # of segments in the cell
+ return nseg
+
+ def get_soma_pos(self):
+ n3dsoma = 0
+ r3dsoma = np.zeros(3)
+ for sec in self.hobj.somatic:
+ n3d = int(h.n3d()) # get number of n3d points in each section
+ r3d = np.zeros((3, n3d)) # to hold locations of 3D morphology for the current section
+ n3dsoma += n3d
+
+ for i in range(n3d):
+ r3dsoma[0] += h.x3d(i)
+ r3dsoma[1] += h.y3d(i)
+ r3dsoma[2] += h.z3d(i)
+
+ r3dsoma /= n3dsoma
+ return r3dsoma
+
+ def calc_seg_coords(self):
+ """Calculate segment coordinates from 3d point coordinates"""
+ ix = 0 # segment index
+
+ p3dsoma = self.get_soma_pos()
+ self.psoma = p3dsoma
+
+ p0 = np.zeros((3, self.nseg)) # hold the coordinates of segment starting points
+ p1 = np.zeros((3, self.nseg)) # hold the coordinates of segment end points
+ p05 = np.zeros((3, self.nseg))
+ d0 = np.zeros(self.nseg)
+ d1 = np.zeros(self.nseg)
+
+ for sec in self.hobj.all:
+ n3d = int(h.n3d()) # get number of n3d points in each section
+ p3d = np.zeros((3, n3d)) # to hold locations of 3D morphology for the current section
+ l3d = np.zeros(n3d) # to hold arc-length positions of the 3D points
+ diam3d = np.zeros(n3d) # to hold diameters
+
+ for i in range(n3d):
+ p3d[0, i] = h.x3d(i) - p3dsoma[0]
+ p3d[1, i] = h.y3d(i) - p3dsoma[1] # shift coordinates such to place soma at the origin.
+ p3d[2, i] = h.z3d(i) - p3dsoma[2]
+ diam3d[i] = h.diam3d(i)
+ l3d[i] = h.arc3d(i)
+
+ l3d /= sec.L # normalize
+ nseg = sec.nseg
+
+ l0 = np.zeros(nseg) # keep range of segment starting point
+ l1 = np.zeros(nseg) # keep range of segment ending point
+ l05 = np.zeros(nseg)
+
+ for iseg, seg in enumerate(sec):
+ l0[iseg] = seg.x - 0.5*1/nseg # x (normalized distance along the section) for the beginning of the segment
+ l1[iseg] = seg.x + 0.5*1/nseg # x for the end of the segment
+ l05[iseg] = seg.x
+
+ if n3d != 0:
+ p0[0, ix:ix+nseg] = np.interp(l0, l3d, p3d[0, :])
+ p0[1, ix:ix+nseg] = np.interp(l0, l3d, p3d[1, :])
+ p0[2, ix:ix+nseg] = np.interp(l0, l3d, p3d[2, :])
+ d0[ix:ix+nseg] = np.interp(l0, l3d, diam3d[:])
+
+ p1[0, ix:ix+nseg] = np.interp(l1, l3d, p3d[0, :])
+ p1[1, ix:ix+nseg] = np.interp(l1, l3d, p3d[1, :])
+ p1[2, ix:ix+nseg] = np.interp(l1, l3d, p3d[2, :])
+ d1[ix:ix+nseg] = np.interp(l1, l3d, diam3d[:])
+
+ p05[0,ix:ix+nseg] = np.interp(l05, l3d, p3d[0,:])
+ p05[1,ix:ix+nseg] = np.interp(l05, l3d, p3d[1,:])
+ p05[2,ix:ix+nseg] = np.interp(l05, l3d, p3d[2,:])
+ else:
+ # If we are dealing with a stub axon, this compartment
+ # will be zero'd out in the calculation of transfer
+ # resistance in modules/ecp.py
+
+ if sec not in self.hobj.axonal:
+ raise Exception("Non-axonal section with 0 3d points (stub)")
+
+ if nseg != 1:
+ raise Exception("in calc_seg_coords(), n3d = 0, but nseg != 1")
+
+ ix += nseg
+
+ self.seg_coords = {}
+
+ self.seg_coords['p0'] = p0
+ self.seg_coords['p1'] = p1
+ self.seg_coords['p05'] = p05
+
+ self.seg_coords['d0'] = d0
+ self.seg_coords['d1'] = d1
+
+ return self.seg_coords
+
+ def set_seg_props(self):
+ """Set segment properties which are invariant for all cell using this morphology"""
+ seg_type = []
+ seg_area = []
+ seg_x = []
+ seg_dist = []
+ seg_length = []
+
+ h.distance(sec=self.hobj.soma[0]) # measure distance relative to the soma
+
+ for sec in self.hobj.all:
+ fullsecname = sec.name()
+ sec_type = fullsecname.split(".")[1][:4] # get sec name type without the cell name
+ sec_type_swc = self.sec_type_swc[sec_type] # convert to swc code
+
+ for seg in sec:
+
+ seg_area.append(h.area(seg.x))
+ seg_x.append(seg.x)
+ seg_length.append(sec.L/sec.nseg)
+ seg_type.append(sec_type_swc) # record section type in a list
+ seg_dist.append(h.distance(seg.x)) # distance to the center of the segment
+
+ self.seg_prop = {}
+ self.seg_prop['type'] = np.array(seg_type)
+ self.seg_prop['area'] = np.array(seg_area)
+ self.seg_prop['x'] = np.array(seg_x)
+ self.seg_prop['dist'] = np.array(seg_dist)
+ self.seg_prop['length'] = np.array(seg_length)
+ self.seg_prop['dist0'] = self.seg_prop['dist'] - self.seg_prop['length']/2
+ self.seg_prop['dist1'] = self.seg_prop['dist'] + self.seg_prop['length']/2
+
+ def get_target_segments(self, edge_type):
+ # Determine the target segments and their probabilities of connections for each new edge-type. Save the
+ # information for each additional time a given edge-type is used on this morphology
+ # TODO: Don't rely on edge-type-table, just use the edge?
+ if edge_type in self._segments:
+ return self._segments[edge_type]
+
+ else:
+ tar_seg_ix, tar_seg_prob = self.find_sections(edge_type.target_sections, edge_type.target_distance)
+ self._segments[edge_type] = (tar_seg_ix, tar_seg_prob)
+ return tar_seg_ix, tar_seg_prob
+
+ """
+ tar_sec_labels = edge_type.target_sections
+ drange = edge_type.target_distance
+ dmin, dmax = drange[0], drange[1]
+
+ seg_d0 = self.seg_prop['dist0'] # use a more compact variables
+ seg_d1 = self.seg_prop['dist1']
+ seg_length = self.seg_prop['length']
+ seg_area = self.seg_prop['area']
+ seg_type = self.seg_prop['type']
+
+ # Find the fractional overlap between the segment and the distance range:
+ # this is done by finding the overlap between [d0,d1] and [dmin,dmax]
+ # np.minimum(seg_d1,dmax) find the smaller of the two end locations
+ # np.maximum(seg_d0,dmin) find the larger of the two start locations
+ # np.maximum(0,overlap) is used to return zero when segments do not overlap
+ # and then dividing by the segment length
+ frac_overlap = np.maximum(0, (np.minimum(seg_d1, dmax) - np.maximum(seg_d0, dmin))) / seg_length
+ ix_drange = np.where(frac_overlap > 0) # find indexes with non-zero overlap
+ ix_labels = np.array([], dtype=np.int)
+
+ for tar_sec_label in tar_sec_labels: # find indexes within sec_labels
+ sec_type = self.sec_type_swc[tar_sec_label] # get swc code for the section label
+ ix_label = np.where(seg_type == sec_type)
+ ix_labels = np.append(ix_labels, ix_label) # target segment indexes
+
+ tar_seg_ix = np.intersect1d(ix_drange, ix_labels) # find intersection between indexes for range and labels
+ tar_seg_length = seg_length[tar_seg_ix] * frac_overlap[tar_seg_ix] # weighted length of targeted segments
+ tar_seg_prob = tar_seg_length / np.sum(tar_seg_length) # probability of targeting segments
+
+ self._segments[edge_type] = (tar_seg_ix, tar_seg_prob)
+ return tar_seg_ix, tar_seg_prob
+ """
+
+ def find_sections(self, target_sections, distance_range):
+ dmin, dmax = distance_range[0], distance_range[1]
+
+ seg_d0 = self.seg_prop['dist0'] # use more compact variable names
+ seg_d1 = self.seg_prop['dist1']
+ seg_length = self.seg_prop['length']
+ seg_area = self.seg_prop['area']
+ seg_type = self.seg_prop['type']
+
+ # Find the fractional overlap between the segment and the distance range:
+ # this is done by finding the overlap between [d0,d1] and [dmin,dmax]
+ # np.minimum(seg_d1,dmax) find the smaller of the two end locations
+ # np.maximum(seg_d0,dmin) find the larger of the two start locations
+ # np.maximum(0,overlap) is used to return zero when segments do not overlap
+ # and then dividing by the segment length
+ frac_overlap = np.maximum(0, (np.minimum(seg_d1, dmax) - np.maximum(seg_d0, dmin))) / seg_length
+ ix_drange = np.where(frac_overlap > 0) # find indexes with non-zero overlap
+ ix_labels = np.array([], dtype=int)
+
+ for tar_sec_label in target_sections: # find indexes within sec_labels
+ sec_type = self.sec_type_swc[tar_sec_label] # get swc code for the section label
+ ix_label = np.where(seg_type == sec_type)
+ ix_labels = np.append(ix_labels, ix_label) # target segment indexes
+
+ tar_seg_ix = np.intersect1d(ix_drange, ix_labels) # find intersection between indexes for range and labels
+ tar_seg_length = seg_length[tar_seg_ix] * frac_overlap[tar_seg_ix] # weighted length of targeted segments
+ tar_seg_prob = tar_seg_length / np.sum(tar_seg_length) # probability of targeting segments
+ return tar_seg_ix, tar_seg_prob
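+
+
+# ---------------------------------------------------------------------------
+# Illustrative sketch (not part of the original module): the fractional-
+# overlap weighting in find_sections(), on three toy segments of length 20 um
+# spanning 0-60 um from the soma, with a [25, 45] um target distance range.
+# ---------------------------------------------------------------------------
+if __name__ == '__main__':
+    seg_d0 = np.array([0.0, 20.0, 40.0])   # segment start distances (um)
+    seg_d1 = np.array([20.0, 40.0, 60.0])  # segment end distances (um)
+    seg_length = seg_d1 - seg_d0
+    dmin, dmax = 25.0, 45.0
+    frac_overlap = np.maximum(0, np.minimum(seg_d1, dmax) - np.maximum(seg_d0, dmin)) / seg_length
+    print('{}'.format(frac_overlap))  # [0.   0.75 0.25]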
diff --git a/bmtk-vb/bmtk/simulator/bionet/morphology.pyc b/bmtk-vb/bmtk/simulator/bionet/morphology.pyc
new file mode 100644
index 0000000..fe2f023
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/morphology.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/nml_reader.py b/bmtk-vb/bmtk/simulator/bionet/nml_reader.py
new file mode 100644
index 0000000..c64b9cd
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/nml_reader.py
@@ -0,0 +1,168 @@
+class NMLTree(object):
+ nml_ns = '{http://www.neuroml.org/schema/neuroml2}'
+ element_registry = {}
+
+ def __init__(self, nml_path):
+ from xml.etree import ElementTree
+ self._nml_path = nml_path
+ self._nml_root = ElementTree.parse(nml_path).getroot()
+ #self._relevant_elements = {
+ # NMLTree.ns_name('channelDensity'): ChannelDensity,
+ # NMLTree.ns_name('resistivity'): Resistivity
+ #}
+
+        # For each section, store all of its NML elements
+ self._soma_props = {}
+ self._axon_props = {}
+ self._dend_props = {}
+ self._apic_props = {}
+        # For lookup by segmentGroup attribute, include common synonyms for the different sections
+ self._section_maps = {
+ 'soma': self._soma_props, 'somatic': self._soma_props,
+ 'axon': self._axon_props, 'axonal': self._axon_props,
+ 'dend': self._dend_props, 'basal': self._dend_props, 'dendritic': self._dend_props,
+ 'apic': self._apic_props, 'apical': self._apic_props
+ }
+
+ self._parse_root(self._nml_root)
+
+ @classmethod
+ def ns_name(cls, name):
+ return '{}{}'.format(cls.nml_ns, name)
+
+ @staticmethod
+ def common_name(elem):
+ if '}' in elem:
+ return elem.split('}')[-1]
+ else:
+ return elem
+
+ @staticmethod
+ def parse_value(value):
+ val_list = value.split(' ')
+ if len(val_list) == 2:
+ return float(val_list[0]), val_list[1]
+ elif len(val_list) == 1:
+ return float(val_list[0]), 'NONE'
+ else:
+ raise Exception('Cannot parse value {}'.format(value))
+
+ @classmethod
+ def register_module(cls, element_cls):
+ cls.element_registry[cls.ns_name(element_cls.element_tag())] = element_cls
+ return element_cls
+
+ def _parse_root(self, root):
+ for elem in root.iter():
+ if elem.tag in NMLTree.element_registry:
+ nml_element = NMLTree.element_registry[elem.tag](elem)
+ self._add_param(nml_element)
+
+ def _add_param(self, nml_element):
+ seggroup_str = nml_element.section
+ if seggroup_str is None:
+ raise Exception('Error: tag {} in {} is missing segmentGroup'.format(nml_element.id, self._nml_path))
+ elif seggroup_str.lower() == 'all':
+ sections = ['soma', 'axon', 'apic', 'dend']
+ else:
+ sections = [seggroup_str.lower()]
+
+ for sec_name in sections:
+ param_table = self._section_maps[sec_name]
+            if nml_element.id in param_table:
+                raise Exception('Error: {} already has a {} element in {}.'.format(sec_name, nml_element.id,
+                                                                                   self._nml_path))
+
+ self._section_maps[sec_name][nml_element.id] = nml_element
+
+ def __getitem__(self, section_name):
+ return self._section_maps[section_name]
+
+
+class NMLElement(object):
+ def __init__(self, nml_element):
+ self._elem = nml_element
+ self._attribs = nml_element.attrib
+
+ self.tag_name = NMLTree.common_name(self._elem.tag)
+ self.section = self._attribs.get('segmentGroup', None)
+ self.id = self._attribs.get('id', self.tag_name)
+
+ @staticmethod
+ def element_tag():
+ raise NotImplementedError()
+
+
+@NMLTree.register_module
+class ChannelDensity(NMLElement):
+ def __init__(self, nml_element):
+ super(ChannelDensity, self).__init__(nml_element)
+ self.ion = self._attribs['ion']
+ self.ion_channel = self._attribs['ionChannel']
+
+ if 'erev' in self._attribs:
+ v_list = NMLTree.parse_value(self._attribs['erev'])
+ self.erev = v_list[0]
+ self.erev_units = v_list[1]
+ else:
+ self.erev = None
+
+ v_list = NMLTree.parse_value(self._attribs['condDensity'])
+ self.cond_density = v_list[0]
+ self.cond_density_units = v_list[1]
+
+ @staticmethod
+ def element_tag():
+ return 'channelDensity'
+
+
+@NMLTree.register_module
+class ChannelDensityNernst(ChannelDensity):
+
+ @staticmethod
+ def element_tag():
+ return 'channelDensityNernst'
+
+
+@NMLTree.register_module
+class Resistivity(NMLElement):
+ def __init__(self, nml_element):
+ super(Resistivity, self).__init__(nml_element)
+ v_list = NMLTree.parse_value(self._attribs['value'])
+ self.value = v_list[0]
+ self.value_units = v_list[1]
+
+ @staticmethod
+ def element_tag():
+ return 'resistivity'
+
+
+@NMLTree.register_module
+class SpecificCapacitance(NMLElement):
+ def __init__(self, nml_element):
+ super(SpecificCapacitance, self).__init__(nml_element)
+ v_list = NMLTree.parse_value(self._attribs['value'])
+ self.value = v_list[0]
+ self.value_units = v_list[1]
+
+ @staticmethod
+ def element_tag():
+ return 'specificCapacitance'
+
+
+@NMLTree.register_module
+class ConcentrationModel(NMLElement):
+ def __init__(self, nml_element):
+ super(ConcentrationModel, self).__init__(nml_element)
+ self.type = self._attribs['type']
+ v_list = NMLTree.parse_value(self._attribs['decay'])
+ self.decay = v_list[0]
+ self.decay_units = v_list[1]
+
+ v_list = NMLTree.parse_value(self._attribs['gamma'])
+ self.gamma = v_list[0]
+ self.gamma_units = v_list[1]
+
+ @staticmethod
+ def element_tag():
+ return 'concentrationModel'
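Since each element class registers itself through @NMLTree.register_module, parsing a file is enough to populate the per-section tables. A usage sketch, assuming a hypothetical NeuroML2 file 'cell_dynamics.nml':

from bmtk.simulator.bionet.nml_reader import NMLTree, ChannelDensity

tree = NMLTree('cell_dynamics.nml')  # hypothetical file; ids depend on its contents

# Everything attached to the soma (elements with segmentGroup="all" appear
# under every section).
for elem_id, elem in tree['soma'].items():
    print(elem_id, elem.tag_name)

# A ChannelDensity exposes its parsed value/unit pairs directly;
# 'somatic' is a registered synonym for 'soma'.
for elem in tree['somatic'].values():
    if isinstance(elem, ChannelDensity):
        print(elem.ion_channel, elem.cond_density, elem.cond_density_units)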
diff --git a/bmtk-vb/bmtk/simulator/bionet/nml_reader.pyc b/bmtk-vb/bmtk/simulator/bionet/nml_reader.pyc
new file mode 100644
index 0000000..5db337a
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/nml_reader.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/nrn.py b/bmtk-vb/bmtk/simulator/bionet/nrn.py
new file mode 100644
index 0000000..c5f8419
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/nrn.py
@@ -0,0 +1,82 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import sys
+import os
+import glob
+import neuron
+from neuron import h
+
+from bmtk.simulator.bionet.pyfunction_cache import py_modules
+from bmtk.simulator.bionet.pyfunction_cache import load_py_modules
+from bmtk.simulator.bionet.pyfunction_cache import synapse_model, synaptic_weight, cell_model
+
+
+pc = h.ParallelContext()
+
+
+def quit_execution(): # quit the execution with a message
+ pc.done()
+ sys.exit()
+ return
+
+
+def clear_gids():
+ pc.gid_clear()
+ pc.barrier()
+
+
+def load_neuron_modules(mechanisms_dir, templates_dir, default_templates=True):
+    """Load NEURON mechanisms and hoc templates needed by bionet.
+
+    :param mechanisms_dir: directory of compiled NEURON mechanisms, or None to skip loading them
+    :param templates_dir: directory of additional .hoc templates (loading currently disabled below)
+    :param default_templates: whether to load the bundled default templates (currently disabled below)
+    """
+ h.load_file('stdgui.hoc')
+
+ bionet_dir = os.path.dirname(__file__)
+ # h.load_file(os.path.join(bionet_dir, 'import3d.hoc')) # customized import3d.hoc to supress warnings
+ # h.load_file('import3d.hoc')
+ h.load_file(os.path.join(bionet_dir,'default_templates', 'advance.hoc'))
+
+ if mechanisms_dir is not None:
+ neuron.load_mechanisms(str(mechanisms_dir))
+
+ # if default_templates:
+ # load_templates(os.path.join(bionet_dir, 'default_templates'))
+
+ # if templates_dir:
+ # load_templates(templates_dir)
+
+
+def load_templates(template_dir):
+ """Load all templates to be available in the hoc namespace for instantiating cells"""
+ cwd = os.getcwd()
+ os.chdir(template_dir)
+
+ hoc_templates = glob.glob("*.hoc")
+
+ for hoc_template in hoc_templates:
+ h.load_file(str(hoc_template))
+
+ os.chdir(cwd)
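A usage sketch with hypothetical paths; the mechanisms are assumed to have been compiled with nrnivmodl beforehand, and load_templates() is called explicitly since the default-template loading above is commented out:

from bmtk.simulator.bionet import nrn

nrn.load_neuron_modules(mechanisms_dir='./components/mechanisms', templates_dir=None)
nrn.load_templates('./components/hoc_templates')  # makes the .hoc cell templates visible to h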
diff --git a/bmtk-vb/bmtk/simulator/bionet/nrn.pyc b/bmtk-vb/bmtk/simulator/bionet/nrn.pyc
new file mode 100644
index 0000000..826481f
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/nrn.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/pointprocesscell.py b/bmtk-vb/bmtk/simulator/bionet/pointprocesscell.py
new file mode 100644
index 0000000..8d0d893
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/pointprocesscell.py
@@ -0,0 +1,85 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from neuron import h
+import six
+from bmtk.simulator.bionet.cell import Cell
+
+
+pc = h.ParallelContext() # object to access MPI methods
+
+
+class PointProcessCell(Cell):
+    """Implementation of a leaky integrate-and-fire neuron type cell."""
+ def __init__(self, node, bionetwork):
+ super(PointProcessCell, self).__init__(node)
+ self.set_spike_detector()
+ self._src_gids = []
+ self._src_nets = []
+ self._edge_type_ids = []
+
+ def set_spike_detector(self):
+ nc = h.NetCon(self.hobj, None)
+ pc.cell(self.gid, nc)
+
+ def set_im_ptr(self):
+ pass
+
+ def set_syn_connection(self, edge_prop, src_node, stim=None):
+ syn_params = edge_prop.dynamics_params
+ nsyns = edge_prop.nsyns
+ delay = edge_prop.delay
+
+ syn_weight = edge_prop.syn_weight(src_node, self._node)
+ if not edge_prop.preselected_targets:
+ # TODO: this is not very robust, need some other way
+ syn_weight *= syn_params['sign'] * nsyns
+
+ if stim is not None:
+ src_gid = -1
+ nc = h.NetCon(stim.hobj, self.hobj)
+ else:
+ src_gid = src_node.node_id
+ nc = pc.gid_connect(src_gid, self.hobj)
+
+ weight = syn_weight
+ nc.weight[0] = weight
+ nc.delay = delay
+ self._netcons.append(nc)
+ self._src_gids.append(src_gid)
+ self._src_nets.append(-1)
+ self._edge_type_ids.append(edge_prop.edge_type_id)
+ return nsyns
+
+ def get_connection_info(self):
+        # TODO: There should be a more efficient and robust way to return synapse information.
+        return [[self.gid, self._src_gids[i], self.network_name, self._src_nets[i], 'NaN', 'NaN',
+                 self.netcons[i].weight[0], self.netcons[i].delay, self._edge_type_ids[i], 1]
+ for i in range(len(self._src_gids))]
+
+ def print_synapses(self):
+ rstr = ''
+ for i in six.moves.range(len(self._src_gids)):
+ rstr += '{}> <-- {} ({}, {})\n'.format(i, self._src_gids[i], self.netcons[i].weight[0],
+ self.netcons[i].delay)
+
+ return rstr
diff --git a/bmtk-vb/bmtk/simulator/bionet/pointprocesscell.pyc b/bmtk-vb/bmtk/simulator/bionet/pointprocesscell.pyc
new file mode 100644
index 0000000..c7ae873
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/pointprocesscell.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/pointsomacell.py b/bmtk-vb/bmtk/simulator/bionet/pointsomacell.py
new file mode 100644
index 0000000..0c96594
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/pointsomacell.py
@@ -0,0 +1,12 @@
+from neuron import h
+from bmtk.simulator.bionet.cell import Cell
+
+
+pc = h.ParallelContext() # object to access MPI methods
+
+
+class PointSomaCell(Cell):
+ """Used to represent single compartment cells with neural mechanisms"""
+ def __init__(self):
+ # TODO: Implement
+ raise NotImplementedError('Point Soma cell types are not currently implemented.')
diff --git a/bmtk-vb/bmtk/simulator/bionet/pointsomacell.pyc b/bmtk-vb/bmtk/simulator/bionet/pointsomacell.pyc
new file mode 100644
index 0000000..5e1ce56
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/pointsomacell.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/pyfunction_cache.py b/bmtk-vb/bmtk/simulator/bionet/pyfunction_cache.py
new file mode 100644
index 0000000..1fa5a26
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/pyfunction_cache.py
@@ -0,0 +1,252 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import types
+import warnings
+from functools import wraps
+
+
+class _PyFunctions(object):
+ """Structure for holding custom user-defined python functions.
+
+    Will store a set of functions created by the user. Do not access this directly; rather, use the
+    decorators or setter functions, and use the py_modules class variable to access individual functions. Is divided
+    up into
+        synaptic_weight: functions for calculating synaptic weight.
+        cell_model: should return a NEURON cell hobj.
+        synapse_model: should return a NEURON synapse object.
+ """
+ def __init__(self):
+ self.__syn_weights = {}
+ self.__cell_models = {}
+ self.__synapse_models = {}
+ self.__cell_processors = {}
+
+ def clear(self):
+ self.__syn_weights.clear()
+ self.__cell_models.clear()
+ self.__synapse_models.clear()
+ self.__cell_processors.clear()
+
+ def add_synaptic_weight(self, name, func, overwrite=True):
+        """Stores a synaptic weight function under the given name"""
+ if overwrite or name not in self.__syn_weights:
+ self.__syn_weights[name] = func
+
+ @property
+ def synaptic_weights(self):
+ """return list of the names of all available synaptic weight functions"""
+ return self.__syn_weights.keys()
+
+ def synaptic_weight(self, name):
+        """return the synaptic weight function"""
+ return self.__syn_weights[name]
+
+ def has_synaptic_weight(self, name):
+ return name in self.__syn_weights
+
+ def __cell_model_key(self, directive, model_type):
+ return (directive, model_type)
+
+ def add_cell_model(self, directive, model_type, func, overwrite=True):
+ key = self.__cell_model_key(directive, model_type)
+ if overwrite or key not in self.__cell_models:
+ self.__cell_models[key] = func
+
+ @property
+ def cell_models(self):
+ return self.__cell_models.keys()
+
+ def cell_model(self, directive, model_type):
+ return self.__cell_models[self.__cell_model_key(directive, model_type)]
+
+ def has_cell_model(self, directive, model_type):
+ return self.__cell_model_key(directive, model_type) in self.__cell_models
+
+ def add_synapse_model(self, name, func, overwrite=True):
+ if overwrite or name not in self.__synapse_models:
+ self.__synapse_models[name] = func
+
+ @property
+ def synapse_models(self):
+ return self.__synapse_models.keys()
+
+ def synapse_model(self, name):
+ return self.__synapse_models[name]
+
+ @property
+ def cell_processors(self):
+ return self.__cell_processors.keys()
+
+ def cell_processor(self, name):
+ return self.__cell_processors[name]
+
+    def add_cell_processor(self, name, func, overwrite=True):
+        if overwrite or name not in self.__cell_processors:
+            self.__cell_processors[name] = func
+
+ def __repr__(self):
+ rstr = '{}: {}\n'.format('cell_models', self.cell_models)
+ rstr += '{}: {}\n'.format('synapse_models', self.synapse_models)
+ rstr += '{}: {}'.format('synaptic_weights', self.synaptic_weights)
+ return rstr
+
+py_modules = _PyFunctions()
+
+
+def synaptic_weight(*wargs, **wkwargs):
+ """A decorator for registering a function as a synaptic weight function.
+ To use either
+ @synaptic_weight
+ def weight_function(): ...
+
+ or
+ @synaptic_weight(name='name_in_edge_types')
+ def weight_function(): ...
+
+ Once the decorator has been attached and imported the functions will automatically be added to py_modules.
+ """
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+ py_modules.add_synaptic_weight(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_synaptic_weight(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
+
+def cell_model(*wargs, **wkwargs):
+ """A decorator for registering NEURON cell loader functions."""
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+ py_modules.add_cell_model(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_cell_model(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
+
+def synapse_model(*wargs, **wkwargs):
+ """A decorator for registering NEURON synapse loader functions."""
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+ py_modules.add_synapse_model(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_synapse_model(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
+
+def add_weight_function(func, name=None, overwrite=True):
+ assert(callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_synaptic_weight(func_name, func, overwrite)
+
+
+def add_cell_model(func, directive, model_type, overwrite=True):
+ assert(callable(func))
+ # func_name = name if name is not None else func.__name__
+ py_modules.add_cell_model(directive, model_type, func, overwrite)
+
+
+def add_cell_processor(func, name=None, overwrite=True):
+ assert(callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_cell_processor(func_name, func, overwrite)
+
+
+def add_synapse_model(func, name=None, overwrite=True):
+ assert (callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_synapse_model(func_name, func, overwrite)
+
+
+def load_py_modules(cell_models=None, syn_models=None, syn_weights=None, cell_processors=None):
+ # py_modules.clear()
+ warnings.warn('Do not call this method directly', DeprecationWarning)
+ if cell_models is not None:
+ assert(isinstance(cell_models, types.ModuleType))
+ for f in [cell_models.__dict__.get(f) for f in dir(cell_models)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_cell_model(f.__name__, f)
+
+ if syn_models is not None:
+ assert(isinstance(syn_models, types.ModuleType))
+ for f in [syn_models.__dict__.get(f) for f in dir(syn_models)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_synapse_model(f.__name__, f)
+
+ if syn_weights is not None:
+ assert(isinstance(syn_weights, types.ModuleType))
+ for f in [syn_weights.__dict__.get(f) for f in dir(syn_weights)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_synaptic_weight(f.__name__, f)
+
+ if cell_processors is not None:
+ assert(isinstance(cell_processors, types.ModuleType))
+ for f in [cell_processors.__dict__.get(f) for f in dir(cell_processors)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_cell_processor(f.__name__, f)
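The synaptic_weight docstring above describes two decorator forms; below is a small sketch of both. The signature (edge_props, src_props, trg_props) is an assumption about how bionet invokes weight functions, and 'gaussianLL' is just an example key that an edge-types CSV would reference through its weight_function column.

from bmtk.simulator.bionet.pyfunction_cache import synaptic_weight, py_modules

@synaptic_weight
def default_weight_fnc(edge_props, src_props, trg_props):
    # Registered under its own name: 'default_weight_fnc'.
    return edge_props['syn_weight']

@synaptic_weight(name='gaussianLL')
def tuned_weight_fnc(edge_props, src_props, trg_props):
    # Registered under the explicit key 'gaussianLL'.
    return edge_props['syn_weight']

print(py_modules.has_synaptic_weight('gaussianLL'))  # True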
diff --git a/bmtk-vb/bmtk/simulator/bionet/pyfunction_cache.pyc b/bmtk-vb/bmtk/simulator/bionet/pyfunction_cache.pyc
new file mode 100644
index 0000000..d325c8b
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/pyfunction_cache.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/schemas/config_schema.json b/bmtk-vb/bmtk/simulator/bionet/schemas/config_schema.json
new file mode 100644
index 0000000..780e7bd
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/schemas/config_schema.json
@@ -0,0 +1,131 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "type": "object",
+
+ "properties": {
+ "target_simulator": {"$ref": "#/definitions/target_simulator"},
+ "components": {"$ref": "#/definitions/components"},
+ "networks": {
+ "type": "object",
+ "properties": {
+ "node_files": {"$ref": "#/definitions/nodes_files"},
+ "edge_files": {"$ref": "#/definitions/edges_files"}
+ }
+ },
+ "run": {"$ref": "#/definitions/run"},
+ "groups": {"$ref": "#/definitions/groups"},
+ "output": {"$ref": "#/definitions/output"},
+ "conditions": {"$ref": "#/definitions/conditions"},
+ "input": {
+ "type": "array",
+ "items": {
+ "oneOf": [
+ {"$ref": "#/definitions/input_file"}
+ ]
+ }
+ }
+ },
+
+ "definitions": {
+ "target_simulator": {
+ "type": "string"
+ },
+
+ "components": {
+ "type": "object",
+ "properties": {
+ "synaptic_models_dir": {"type": "directory", "exists": true},
+ "mechanisms_dir": {"type": "directory", "exists": true},
+ "morphologies_dir": {"type": "directory", "exists": true},
+ "biophysical_neuron_models_dir": {"type": "directory", "exists": true},
+ "point_neuron_models_dir": {"type": "directory", "exists": true},
+ "templates_dir": {"type": "directory", "exists": true}
+ }
+ },
+
+ "edges": {
+ "type": "array",
+ "items": {
+ "type": "object",
+        "properties": {
+ "edges_file": {"type": "file", "exists": true},
+ "edge_types_file": {"type": "file", "exists": true}
+ }
+ }
+ },
+
+ "nodes": {
+ "type": "array",
+ "items": {
+ "type": "object",
+
+ "properties": {
+ "nodes_file": {"type": "file", "exists": true},
+ "node_types_file": {"type": "file", "exists": true}
+ }
+ }
+ },
+
+ "run": {
+ "type": "object",
+ "properties": {
+ "tstop": {"type": "number", "minimum": 0},
+ "dt": {"type": "number", "minimum": 0},
+ "dL": {"type": "number", "minimum": 0},
+ "overwrite_output_dir": {"type": "boolean"},
+ "spike_threshold": {"type": "number"},
+ "save_state": {"type": "boolean"},
+ "start_from_state": {"type": "boolean"},
+ "nsteps_block": {"type": "number", "minimum": 0},
+ "save_cell_vars": {"type": "array"},
+ "calc_ecp": {"type": "boolean"},
+ "optocell": {"type": "array", "items": {"type": "string"}}
+ }
+ },
+
+ "node_id_selections": {
+ "type": "object",
+ "properties": {
+ "save_cell_vars": {"type": "array", "items": {"type": "number"}}
+ }
+ },
+
+ "output": {
+ "type": "object",
+ "properties": {
+ "log_file": {"type": "file"},
+ "spikes_ascii": {"type": "file"},
+ "spikes_h5": {"type": "file"},
+ "cell_vars_dir": {"type": "file"},
+ "extra_cell_vars": {"type": "file"},
+ "ecp_file": {"type": "file"},
+ "state_dir": {"type": "directory"},
+ "output_dir": {"type": "directory"}
+ }
+ },
+
+ "conditions": {
+ "type": "object",
+ "properties": {
+ "celsius": {"type": "number"},
+ "v_init": {"type": "number"},
+ "cao0": {"type": "number"}
+ }
+ },
+
+ "extracellular_electrode": {
+ "type": "object",
+ "properties": {
+ "positions": {"type": "file"}
+ }
+ },
+
+ "input_file": {
+ "type": "object",
+ "properties": {
+ "format": {"type": "string", "enum": ["nwb", "csv"]},
+ "file": {"type": "file", "exists": true}
+ }
+ }
+ }
+}
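For orientation, a minimal configuration consistent with this schema might look like the sketch below (hypothetical values; the custom "directory"/"file" types imply bmtk's own validator rather than a stock JSON-Schema checker):

# Hypothetical minimal simulation config matching the schema above.
example_config = {
    "target_simulator": "NEURON",
    "run": {"tstop": 3000.0, "dt": 0.1, "dL": 20.0, "spike_threshold": -15.0},
    "conditions": {"celsius": 34.0, "v_init": -80.0},
    "output": {"output_dir": "./output", "log_file": "log.txt"},
    "input": [{"format": "nwb", "file": "./inputs/spikes.nwb"}],
}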
diff --git a/bmtk-vb/bmtk/simulator/bionet/schemas/csv_edge_types.json b/bmtk-vb/bmtk/simulator/bionet/schemas/csv_edge_types.json
new file mode 100644
index 0000000..b3e6f59
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/schemas/csv_edge_types.json
@@ -0,0 +1,20 @@
+{
+ "file_type": "csv",
+ "file_properties": {
+ "sep": " "
+ },
+
+ "columns": {
+ "edge_type_id": {"required": true},
+ "target_query": {"required": false},
+ "source_query": {"required": false},
+ "weight_max": {"required": true},
+ "weight_function": {"required": false},
+ "weight_sigma": {"required": false},
+ "distance_range": {"required": true},
+ "target_sections": {"required": true},
+ "delay": {"required": true},
+ "params_file": {"required": true},
+ "set_params_function": {"required": true}
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/bionet/schemas/csv_node_types_external.json b/bmtk-vb/bmtk/simulator/bionet/schemas/csv_node_types_external.json
new file mode 100644
index 0000000..5dd3a08
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/schemas/csv_node_types_external.json
@@ -0,0 +1,11 @@
+{
+ "file_type": "csv",
+ "file_properties": {
+ "sep": " "
+ },
+
+ "columns": {
+ "node_type_id": {"required": true},
+ "level_of_detail": {"required": true}
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/bionet/schemas/csv_node_types_internal.json b/bmtk-vb/bmtk/simulator/bionet/schemas/csv_node_types_internal.json
new file mode 100644
index 0000000..6dc2188
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/schemas/csv_node_types_internal.json
@@ -0,0 +1,15 @@
+{
+ "file_type": "csv",
+ "file_properties": {
+ "sep": " "
+ },
+
+ "columns": {
+ "node_type_id": {"required": true},
+ "params_file": {"required": true},
+ "level_of_detail": {"required": true},
+ "morphology_file": {"required": true},
+ "rotation_angle_zaxis": {"required": true},
+ "set_params_function": {"required": true}
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/bionet/schemas/csv_nodes_external.json b/bmtk-vb/bmtk/simulator/bionet/schemas/csv_nodes_external.json
new file mode 100644
index 0000000..e7240b0
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/schemas/csv_nodes_external.json
@@ -0,0 +1,11 @@
+{
+ "file_type": "csv",
+ "file_properties": {
+ "sep": " "
+ },
+
+ "columns": {
+ "node_id": {"required": true},
+ "node_type_id": {"required": true}
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/bionet/schemas/csv_nodes_internal.json b/bmtk-vb/bmtk/simulator/bionet/schemas/csv_nodes_internal.json
new file mode 100644
index 0000000..f2287b0
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/schemas/csv_nodes_internal.json
@@ -0,0 +1,19 @@
+{
+ "file_type": "csv",
+ "file_properties": {
+ "sep": " "
+ },
+
+ "columns": {
+ "node_id": {"required": true},
+ "node_type_id": {"required": true},
+ "x_soma": {"required": true},
+ "y_soma": {"required": true},
+ "z_soma": {"required": true},
+ "rotation_angle_yaxis": {"required": true},
+ "pop_name": {"required": true},
+ "ei": {"required": true},
+ "location": {"required": false},
+ "tuning_angle": {"required": false}
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/bionet/sonata_adaptors.py b/bmtk-vb/bmtk/simulator/bionet/sonata_adaptors.py
new file mode 100644
index 0000000..91982c6
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/sonata_adaptors.py
@@ -0,0 +1,142 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import types
+import numpy as np
+
+from bmtk.simulator.core.sonata_reader import NodeAdaptor, SonataBaseNode, EdgeAdaptor, SonataBaseEdge
+from bmtk.simulator.bionet import nrn
+
+
+class BioNode(SonataBaseNode):
+ @property
+ def position(self):
+ return self._prop_adaptor.position(self._node)
+
+ @property
+ def morphology_file(self):
+ return self._node['morphology']
+
+ @property
+ def rotation_angle_xaxis(self):
+ return self._prop_adaptor.rotation_angle_xaxis(self._node)
+
+ @property
+ def rotation_angle_yaxis(self):
+        # TODO: Combine rotation angles into a single property
+ return self._prop_adaptor.rotation_angle_yaxis(self._node)
+
+ @property
+ def rotation_angle_zaxis(self):
+ return self._prop_adaptor.rotation_angle_zaxis(self._node)
+
+ def load_cell(self):
+ model_template = self.model_template
+ template_name = model_template[1]
+ model_type = self.model_type
+ if nrn.py_modules.has_cell_model(self['model_template'], model_type):
+ cell_fnc = nrn.py_modules.cell_model(self['model_template'], model_type)
+ else:
+ cell_fnc = nrn.py_modules.cell_model(model_template[0], model_type)
+
+ dynamics_params = self.dynamics_params
+ hobj = cell_fnc(self, template_name, dynamics_params)
+
+ for model_processing_str in self.model_processing:
+ processing_fnc = nrn.py_modules.cell_processor(model_processing_str)
+ hobj = processing_fnc(hobj, self, dynamics_params)
+
+ return hobj
+
+
+class BioNodeAdaptor(NodeAdaptor):
+ def get_node(self, sonata_node):
+ return BioNode(sonata_node, self)
+
+ @classmethod
+ def patch_adaptor(cls, adaptor, node_group, network):
+ node_adaptor = NodeAdaptor.patch_adaptor(adaptor, node_group, network)
+
+ # Position
+ if 'positions' in node_group.all_columns:
+ node_adaptor.position = types.MethodType(positions, adaptor)
+ elif 'position' in node_group.all_columns:
+ node_adaptor.position = types.MethodType(position, adaptor)
+ else:
+ node_adaptor.position = types.MethodType(positions_default, adaptor)
+
+ # Rotation angles
+ if 'rotation_angle_xaxis' in node_group.all_columns:
+ node_adaptor.rotation_angle_xaxis = types.MethodType(rotation_angle_x, node_adaptor)
+ else:
+ node_adaptor.rotation_angle_xaxis = types.MethodType(rotation_angle_default, node_adaptor)
+
+ if 'rotation_angle_yaxis' in node_group.all_columns:
+ node_adaptor.rotation_angle_yaxis = types.MethodType(rotation_angle_y, node_adaptor)
+ else:
+ node_adaptor.rotation_angle_yaxis = types.MethodType(rotation_angle_default, node_adaptor)
+
+ if 'rotation_angle_zaxis' in node_group.all_columns:
+ node_adaptor.rotation_angle_zaxis = types.MethodType(rotation_angle_z, node_adaptor)
+ else:
+ node_adaptor.rotation_angle_zaxis = types.MethodType(rotation_angle_default, node_adaptor)
+
+ return node_adaptor
+
+
+def positions_default(self, node):
+ return np.array([0.0, 0.0, 0.0])
+
+
+def positions(self, node):
+ return node['positions']
+
+
+def position(self, node):
+ return node['position']
+
+
+def rotation_angle_default(self, node):
+ return 0.0
+
+
+def rotation_angle_x(self, node):
+ return node['rotation_angle_xaxis']
+
+
+def rotation_angle_y(self, node):
+ return node['rotation_angle_yaxis']
+
+
+def rotation_angle_z(self, node):
+ return node['rotation_angle_zaxis']
+
+
+class BioEdge(SonataBaseEdge):
+ def load_synapses(self, section_x, section_id):
+ synapse_fnc = nrn.py_modules.synapse_model(self.model_template)
+ return synapse_fnc(self.dynamics_params, section_x, section_id)
+
+
+class BioEdgeAdaptor(EdgeAdaptor):
+ def get_edge(self, sonata_edge):
+ return BioEdge(sonata_edge, self)
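patch_adaptor binds one of the free accessor functions above onto the adaptor with types.MethodType, so the column-existence decision is made once per node group rather than once per node lookup. A standalone sketch of that pattern (the adaptor class and dict row are hypothetical stand-ins):

import types
from bmtk.simulator.bionet.sonata_adaptors import rotation_angle_default, rotation_angle_y

class _DemoAdaptor(object):
    pass

adaptor = _DemoAdaptor()

# Column absent: bind the constant-zero accessor once, up front.
adaptor.rotation_angle_yaxis = types.MethodType(rotation_angle_default, adaptor)
print(adaptor.rotation_angle_yaxis(None))  # 0.0

# Column present: bind the lookup accessor (a dict stands in for a sonata row).
adaptor.rotation_angle_yaxis = types.MethodType(rotation_angle_y, adaptor)
print(adaptor.rotation_angle_yaxis({'rotation_angle_yaxis': 1.57}))  # 1.57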
diff --git a/bmtk-vb/bmtk/simulator/bionet/sonata_adaptors.pyc b/bmtk-vb/bmtk/simulator/bionet/sonata_adaptors.pyc
new file mode 100644
index 0000000..ea79c88
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/sonata_adaptors.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/utils.py b/bmtk-vb/bmtk/simulator/bionet/utils.py
new file mode 100644
index 0000000..ca63bc6
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/utils.py
@@ -0,0 +1,84 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import numpy as np
+import math
+import json
+import pandas as pd
+import h5py
+
+from neuron import h
+
+
+def rotation_matrix(axis, theta):
+ """Return the rotation matrix associated with counterclockwise rotation about the given axis by theta radians.
+ """
+ axis = np.asarray(axis)
+ theta = np.asarray(theta)
+ axis = axis/math.sqrt(np.dot(axis, axis))
+ a = math.cos(theta/2.0)
+ b, c, d = -axis*math.sin(theta/2.0)
+ aa, bb, cc, dd = a*a, b*b, c*c, d*d
+ bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d
+
+ return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],
+ [2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab)],
+ [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])
+
+
+def edge_converter_csv(output_dir, csv_file):
+    """Currently being used by BioNetwork.write_connections(), need to refactor
+
+ :param output_dir:
+ :param csv_file:
+ :return:
+ """
+ syns_df = pd.read_csv(csv_file, sep=' ')
+ for name, group in syns_df.groupby(['trg_network', 'src_network']):
+ trg_net, src_net = name
+ group_len = len(group.index)
+ with h5py.File(os.path.join(output_dir, '{}_{}_edges.h5'.format(trg_net, src_net)), 'w') as conns_h5:
+ conns_h5.create_dataset('edges/target_gid', data=group['trg_gid'])
+ conns_h5.create_dataset('edges/source_gid', data=group['src_gid'])
+ conns_h5.create_dataset('edges/edge_type_id', data=group['edge_type_id'])
+ conns_h5.create_dataset('edges/edge_group', data=group['connection_group'])
+
+ group_counters = {group_id: 0 for group_id in group.connection_group.unique()}
+ edge_group_indicies = np.zeros(group_len, dtype=np.uint)
+ for i, group_id in enumerate(group['connection_group']):
+ edge_group_indicies[i] = group_counters[group_id]
+ group_counters[group_id] += 1
+ conns_h5.create_dataset('edges/edge_group_indicies', data=edge_group_indicies)
+
+ for group_class, sub_group in group.groupby('connection_group'):
+ grp = conns_h5.create_group('edges/{}'.format(group_class))
+ if group_class == 0:
+ grp.create_dataset('sec_id', data=sub_group['segment'], dtype='int')
+ grp.create_dataset('sec_x', data=sub_group['section'])
+ grp.create_dataset('syn_weight', data=sub_group['weight'])
+ grp.create_dataset('delay', data=sub_group['delay'])
+ elif group_class == 1:
+ grp.create_dataset('syn_weight', data=sub_group['weight'])
+ grp.create_dataset('delay', data=sub_group['delay'])
+ else:
+ print('Unknown cell group {}'.format(group_class))
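rotation_matrix is easy to sanity-check numerically: a quarter turn about z should carry the x unit vector onto y, and the result should be orthonormal. A quick check, not part of the module:

import math
import numpy as np
from bmtk.simulator.bionet.utils import rotation_matrix

R = rotation_matrix([0, 0, 1], math.pi / 2.0)
print(np.dot(R, [1.0, 0.0, 0.0]))              # ~[0., 1., 0.]
print(np.allclose(np.dot(R, R.T), np.eye(3)))  # True: rotations are orthonormal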
diff --git a/bmtk-vb/bmtk/simulator/bionet/utils.pyc b/bmtk-vb/bmtk/simulator/bionet/utils.pyc
new file mode 100644
index 0000000..efa923f
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/utils.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/bionet/virtualcell.py b/bmtk-vb/bmtk/simulator/bionet/virtualcell.py
new file mode 100644
index 0000000..64b3929
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/bionet/virtualcell.py
@@ -0,0 +1,51 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from neuron import h
+
+
+class VirtualCell(object):
+ """Representation of a Virtual/External node"""
+
+ def __init__(self, node, spike_train_dataset):
+ # VirtualCell is currently not a subclass of bionet.Cell class b/c the parent has a bunch of properties that
+ # just don't apply to a virtual cell. May want to make bionet.Cell more generic in the future.
+ self._node_id = node.node_id
+ self._hobj = None
+ self._spike_train_dataset = spike_train_dataset
+ self._train_vec = []
+ self.set_stim(node, self._spike_train_dataset)
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def hobj(self):
+ return self._hobj
+
+ def set_stim(self, stim_prop, spike_train):
+ """Gets the spike trains for each individual cell."""
+ self._train_vec = h.Vector(spike_train.get_spikes(self.node_id))
+ vecstim = h.VecStim()
+ vecstim.play(self._train_vec)
+ self._hobj = vecstim
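VirtualCell only needs an object exposing get_spikes(node_id). A sketch driving one from an in-memory spike list, assuming NEURON is installed and the VecStim mechanism has been compiled:

from bmtk.simulator.bionet.virtualcell import VirtualCell

class _ListTrains(object):
    """Stand-in for the spike-train dataset interface used above."""
    def __init__(self, spikes_by_node):
        self._spikes = spikes_by_node

    def get_spikes(self, node_id):
        return self._spikes[node_id]

class _StubNode(object):
    node_id = 0

vcell = VirtualCell(_StubNode(), _ListTrains({0: [10.0, 25.0, 40.0]}))  # times in ms
print(vcell.node_id, vcell.hobj)  # VecStim ready to be wired into NetCons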
diff --git a/bmtk-vb/bmtk/simulator/bionet/virtualcell.pyc b/bmtk-vb/bmtk/simulator/bionet/virtualcell.pyc
new file mode 100644
index 0000000..d11300f
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/bionet/virtualcell.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/__init__.py b/bmtk-vb/bmtk/simulator/core/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/bmtk-vb/bmtk/simulator/core/__init__.pyc b/bmtk-vb/bmtk/simulator/core/__init__.pyc
new file mode 100644
index 0000000..288e3ee
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/simulator/core/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..d76cbf5
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/__pycache__/io_tools.cpython-37.pyc b/bmtk-vb/bmtk/simulator/core/__pycache__/io_tools.cpython-37.pyc
new file mode 100644
index 0000000..0144fff
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/__pycache__/io_tools.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/__pycache__/network_reader.cpython-37.pyc b/bmtk-vb/bmtk/simulator/core/__pycache__/network_reader.cpython-37.pyc
new file mode 100644
index 0000000..9499c53
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/__pycache__/network_reader.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/__pycache__/node_sets.cpython-37.pyc b/bmtk-vb/bmtk/simulator/core/__pycache__/node_sets.cpython-37.pyc
new file mode 100644
index 0000000..e8b067d
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/__pycache__/node_sets.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/__pycache__/simulator.cpython-37.pyc b/bmtk-vb/bmtk/simulator/core/__pycache__/simulator.cpython-37.pyc
new file mode 100644
index 0000000..06c3afe
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/__pycache__/simulator.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/__pycache__/simulator_network.cpython-37.pyc b/bmtk-vb/bmtk/simulator/core/__pycache__/simulator_network.cpython-37.pyc
new file mode 100644
index 0000000..50dbed5
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/__pycache__/simulator_network.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/config.py b/bmtk-vb/bmtk/simulator/core/config.py
new file mode 100644
index 0000000..8a36dc7
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/config.py
@@ -0,0 +1,436 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+from bmtk.simulator.utils.config import ConfigDict
+
+'''
+import os
+import json
+import re
+import copy
+import datetime
+from six import string_types
+
+
+from bmtk.simulator.core.io_tools import io
+
+
+def from_json(config_file, validator=None):
+ """Builds and validates a configuration json file.
+
+ :param config_file: File object or path to a json file.
+ :param validator: A SimConfigValidator object to validate json file. Won't validate if set to None
+ :return: A dictionary, verified against json validator and with manifest variables resolved.
+ """
+ #print(config_file)
+ #if os.path.isfile(config_file):
+ #if isinstance(config_file, file):
+ # conf = json.load(config_file)
+ if isinstance(config_file, string_types):
+ conf = json.load(open(config_file, 'r'))
+ elif isinstance(config_file, dict):
+ conf = config_file.copy()
+ else:
+ raise Exception('{} is not a file or file path.'.format(config_file))
+
+ # insert file path into dictionary
+ if 'config_path' not in conf:
+ conf['config_path'] = os.path.abspath(config_file)
+ conf['config_dir'] = os.path.dirname(conf['config_path'])
+
+ # Will resolve manifest variables and validate
+ return from_dict(conf, validator)
+
+
+def from_dict(config_dict, validator=None):
+ """Builds and validates a configuration json dictionary object. Best to directly use from_json when possible.
+
+ :param config_dict: Dictionary object
+ :param validator: A SimConfigValidator object to validate json file. Won't validate if set to None
+ :return: A dictionary, verified against json validator and with manifest variables resolved.
+ """
+ assert(isinstance(config_dict, dict))
+ conf = copy.deepcopy(config_dict) # Since the functions will mutate the dictionary we will copy just-in-case.
+
+ if 'config_path' not in conf:
+ conf['config_path'] = os.path.join(os.getcwd(), 'tmp_cfg.dict')
+ conf['config_dir'] = os.path.dirname(conf['config_path'])
+
+ # Build the manifest and resolve variables.
+ # TODO: Check that manifest exists
+ manifest = __build_manifest(conf)
+ conf['manifest'] = manifest
+ __recursive_insert(conf, manifest)
+
+ # In our work with Blue-Brain it was agreed that 'network' and 'simulator' parts of config may be split up into
+ # separate files. If this is the case we build each sub-file separately and merge into this one
+ for childconfig in ['network', 'simulation']:
+ if childconfig in conf and isinstance(conf[childconfig], string_types):
+ # Try to resolve the path of the network/simulation config files. If an absolute path isn't used find
+ # the file relative to the current config file. TODO: test if this will work on windows?
+ conf_str = conf[childconfig]
+ conf_path = conf_str if conf_str.startswith('/') else os.path.join(conf['config_dir'], conf_str)
+
+ # Build individual json file and merge into parent.
+ child_json = from_json(conf_path)
+ del child_json['config_path'] # we don't want 'config_path' of parent being overwritten.
+ conf.update(child_json)
+
+ # Run the validator
+ if validator is not None:
+ validator.validate(conf)
+
+ return conf
+
+
+def copy_config(conf):
+ """Copy configuration file to different directory, with manifest variables resolved.
+
+ :param conf: configuration dictionary
+ """
+ output_dir = conf.output_dir
+ config_name = os.path.basename(conf['config_path'])
+ output_path = os.path.join(output_dir, config_name)
+ with open(output_path, 'w') as fp:
+ out_cfg = conf.copy()
+ if 'manifest' in out_cfg:
+ del out_cfg['manifest']
+ json.dump(out_cfg, fp, indent=2)
+
+
+def __special_variables(conf):
+ """A list of preloaded variables to insert into the manifest, containing things like path to run-time directory,
+ configuration directory, etc.
+ """
+ pre_manifest = dict()
+ pre_manifest['$workingdir'] = os.path.dirname(os.getcwd())
+ if 'config_path' in conf:
+ pre_manifest['$configdir'] = os.path.dirname(conf['config_path']) # path of configuration file
+ pre_manifest['$configfname'] = conf['config_path']
+
+ dt_now = datetime.datetime.now()
+ pre_manifest['$time'] = dt_now.strftime('%H-%M-%S')
+ pre_manifest['$date'] = dt_now.strftime('%Y-%m-%d')
+ pre_manifest['$datetime'] = dt_now.strftime('%Y-%m-%d_%H-%M-%S')
+
+ return pre_manifest
+
+
+def __build_manifest(conf):
+ """Resolves the manifest section and resolve any internal variables"""
+ if 'manifest' not in conf:
+ return __special_variables(conf)
+
+ manifest = conf["manifest"]
+ resolved_manifest = __special_variables(conf)
+ resolved_keys = set()
+ unresolved_keys = set(manifest.keys())
+
+ # No longer using recursion since that can lead to an infinite loop if the person who writes the config file isn't
+    # careful. Also added code to allow for ${VAR} format in case the user wants to use "$.../some_${MODEL}_here/..."
+ while unresolved_keys:
+ for key in unresolved_keys:
+ # Find all variables in manifest and see if they can be replaced by the value in resolved_manifest
+ value = __find_variables(manifest[key], resolved_manifest)
+
+            # If the value no longer has variables, add the key-value pair to resolved_manifest and remove it from unresolved_keys
+ if value.find('$') < 0:
+ resolved_manifest[key] = value
+ resolved_keys.add(key)
+
+ # remove resolved key-value pairs from set, and make sure at every iteration unresolved_keys shrinks to prevent
+ # infinite loops
+ n_unresolved = len(unresolved_keys)
+ unresolved_keys -= resolved_keys
+ if n_unresolved == len(unresolved_keys):
+ msg = "Unable to resolve manifest variables: {}".format(unresolved_keys)
+ raise Exception(msg)
+
+ return resolved_manifest
+
+
+def __recursive_insert(json_obj, manifest):
+ """Loop through the config and substitute the path variables (e.g.: $MY_DIR) with the values from the manifest
+
+ :param json_obj: A json dictionary object that may contain variables needing to be resolved.
+ :param manifest: A dictionary of variable values
+    :return: A new json dictionary config file with variables resolved
+ """
+ if isinstance(json_obj, string_types):
+ return __find_variables(json_obj, manifest)
+
+ elif isinstance(json_obj, list):
+ new_list = []
+ for itm in json_obj:
+ new_list.append(__recursive_insert(itm, manifest))
+ return new_list
+
+ elif isinstance(json_obj, dict):
+ for key, val in json_obj.items():
+ if key == 'manifest':
+ continue
+ json_obj[key] = __recursive_insert(val, manifest)
+
+ return json_obj
+
+ else:
+ return json_obj
+
+
+def __find_variables(json_str, manifest):
+ """Replaces variables (i.e. $VAR, ${VAR}) with their values from the manifest.
+
+ :param json_str: a json string that may contain none, one or multiple variable
+ :param manifest: dictionary of variable lookup values
+ :return: json_str with resolved variables. Won't resolve variables that don't exist in manifest.
+ """
+    variables = [m for m in re.finditer(r'\$\{?[\w]+\}?', json_str)]
+ for var in variables:
+ var_lookup = var.group()
+ if var_lookup.startswith('${') and var_lookup.endswith('}'):
+ # replace ${VAR} with $VAR
+ var_lookup = "$" + var_lookup[2:-1]
+ if var_lookup in manifest:
+ json_str = json_str.replace(var.group(), manifest[var_lookup])
+
+ return json_str
+
+
+class ConfigDict(dict):
+ def __init__(self, *args, **kwargs):
+ self.update(*args, **kwargs)
+ self._env_built = False
+ self._io = None
+
+ self._node_set = {}
+ self._load_node_set()
+
+ @property
+ def io(self):
+ if self._io is None:
+ self._io = io
+ return self._io
+
+ @io.setter
+ def io(self, io):
+ self._io = io
+
+ @property
+ def run(self):
+ return self['run']
+
+ @property
+ def tstart(self):
+ return self.run.get('tstart', 0.0)
+
+ @property
+ def tstop(self):
+ return self.run['tstop']
+
+ @property
+ def dt(self):
+ return self.run.get('dt', 0.1)
+
+ @property
+ def spike_threshold(self):
+ return self.run.get('spike_threshold', -15.0)
+
+ @property
+ def dL(self):
+ return self.run.get('dL', 20.0)
+
+ @property
+ def gid_mappings(self):
+ return self.get('gid_mapping_file', None)
+
+ @property
+ def block_step(self):
+ return self.run.get('nsteps_block', 5000)
+
+ @property
+ def conditions(self):
+ return self['conditions']
+
+ @property
+ def celsius(self):
+ return self.conditions['celsius']
+
+ @property
+ def v_init(self):
+ return self.conditions['v_init']
+
+ @property
+ def path(self):
+ return self['config_path']
+
+ @property
+ def output(self):
+ return self['output']
+
+ @property
+ def output_dir(self):
+ return self.output['output_dir']
+
+ @property
+ def overwrite_output(self):
+ return self.output.get('overwrite_output_dir', False)
+
+ @property
+ def log_file(self):
+ return self.output['log_file']
+
+ @property
+ def components(self):
+ return self.get('components', {})
+
+ @property
+ def morphologies_dir(self):
+ return self.components['morphologies_dir']
+
+ @property
+ def synaptic_models_dir(self):
+ return self.components['synaptic_models_dir']
+
+ @property
+ def point_neuron_models_dir(self):
+ return self.components['point_neuron_models_dir']
+
+ @property
+ def mechanisms_dir(self):
+ return self.components['mechanisms_dir']
+
+ @property
+ def biophysical_neuron_models_dir(self):
+ return self.components['biophysical_neuron_models_dir']
+
+ @property
+ def templates_dir(self):
+ return self.components.get('templates_dir', None)
+
+ @property
+ def with_networks(self):
+ return 'networks' in self and len(self.nodes) > 0
+
+ @property
+ def networks(self):
+ return self['networks']
+
+ @property
+ def nodes(self):
+ return self.networks.get('nodes', [])
+
+ @property
+ def edges(self):
+ return self.networks.get('edges', [])
+
+ @property
+ def reports(self):
+ return self.get('reports', {})
+
+ @property
+ def inputs(self):
+ return self.get('inputs', {})
+
+ @property
+ def node_sets(self):
+ return self._node_set
+
+ def _load_node_set(self):
+ if 'node_sets_file' in self.keys():
+ node_set_val = self['node_sets_file']
+ elif 'node_sets' in self.keys():
+ node_set_val = self['node_sets']
+ else:
+ self._node_set = {}
+ return
+
+ if isinstance(node_set_val, dict):
+ self._node_set = node_set_val
+ else:
+ try:
+ self._node_set = json.load(open(node_set_val, 'r'))
+ except Exception as e:
+ io.log_exception('Unable to load node_sets_file {}'.format(node_set_val))
+
+ def copy_to_output(self):
+ copy_config(self)
+
+ def get_modules(self, module_name):
+ return [report for report in self.reports.values() if report['module'] == module_name]
+
+ def _set_logging(self):
+ """Check if log-level and/or log-format string is being changed through the config"""
+ output_sec = self.output
+ if 'log_format' in output_sec:
+ self._io.set_log_format(output_sec['log_format'])
+
+ if 'log_level' in output_sec:
+ self._io.set_log_level(output_sec['log_level'])
+
+ if 'log_to_console' in output_sec:
+ self._io.log_to_console = output_sec['log_to_console']
+
+ if 'quiet_simulator' in output_sec and output_sec['quiet_simulator']:
+ self._io.quiet_simulator()
+
+ def build_env(self):
+ if self._env_built:
+ return
+
+ self._set_logging()
+ self.io.setup_output_dir(self.output_dir, self.log_file, self.overwrite_output)
+ self.copy_to_output()
+ self._env_built = True
+
+ @staticmethod
+ def get_validator():
+ raise NotImplementedError
+
+ @classmethod
+ def from_json(cls, config_file, validate=False):
+ validator = cls.get_validator() if validate else None
+ return cls(from_json(config_file, validator))
+
+ @classmethod
+ def from_dict(cls, config_dict, validate=False):
+ validator = cls.get_validator() if validate else None
+ return cls(from_dict(config_dict, validator))
+
+ @classmethod
+ def from_yaml(cls, config_file, validate=False):
+ raise NotImplementedError
+
+ @classmethod
+ def load(cls, config_file, validate=False):
+ # Implement factory method that can resolve the format/type of input configuration.
+ if isinstance(config_file, dict):
+ return cls.from_dict(config_file, validate)
+ elif isinstance(config_file, string_types):
+ if config_file.endswith('yml') or config_file.endswith('yaml'):
+ return cls.from_yaml(config_file, validate)
+ else:
+ return cls.from_json(config_file, validate)
+ else:
+            raise Exception('Unable to load configuration of type {}'.format(type(config_file)))
+'''
+
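+# --- Editor's note: illustrative usage sketch, not part of the original -----
+# module; 'config.json' is a hypothetical path. load() dispatches on the
+# argument type to from_dict(), from_yaml() or from_json():
+#
+#     cfg = ConfigDict.load('config.json')
+#     print(cfg.tstart, cfg.tstop, cfg.dt)   # run-section accessors
+#     cfg.build_env()                        # sets up logging and output dir
+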
diff --git a/bmtk-vb/bmtk/simulator/core/edge_population.py b/bmtk-vb/bmtk/simulator/core/edge_population.py
new file mode 100644
index 0000000..5dfa06c
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/edge_population.py
@@ -0,0 +1,21 @@
+class SimEdge(object):
+ @property
+ def node_id(self):
+ raise NotImplementedError()
+
+ @property
+ def gid(self):
+ raise NotImplementedError()
+
+
+class EdgePopulation(object):
+ @property
+ def source_nodes(self):
+ raise NotImplementedError()
+
+ @property
+ def target_nodes(self):
+ raise NotImplementedError()
+
+ def initialize(self, network):
+ raise NotImplementedError()
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/core/graph.py b/bmtk-vb/bmtk/simulator/core/graph.py
new file mode 100644
index 0000000..1e56ef1
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/graph.py
@@ -0,0 +1,435 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+import ast
+import numpy as np
+
+from bmtk.simulator.core.config import ConfigDict
+#import config as cfg
+from bmtk.simulator.utils.property_maps import NodePropertyMap, EdgePropertyMap
+from bmtk.utils import sonata
+from bmtk.simulator.core.io_tools import io
+
+from bmtk.simulator.core.node_sets import NodeSet, NodeSetAll
+
+
+"""Creates a graph of nodes and edges from multiple network files for all simulators.
+
+Consists of edges and nodes. All classes are abstract and should be reimplemented by a specific simulator. Also
+contains base factory methods for building a network from a config file (or other source).
+"""
+
+
+class SimEdge(object):
+ def __init__(self, original_params, dynamics_params):
+ self._orig_params = original_params
+ self._dynamics_params = dynamics_params
+ self._updated_params = {'dynamics_params': self._dynamics_params}
+
+ @property
+ def edge_type_id(self):
+ return self._orig_params['edge_type_id']
+
+ def __getitem__(self, item):
+ if item in self._updated_params:
+ return self._updated_params[item]
+ else:
+ return self._orig_params[item]
+
+
+class SimNode(object):
+ def __init__(self, node_id, graph, network, params):
+ self._node_id = node_id
+ self._graph = graph
+ self._graph_params = params
+ self._node_type_id = params['node_type_id']
+ self._network = network
+ self._updated_params = {}
+
+ self._model_params = {}
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def node_type_id(self):
+ return self._node_type_id
+
+ @property
+ def network(self):
+ """Name of network node belongs too."""
+ return self._network
+
+ @property
+ def model_params(self):
+ """Parameters (json file, nml, dictionary) that describe a specific node"""
+ return self._model_params
+
+ @model_params.setter
+ def model_params(self, value):
+ self._model_params = value
+
+ def __contains__(self, item):
+ return item in self._updated_params or item in self._graph_params
+
+ def __getitem__(self, item):
+ if item in self._updated_params:
+ return self._updated_params[item]
+ else:
+ return self._graph_params[item]
+
+
+class SimGraph(object):
+ model_type_col = 'model_type'
+
+ def __init__(self):
+ self._components = {} # components table, i.e. paths to model files.
+ self._io = io
+
+ self._node_property_maps = {}
+ self._edge_property_maps = {}
+
+ self._node_populations = {}
+ self._internal_populations_map = {}
+ self._virtual_populations_map = {}
+
+ self._virtual_cells_nid = {}
+
+ self._recurrent_edges = {}
+ self._external_edges = {}
+
+ self._node_sets = {}
+ self._using_gids = False
+
+ @property
+ def io(self):
+ return self._io
+
+ '''
+ @property
+ def internal_pop_names(self):
+ return self
+ '''
+
+ @property
+ def node_populations(self):
+ return list(self._node_populations.keys())
+
+ def get_node_set(self, node_set):
+ if node_set in self._node_sets.keys():
+ return self._node_sets[node_set]
+
+ elif isinstance(node_set, (dict, list)):
+ return NodeSet(node_set, self)
+
+ else:
+ self.io.log_exception('Unable to load or find node_set "{}"'.format(node_set))
+
+ def get_node_populations(self):
+ return self._node_populations.values()
+
+ def get_node_population(self, population_name):
+ return self._node_populations[population_name]
+
+ def get_component(self, key):
+ """Get the value of item in the components dictionary.
+
+ :param key: name of component
+ :return: value assigned to component
+ """
+ return self._components[key]
+
+ def add_component(self, key, value):
+ """Add a component key-value pair
+
+ :param key: name of component
+ :param value: value
+ """
+ self._components[key] = value
+
+ '''
+ def _from_json(self, file_name):
+ return cfg.from_json(file_name)
+ '''
+
+ def _validate_components(self):
+ """Make sure various components (i.e. paths) exists before attempting to build the graph."""
+ return True
+
+ def _create_nodes_prop_map(self, grp):
+ return NodePropertyMap()
+
+ def _create_edges_prop_map(self, grp):
+ return EdgePropertyMap()
+
+ def __avail_model_types(self, population):
+ model_types = set()
+ for grp in population.groups:
+ if self.model_type_col not in grp.all_columns:
+ self.io.log_exception('model_type is missing from nodes.')
+
+ model_types.update(set(np.unique(grp.get_values(self.model_type_col))))
+ return model_types
+
+ def _preprocess_node_types(self, node_population):
+ # TODO: The following figures out the actually used node-type-ids. For mem and speed may be better to just
+ # process them all
+ node_type_ids = node_population.type_ids
+ # TODO: Verify all the node_type_ids are in the table
+ node_types_table = node_population.types_table
+
+        # TODO: Convert model_type to an enum
+ morph_dir = self.get_component('morphologies_dir')
+ if morph_dir is not None and 'morphology' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ if node_type['morphology'] is None:
+ continue
+                # TODO: Check the file exists
+ # TODO: See if absolute path is stored in csv
+ node_type['morphology'] = os.path.join(morph_dir, node_type['morphology'])
+
+ if 'dynamics_params' in node_types_table.columns and 'model_type' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ dynamics_params = node_type['dynamics_params']
+ if isinstance(dynamics_params, dict):
+ continue
+
+ model_type = node_type['model_type']
+ if model_type == 'biophysical':
+ params_dir = self.get_component('biophysical_neuron_models_dir')
+ elif model_type == 'point_process':
+ params_dir = self.get_component('point_neuron_models_dir')
+ elif model_type == 'point_soma':
+ params_dir = self.get_component('point_neuron_models_dir')
+ else:
+ # Not sure what to do in this case, throw Exception?
+ params_dir = self.get_component('custom_neuron_models')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ node_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ self.io.log_exception('Could not find node dynamics_params file {}.'.format(params_path))
+
+ def _preprocess_edge_types(self, edge_pop):
+ edge_types_table = edge_pop.types_table
+ edge_type_ids = np.unique(edge_pop.type_ids)
+
+ for et_id in edge_type_ids:
+ edge_type = edge_types_table[et_id]
+ if 'dynamics_params' in edge_types_table.columns:
+ dynamics_params = edge_type['dynamics_params']
+ params_dir = self.get_component('synaptic_models_dir')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ edge_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ self.io.log_exception('Could not find edge dynamics_params file {}.'.format(params_path))
+
+ # Split target_sections
+ if 'target_sections' in edge_type:
+ trg_sec = edge_type['target_sections']
+ if trg_sec is not None:
+ try:
+ edge_type['target_sections'] = ast.literal_eval(trg_sec)
+ except Exception as exc:
+ self.io.log_warning('Unable to split target_sections list {}'.format(trg_sec))
+ edge_type['target_sections'] = None
+
+ # Split target distances
+ if 'distance_range' in edge_type:
+ dist_range = edge_type['distance_range']
+ if dist_range is not None:
+                    try:
+                        # TODO: Make sure distance_range has at most two values
+ edge_type['distance_range'] = json.loads(dist_range)
+ except Exception as e:
+ try:
+ edge_type['distance_range'] = [0.0, float(dist_range)]
+ except Exception as e:
+ self.io.log_warning('Unable to parse distance_range {}'.format(dist_range))
+ edge_type['distance_range'] = None
+
+ def external_edge_populations(self, src_pop, trg_pop):
+ return self._external_edges.get((src_pop, trg_pop), [])
+
+ def add_nodes(self, sonata_file, populations=None):
+ """Add nodes from a network to the graph.
+
+ :param sonata_file: A NodesFormat type object containing list of nodes.
+        :param populations: names of the node populations to load. If None, all populations in the file are used.
+ """
+ nodes = sonata_file.nodes
+
+ selected_populations = nodes.population_names if populations is None else populations
+ for pop_name in selected_populations:
+ if pop_name not in nodes:
+                # allows the user to simulate only a subset of the populations in the file
+ continue
+
+ if pop_name in self.node_populations:
+                # Make sure there aren't any collisions
+ self.io.log_exception('There are multiple node populations with name {}.'.format(pop_name))
+
+ node_pop = nodes[pop_name]
+ self._preprocess_node_types(node_pop)
+ self._node_populations[pop_name] = node_pop
+
+ # Segregate into virtual populations and non-virtual populations
+ model_types = self.__avail_model_types(node_pop)
+ if 'virtual' in model_types:
+ self._virtual_populations_map[pop_name] = node_pop
+ self._virtual_cells_nid[pop_name] = {}
+ model_types -= set(['virtual'])
+ if model_types:
+ # We'll allow a population to have virtual and non-virtual nodes but it is not ideal
+                    self.io.log_warning(('Node population {} contains both virtual and non-virtual nodes which can ' +
+                                         'cause memory and build-time inefficiency. Consider separating virtual ' +
+                                         'nodes into their own population').format(pop_name))
+
+ if model_types:
+ self._internal_populations_map[pop_name] = node_pop
+
+ self._node_sets[pop_name] = NodeSet({'population': pop_name}, self)
+ self._node_property_maps[pop_name] = {grp.group_id: self._create_nodes_prop_map(grp)
+ for grp in node_pop.groups}
+
+ def build_nodes(self):
+ raise NotImplementedError
+
+ def build_recurrent_edges(self):
+ raise NotImplementedError
+
+ def add_edges(self, sonata_file, populations=None, source_pop=None, target_pop=None):
+ """
+
+ :param sonata_file:
+ :param populations:
+ :param source_pop:
+ :param target_pop:
+ :return:
+ """
+ edges = sonata_file.edges
+ selected_populations = edges.population_names if populations is None else populations
+
+ for pop_name in selected_populations:
+ if pop_name not in edges:
+ continue
+
+ edge_pop = edges[pop_name]
+ self._preprocess_edge_types(edge_pop)
+
+ # Check the source nodes exists
+ src_pop = source_pop if source_pop is not None else edge_pop.source_population
+ is_internal_src = src_pop in self._internal_populations_map.keys()
+ is_external_src = src_pop in self._virtual_populations_map.keys()
+
+ trg_pop = target_pop if target_pop is not None else edge_pop.target_population
+ is_internal_trg = trg_pop in self._internal_populations_map.keys()
+
+ if not is_internal_trg:
+                self.io.log_exception(('Node population {} does not exist (or consists of only virtual nodes). ' +
+ '{} edges cannot create connections.').format(trg_pop, pop_name))
+
+ if not (is_internal_src or is_external_src):
+ self.io.log_exception('Source node population {} not found. Please update {} edges'.format(src_pop,
+ pop_name))
+ if is_internal_src:
+ if trg_pop not in self._recurrent_edges:
+ self._recurrent_edges[trg_pop] = []
+ self._recurrent_edges[trg_pop].append(edge_pop)
+
+ if is_external_src:
+                if (src_pop, trg_pop) not in self._external_edges:
+ self._external_edges[(src_pop, trg_pop)] = []
+ self._external_edges[(src_pop, trg_pop)].append(edge_pop)
+
+ self._edge_property_maps[pop_name] = {grp.group_id: self._create_edges_prop_map(grp)
+ for grp in edge_pop.groups}
+
+ @classmethod
+ def from_config(cls, conf, **properties):
+ """Generates a graph structure from a json config file or dictionary.
+
+ :param conf: name of json config file, or a dictionary with config parameters
+ :param properties: optional properties.
+ :return: A graph object of type cls
+ """
+ graph = cls(**properties)
+
+ # The simulation run script should create a config-dict since it's likely to vary based on the simulator engine,
+ # however in the case the user doesn't we will try a generic conversion from dict/json to ConfigDict
+ if isinstance(conf, ConfigDict):
+ config = conf
+ else:
+ try:
+ config = ConfigDict.load(conf)
+ except Exception as e:
+ graph.io.log_exception('Could not convert {} (type "{}") to json.'.format(conf, type(conf)))
+
+ if not config.with_networks:
+ graph.io.log_exception('Could not find any network files. Unable to build network.')
+
+ # TODO: These are simulator specific
+ graph.spike_threshold = config.spike_threshold
+ graph.dL = config.dL
+
+ # load components
+ for name, value in config.components.items():
+ graph.add_component(name, value)
+ graph._validate_components()
+
+ # load nodes
+ gid_map = config.gid_mappings
+ for node_dict in config.nodes:
+ nodes_net = sonata.File(data_files=node_dict['nodes_file'], data_type_files=node_dict['node_types_file'],
+ gid_table=gid_map)
+ graph.add_nodes(nodes_net)
+
+ # load edges
+ for edge_dict in config.edges:
+ target_network = edge_dict['target'] if 'target' in edge_dict else None
+ source_network = edge_dict['source'] if 'source' in edge_dict else None
+ edge_net = sonata.File(data_files=edge_dict['edges_file'], data_type_files=edge_dict['edge_types_file'])
+            graph.add_edges(edge_net, source_pop=source_network, target_pop=target_network)
+
+ graph._node_sets['all'] = NodeSetAll(graph)
+        for ns_name, ns_filter in config.node_sets.items():
+ graph._node_sets[ns_name] = NodeSet(ns_filter, graph)
+
+ return graph
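+
+# --- Editor's note: illustrative sketch, not part of the original module ----
+# A concrete simulator is expected to subclass SimGraph and fill in the
+# build methods; from_config() then does the file loading. 'config.json' and
+# the network files it references are assumed to exist:
+#
+#     class MyGraph(SimGraph):
+#         def build_nodes(self):
+#             for pop in self.get_node_populations():
+#                 pass  # instantiate simulator-specific cell objects here
+#
+#         def build_recurrent_edges(self):
+#             pass  # instantiate simulator-specific synapses here
+#
+#     graph = MyGraph.from_config('config.json')
+#     graph.build_nodes()
+#     graph.build_recurrent_edges()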
diff --git a/bmtk-vb/bmtk/simulator/core/io_tools.py b/bmtk-vb/bmtk/simulator/core/io_tools.py
new file mode 100644
index 0000000..8d80c9f
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/io_tools.py
@@ -0,0 +1,117 @@
+import os
+import sys
+import shutil
+import logging
+
+from six import string_types
+from mpi4py import MPI
+
+comm = MPI.COMM_WORLD
+rank = comm.Get_rank()
+size = comm.Get_size()
+
+class IOUtils(object):
+ """
+ For logging/mkdir commands we sometimes need to use different MPI classes depending on the simulator being used
+ (NEST and NEURON have their own barrier functions that don't work well with mpi). We also need to be able to
+ adjust the logging levels/format at run-time depending on the simulator/configuration options.
+
+ Thus the bulk of the io and logging functions are put into their own class and can be overwritten by specific
+ simulator modules
+ """
+ def __init__(self):
+ self.mpi_rank = rank
+ self.mpi_size = size
+
+ self._log_format = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")
+ self._log_level = logging.DEBUG
+ self._log_to_console = True
+ self._logger = None
+
+ @property
+ def log_to_console(self):
+ return self._log_to_console
+
+ @log_to_console.setter
+ def log_to_console(self, flag):
+ assert(isinstance(flag, bool))
+ self._log_to_console = flag
+
+ @property
+ def logger(self):
+ if self._logger is None:
+ # Create the logger the first time it is accessed
+ self._logger = logging.getLogger(self.__class__.__name__)
+ self._logger.setLevel(self._log_level)
+ self._set_console_logging()
+
+ return self._logger
+
+ def _set_console_logging(self):
+ if not self._log_to_console:
+ return
+
+ console_handler = logging.StreamHandler(sys.stdout)
+ console_handler.setFormatter(self._log_format)
+ self._logger.addHandler(console_handler)
+
+ def set_log_format(self, format_str):
+ self._log_format = logging.Formatter(format_str)
+
+ def set_log_level(self, loglevel):
+ if isinstance(loglevel, int):
+ self._log_level = loglevel
+
+        elif isinstance(loglevel, string_types):
+ self._log_level = logging.getLevelName(loglevel)
+
+ else:
+ raise Exception('Error: cannot set logging levels to {}'.format(loglevel))
+
+ def barrier(self):
+ pass
+
+ def quiet_simulator(self):
+ pass
+
+ def setup_output_dir(self, output_dir, log_file, overwrite=True):
+ if self.mpi_rank == 0:
+ print("mpi rank 0")
+ # Create output directory
+ if os.path.exists(output_dir):
+ if overwrite:
+ shutil.rmtree(output_dir)
+ else:
+ self.log_exception('Directory already exists (remove or set to overwrite).')
+ os.makedirs(output_dir)
+
+ # Create log file
+ if log_file is not None:
+ log_path = log_file if os.path.isabs(log_file) else os.path.join(output_dir, log_file)
+ file_logger = logging.FileHandler(log_path)
+ file_logger.setFormatter(self._log_format)
+ self.logger.addHandler(file_logger)
+ self.log_info('Created log file')
+
+ self.barrier()
+
+ def log_info(self, message, all_ranks=False):
+ if all_ranks is False and self.mpi_rank != 0:
+ return
+
+ self.logger.info(message)
+
+ def log_warning(self, message, all_ranks=False):
+ if all_ranks is False and self.mpi_rank != 0:
+ return
+
+ self.logger.warning(message)
+
+ def log_exception(self, message):
+ if self.mpi_rank == 0:
+ self.logger.error(message)
+
+ self.barrier()
+ raise Exception(message)
+
+
+io = IOUtils()
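+
+
+# --- Editor's note: illustrative sketch, not part of the original module ----
+# Simulator packages can subclass IOUtils to supply a real barrier and swap
+# the module-level singleton; mpi4py's Barrier() is used here since this
+# module already depends on mpi4py.
+if __name__ == '__main__':
+    class MPIIOUtils(IOUtils):
+        def barrier(self):
+            comm.Barrier()
+
+    demo_io = MPIIOUtils()
+    demo_io.log_info('IOUtils demo: rank {} of {}'.format(rank, size))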
diff --git a/bmtk-vb/bmtk/simulator/core/io_tools.pyc b/bmtk-vb/bmtk/simulator/core/io_tools.pyc
new file mode 100644
index 0000000..1a353f9
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/io_tools.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/network_reader.py b/bmtk-vb/bmtk/simulator/core/network_reader.py
new file mode 100644
index 0000000..8089e32
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/network_reader.py
@@ -0,0 +1,73 @@
+
+
+class NodesReader(object):
+ def __init__(self):
+ self._has_internal_nodes = False
+ self._has_virtual_nodes = False
+
+ @property
+ def name(self):
+ raise NotImplementedError()
+
+ @property
+ def internal_nodes_only(self):
+ return self._has_internal_nodes and not self._has_virtual_nodes
+
+ @property
+ def virtual_nodes_only(self):
+ return self._has_virtual_nodes and not self._has_internal_nodes
+
+ @property
+ def mixed_nodes(self):
+ return self._has_internal_nodes and self._has_virtual_nodes
+
+ def initialize(self, network):
+ raise NotImplementedError()
+
+ @classmethod
+ def load(cls, **properties):
+ raise NotImplementedError()
+
+
+class EdgesReader(object):
+    unknown = -1
+ recurrent = 0
+ virtual = 1
+ mixed = 2
+
+ def __init__(self):
+        self._connection_type = self.unknown
+
+ @property
+ def recurrent_connections(self):
+ return self._connection_type == self.recurrent
+
+ @property
+ def virtual_connections(self):
+ return self._connection_type == self.virtual
+
+ @property
+ def mixed_connections(self):
+ return self._connection_type == self.mixed
+
+ @property
+ def source_nodes(self):
+ raise NotImplementedError()
+
+ @property
+ def target_nodes(self):
+ raise NotImplementedError()
+
+ def set_connection_type(self, src_pop, trg_pop):
+ if src_pop.internal_nodes_only and trg_pop.internal_nodes_only:
+ self._connection_type = self.recurrent
+
+ elif src_pop.virtual_nodes_only and trg_pop.internal_nodes_only:
+ self._connection_type = self.virtual
+
+ else:
+ self._connection_type = self.mixed
+
+ def initialize(self, network):
+ raise NotImplementedError()
+
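+# --- Editor's note: illustrative sketch, not part of the original module ----
+# set_connection_type() only inspects the *_nodes_only flags of the two node
+# readers, so the dispatch can be demonstrated with bare NodesReader objects
+# (in real use the flags are set by a subclass's initialize()):
+if __name__ == '__main__':
+    src, trg = NodesReader(), NodesReader()
+    src._has_virtual_nodes = True    # pure virtual (spike-source) population
+    trg._has_internal_nodes = True   # pure internal population
+
+    edges = EdgesReader()
+    edges.set_connection_type(src_pop=src, trg_pop=trg)
+    assert edges.virtual_connections and not edges.recurrent_connections
+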
diff --git a/bmtk-vb/bmtk/simulator/core/network_reader.pyc b/bmtk-vb/bmtk/simulator/core/network_reader.pyc
new file mode 100644
index 0000000..39e1b08
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/network_reader.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/node_population.py b/bmtk-vb/bmtk/simulator/core/node_population.py
new file mode 100644
index 0000000..353bd7a
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/node_population.py
@@ -0,0 +1,37 @@
+class SimNode(object):
+ @property
+ def node_id(self):
+ raise NotImplementedError()
+
+ @property
+ def gid(self):
+ raise NotImplementedError()
+
+
+class NodePopulation(object):
+ def __init__(self):
+ self._has_internal_nodes = False
+ self._has_virtual_nodes = False
+
+ @property
+ def name(self):
+ raise NotImplementedError()
+
+ @property
+ def internal_nodes_only(self):
+ return self._has_internal_nodes and not self._has_virtual_nodes
+
+ @property
+ def virtual_nodes_only(self):
+ return self._has_virtual_nodes and not self._has_internal_nodes
+
+ @property
+ def mixed_nodes(self):
+ return self._has_internal_nodes and self._has_virtual_nodes
+
+ def initialize(self, network):
+ raise NotImplementedError()
+
+ @classmethod
+ def load(cls, **properties):
+ raise NotImplementedError()
diff --git a/bmtk-vb/bmtk/simulator/core/node_sets.py b/bmtk-vb/bmtk/simulator/core/node_sets.py
new file mode 100644
index 0000000..5a67f95
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/node_sets.py
@@ -0,0 +1,57 @@
+from .io_tools import io
+
+
+class NodeSet(object):
+ def __init__(self, filter_params, network):
+ self._network = network
+ self._populations = []
+ self._preselected_gids = None
+
+ if isinstance(filter_params, list):
+ self._preselected_gids = filter_params
+ elif isinstance(filter_params, dict):
+ self._filter = filter_params.copy()
+ self._populations = self._find_populations()
+ else:
+ io.log_exception('Unknown node set params type {}'.format(type(filter_params)))
+
+ def _find_populations(self):
+ for k in ['population', 'populations']:
+ if k in self._filter:
+ node_pops = []
+ for pop_name in to_list(self._filter[k]):
+ node_pops.append(self._network.get_node_population(pop_name))
+ del self._filter[k]
+ return node_pops
+
+ return self._network.get_node_populations()
+
+ def populations(self):
+ return self._populations
+
+ def population_names(self):
+ return [p.name for p in self._populations]
+
+ def gids(self):
+ if self._preselected_gids is not None:
+ for gid in self._preselected_gids:
+ yield gid
+ else:
+ for pop in self._populations:
+ for node in pop.filter(self._filter):
+ yield node.node_id
+
+ def nodes(self):
+ return None
+
+
+class NodeSetAll(NodeSet):
+ def __init__(self, network):
+ super(NodeSetAll, self).__init__({}, network)
+
+
+def to_list(val):
+ if isinstance(val, list):
+ return val
+ else:
+ return [val]
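+
+
+# --- Editor's note: illustrative sketch, not part of the original module ----
+# A NodeSet accepts either a preselected gid list or a filter dict; the list
+# form needs no network lookups, so it can run standalone:
+if __name__ == '__main__':
+    ns = NodeSet([0, 1, 2], network=None)   # gids given directly
+    print(list(ns.gids()))                  # -> [0, 1, 2]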
diff --git a/bmtk-vb/bmtk/simulator/core/node_sets.pyc b/bmtk-vb/bmtk/simulator/core/node_sets.pyc
new file mode 100644
index 0000000..5618942
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/node_sets.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/simulator.py b/bmtk-vb/bmtk/simulator/core/simulator.py
new file mode 100644
index 0000000..4a84174
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/simulator.py
@@ -0,0 +1,9 @@
+class Simulator(object):
+ def __init__(self):
+ self._sim_mods = []
+
+ def add_mod(self, module):
+ self._sim_mods.append(module)
+
+ def run(self):
+ raise NotImplementedError()
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/core/simulator.pyc b/bmtk-vb/bmtk/simulator/core/simulator.pyc
new file mode 100644
index 0000000..8844715
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/simulator.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/simulator_network.py b/bmtk-vb/bmtk/simulator/core/simulator_network.py
new file mode 100644
index 0000000..e1da1b3
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/simulator_network.py
@@ -0,0 +1,200 @@
+from six import string_types
+
+from bmtk.simulator.core.io_tools import io
+#from bmtk.simulator.core.config import ConfigDict
+from bmtk.simulator.utils.config import ConfigDict
+from bmtk.simulator.core.node_sets import NodeSet, NodeSetAll
+from bmtk.simulator.core import sonata_reader
+
+
+class SimNetwork(object):
+ def __init__(self):
+ self._components = {}
+ self._io = io
+
+ self._node_adaptors = {}
+ self._edge_adaptors = {}
+ self._register_adaptors()
+
+ self._node_populations = {}
+ self._node_sets = {}
+
+ self._edge_populations = []
+
+ @property
+ def io(self):
+ return self._io
+
+ @property
+ def node_populations(self):
+ return self._node_populations.values()
+
+ @property
+ def recurrent_edges(self):
+ return [ep for ep in self._edge_populations if ep.recurrent_connections]
+
+ @property
+ def py_function_caches(self):
+ return None
+
+ def _register_adaptors(self):
+ self._node_adaptors['sonata'] = sonata_reader.NodeAdaptor
+ self._edge_adaptors['sonata'] = sonata_reader.EdgeAdaptor
+
+ def get_node_adaptor(self, name):
+ return self._node_adaptors[name]
+
+ def get_edge_adaptor(self, name):
+ return self._edge_adaptors[name]
+
+ def add_component(self, name, path):
+ self._components[name] = path
+
+ def get_component(self, name):
+ if name not in self._components:
+ self.io.log_exception('No network component set with name {}'.format(name))
+ else:
+ return self._components[name]
+
+ def has_component(self, name):
+ return name in self._components
+
+ def get_node_population(self, name):
+ return self._node_populations[name]
+
+ def get_node_populations(self):
+ return self._node_populations.values()
+
+ def add_node_set(self, name, node_set):
+ self._node_sets[name] = node_set
+
+ def get_node_set(self, node_set):
+ if isinstance(node_set, string_types) and node_set in self._node_sets:
+ return self._node_sets[node_set]
+
+ elif isinstance(node_set, (dict, list)):
+ return NodeSet(node_set, self)
+
+ else:
+ self.io.log_exception('Unable to load or find node_set "{}"'.format(node_set))
+
+ def add_nodes(self, node_population):
+ pop_name = node_population.name
+ if pop_name in self._node_populations:
+            # Make sure there aren't any collisions
+ self.io.log_exception('There are multiple node populations with name {}.'.format(pop_name))
+
+ node_population.initialize(self)
+ self._node_populations[pop_name] = node_population
+ if node_population.mixed_nodes:
+ # We'll allow a population to have virtual and non-virtual nodes but it is not ideal
+            self.io.log_warning(('Node population {} contains both virtual and non-virtual nodes which can cause ' +
+                                 'memory and build-time inefficiency. Consider separating virtual nodes into their ' +
+                                 'own population').format(pop_name))
+
+ # Used in inputs/reports when needed to get all gids belonging to a node population
+ self._node_sets[pop_name] = NodeSet({'population': pop_name}, self)
+
+ def add_edges(self, edge_population):
+ edge_population.initialize(self)
+ pop_name = edge_population.name
+
+ # Check that source_population exists
+ src_pop_name = edge_population.source_nodes
+ if src_pop_name not in self._node_populations:
+ self.io.log_exception('Source node population {} not found. Please update {} edges'.format(src_pop_name,
+ pop_name))
+
+ # Check that the target population exists and contains non-virtual nodes (we cannot synapse onto virt nodes)
+ trg_pop_name = edge_population.target_nodes
+ if trg_pop_name not in self._node_populations or self._node_populations[trg_pop_name].virtual_nodes_only:
+            self.io.log_exception(('Node population {} does not exist (or consists of only virtual nodes). ' +
+ '{} edges cannot create connections.').format(trg_pop_name, pop_name))
+
+        edge_population.set_connection_type(src_pop=self._node_populations[src_pop_name],
+                                            trg_pop=self._node_populations[trg_pop_name])
+ self._edge_populations.append(edge_population)
+
+ def build(self):
+ self.build_nodes()
+ self.build_recurrent_edges()
+
+ def build_nodes(self):
+ raise NotImplementedError()
+
+ def build_recurrent_edges(self):
+ raise NotImplementedError()
+
+ def build_virtual_connections(self):
+ raise NotImplementedError()
+
+ @classmethod
+ def from_config(cls, conf, **properties):
+ """Generates a graph structure from a json config file or dictionary.
+
+ :param conf: name of json config file, or a dictionary with config parameters
+ :param properties: optional properties.
+ :return: A graph object of type cls
+ """
+ network = cls(**properties)
+
+ # The simulation run script should create a config-dict since it's likely to vary based on the simulator engine,
+ # however in the case the user doesn't we will try a generic conversion from dict/json to ConfigDict
+ if isinstance(conf, ConfigDict):
+ config = conf
+ else:
+ try:
+ config = ConfigDict.load(conf)
+ except Exception as e:
+ network.io.log_exception('Could not convert {} (type "{}") to json.'.format(conf, type(conf)))
+
+ if not config.with_networks:
+ network.io.log_exception('Could not find any network files. Unable to build network.')
+
+ # TODO: These are simulator specific
+ network.spike_threshold = config.spike_threshold
+ network.dL = config.dL
+
+ # load components
+ for name, value in config.components.items():
+ network.add_component(name, value)
+
+ # load nodes
+ gid_map = config.gid_mappings
+ node_adaptor = network.get_node_adaptor('sonata')
+ for node_dict in config.nodes:
+ nodes = sonata_reader.load_nodes(node_dict['nodes_file'], node_dict['node_types_file'], gid_map,
+ adaptor=node_adaptor)
+ for node_pop in nodes:
+ network.add_nodes(node_pop)
+
+ # TODO: Raise a warning if more than one internal population and no gids (node_id collision)
+
+ # load edges
+ edge_adaptor = network.get_edge_adaptor('sonata')
+ for edge_dict in config.edges:
+ if not edge_dict.get('enabled', True):
+ continue
+
+ edges = sonata_reader.load_edges(edge_dict['edges_file'], edge_dict['edge_types_file'],
+ adaptor=edge_adaptor)
+ for edge_pop in edges:
+ network.add_edges(edge_pop)
+
+ # Add nodeset section
+ network.add_node_set('all', NodeSetAll(network))
+ for ns_name, ns_filter in config.node_sets.items():
+ network.add_node_set(ns_name, NodeSet(ns_filter, network))
+
+ return network
+
+ @classmethod
+ def from_manifest(cls, manifest_json):
+ # TODO: Add adaptors to build a simulation network from model files downloaded celltypes.brain-map.org
+ raise NotImplementedError()
+
+ @classmethod
+ def from_builder(cls, network):
+ # TODO: Add adaptors to build a simulation network from a bmtk.builder Network object
+ raise NotImplementedError()
+
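+# --- Editor's note: illustrative sketch, not part of the original module ----
+# Engines specialize SimNetwork by overriding _register_adaptors() to install
+# their own node/edge adaptors while reusing the sonata readers. A sketch,
+# where MyNodeAdaptor is hypothetical:
+#
+#     class MySimNetwork(SimNetwork):
+#         def _register_adaptors(self):
+#             super(MySimNetwork, self)._register_adaptors()
+#             self._node_adaptors['sonata'] = MyNodeAdaptor
+#
+#         def build_nodes(self):
+#             pass  # create simulator cells from self.node_populations
+#
+#         def build_recurrent_edges(self):
+#             pass  # create simulator synapses from self.recurrent_edges
+#
+#     net = MySimNetwork.from_config('config.json')
+#     net.build()
+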
diff --git a/bmtk-vb/bmtk/simulator/core/simulator_network.pyc b/bmtk-vb/bmtk/simulator/core/simulator_network.pyc
new file mode 100644
index 0000000..a3aa722
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/simulator_network.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/sonata_reader/__init__.py b/bmtk-vb/bmtk/simulator/core/sonata_reader/__init__.py
new file mode 100644
index 0000000..9b09281
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/sonata_reader/__init__.py
@@ -0,0 +1,3 @@
+from .node_adaptor import NodeAdaptor, SonataBaseNode
+from .edge_adaptor import EdgeAdaptor, SonataBaseEdge
+from .network_reader import load_nodes, load_edges
diff --git a/bmtk-vb/bmtk/simulator/core/sonata_reader/__init__.pyc b/bmtk-vb/bmtk/simulator/core/sonata_reader/__init__.pyc
new file mode 100644
index 0000000..c1bb935
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/sonata_reader/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/sonata_reader/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/simulator/core/sonata_reader/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..5c4c1f9
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/sonata_reader/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/sonata_reader/__pycache__/edge_adaptor.cpython-37.pyc b/bmtk-vb/bmtk/simulator/core/sonata_reader/__pycache__/edge_adaptor.cpython-37.pyc
new file mode 100644
index 0000000..4d18dc2
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/sonata_reader/__pycache__/edge_adaptor.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/sonata_reader/__pycache__/network_reader.cpython-37.pyc b/bmtk-vb/bmtk/simulator/core/sonata_reader/__pycache__/network_reader.cpython-37.pyc
new file mode 100644
index 0000000..673b99b
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/sonata_reader/__pycache__/network_reader.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/sonata_reader/__pycache__/node_adaptor.cpython-37.pyc b/bmtk-vb/bmtk/simulator/core/sonata_reader/__pycache__/node_adaptor.cpython-37.pyc
new file mode 100644
index 0000000..fabdabb
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/sonata_reader/__pycache__/node_adaptor.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/sonata_reader/edge_adaptor.py b/bmtk-vb/bmtk/simulator/core/sonata_reader/edge_adaptor.py
new file mode 100644
index 0000000..7642808
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/sonata_reader/edge_adaptor.py
@@ -0,0 +1,208 @@
+import os
+import ast
+import json
+import types
+
+import numpy as np
+
+
+class SonataBaseEdge(object):
+ def __init__(self, sonata_edge, edge_adaptor):
+ self._edge = sonata_edge
+ self._prop_adaptor = edge_adaptor
+
+ @property
+ def source_node_id(self):
+ return self._edge.source_node_id
+
+ @property
+ def target_node_id(self):
+ return self._edge.target_node_id
+
+ @property
+ def dynamics_params(self):
+ return self._prop_adaptor.dynamics_params(self._edge)
+
+ @property
+ def delay(self):
+ return self._edge['delay']
+
+ @property
+ def weight_function(self):
+ return self._prop_adaptor.weight_function(self._edge)
+
+ @property
+ def preselected_targets(self):
+ return self._prop_adaptor.preselected_targets
+
+ @property
+ def target_sections(self):
+ return self._edge['target_sections']
+
+ @property
+ def target_distance(self):
+ return self._edge['distance_range']
+
+ @property
+ def edge_type_id(self):
+ return self._edge.edge_type_id
+
+ @property
+ def nsyns(self):
+ return self._prop_adaptor.nsyns(self._edge)
+
+ @property
+ def model_template(self):
+ return self._edge['model_template']
+
+ def syn_weight(self, src_node, trg_node):
+ return self._prop_adaptor.syn_weight(self, src_node=src_node, trg_node=trg_node)
+
+ def __getitem__(self, item):
+ return self._edge[item]
+
+ def __contains__(self, prop_key):
+ return self._edge.__contains__(prop_key)
+
+
+class EdgeAdaptor(object):
+ def __init__(self, network):
+ self._network = network
+ self._func_caches = self._network.py_function_caches
+
+ @property
+ def batch_process(self):
+ return False
+
+ @batch_process.setter
+ def batch_process(self, flag):
+ pass
+
+ def get_edge(self, sonata_node):
+ return SonataBaseEdge(sonata_node, self)
+
+ @staticmethod
+ def preprocess_edge_types(network, edge_population):
+ edge_types_table = edge_population.types_table
+ edge_type_ids = np.unique(edge_population.type_ids)
+
+ for et_id in edge_type_ids:
+ edge_type = edge_types_table[et_id]
+ if 'dynamics_params' in edge_types_table.columns:
+ dynamics_params = edge_type['dynamics_params']
+ params_dir = network.get_component('synaptic_models_dir')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ edge_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ network.io.log_exception('Could not find edge dynamics_params file {}.'.format(params_path))
+
+ # Split target_sections
+ if 'target_sections' in edge_type:
+ trg_sec = edge_type['target_sections']
+ if trg_sec is not None:
+ try:
+ edge_type['target_sections'] = ast.literal_eval(trg_sec)
+ except Exception as exc:
+ network.io.log_warning('Unable to split target_sections list {}'.format(trg_sec))
+ edge_type['target_sections'] = None
+
+ # Split target distances
+ if 'distance_range' in edge_type:
+ dist_range = edge_type['distance_range']
+ if dist_range is not None:
+                    try:
+                        # TODO: Make sure distance_range has at most two values
+ edge_type['distance_range'] = json.loads(dist_range)
+ except Exception as e:
+ try:
+ edge_type['distance_range'] = [0.0, float(dist_range)]
+ except Exception as e:
+ network.io.log_warning('Unable to parse distance_range {}'.format(dist_range))
+ edge_type['distance_range'] = None
+
+ @classmethod
+ def create_adaptor(cls, edge_group, network):
+ prop_map = cls(network)
+ return cls.patch_adaptor(prop_map, edge_group)
+
+ @staticmethod
+ def patch_adaptor(adaptor, edge_group):
+ # dynamics_params
+ if edge_group.has_dynamics_params:
+ adaptor.dynamics_params = types.MethodType(group_dynamics_params, adaptor)
+ else: # 'dynamics_params' in node_group.all_columns:
+ adaptor.dynamics_params = types.MethodType(types_dynamics_params, adaptor)
+
+ # For fetching/calculating synaptic weights
+ if 'weight_function' in edge_group.all_columns:
+ # Customized function for user to calculate the synaptic weight
+ adaptor.weight_function = types.MethodType(weight_function, adaptor)
+ adaptor.syn_weight = types.MethodType(syn_weight_function, adaptor)
+ elif 'syn_weight' in edge_group.all_columns:
+ # Just return the synaptic weight
+ adaptor.weight_function = types.MethodType(ret_none_function, adaptor)
+ adaptor.syn_weight = types.MethodType(syn_weight, adaptor)
+ else:
+ raise Exception('Could not find syn_weight or weight_function properties. Cannot create connections.')
+
+ # For determining the synapse placement
+ if 'sec_id' in edge_group.all_columns:
+ adaptor.preselected_targets = True
+ adaptor.nsyns = types.MethodType(no_nsyns, adaptor)
+ elif 'nsyns' in edge_group.all_columns:
+ adaptor.preselected_targets = False
+ adaptor.nsyns = types.MethodType(nsyns, adaptor)
+ else:
+ # It will get here for connections onto point neurons
+ adaptor.preselected_targets = True
+ adaptor.nsyns = types.MethodType(no_nsyns, adaptor)
+
+ return adaptor
+
+
+def ret_none_function(self, edge):
+ return None
+
+
+def weight_function(self, edge):
+ return edge['weight_function']
+
+
+def syn_weight(self, edge, src_node, trg_node):
+ return edge['syn_weight']
+
+
+def syn_weight_function(self, edge, src_node, trg_node):
+ weight_fnc_name = edge.weight_function
+ if weight_fnc_name is None:
+ weight_fnc = self._func_caches.py_modules.synaptic_weight('default_weight_fnc')
+ return weight_fnc(edge, src_node, trg_node)
+
+ elif self._func_caches.py_modules.has_synaptic_weight(weight_fnc_name):
+ weight_fnc = self._func_caches.py_modules.synaptic_weight(weight_fnc_name)
+ return weight_fnc(edge, src_node, trg_node)
+
+ else:
+ self._network.io.log_exception('weight_function {} is not defined.'.format(weight_fnc_name))
+
+
+def nsyns(self, edge):
+ return edge['nsyns']
+
+
+def no_nsyns(self, edge):
+ return 1
+
+
+def types_dynamics_params(self, node):
+ return node['dynamics_params']
+
+
+def group_dynamics_params(self, node):
+ return node.dynamics_params
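+
+
+# --- Editor's note: illustrative sketch, not part of the original module ----
+# patch_adaptor() binds the free functions above onto each adaptor instance
+# with types.MethodType so per-edge lookups avoid repeated column checks. The
+# binding pattern in isolation:
+if __name__ == '__main__':
+    class _DemoAdaptor(object):
+        pass
+
+    demo = _DemoAdaptor()
+    demo.nsyns = types.MethodType(no_nsyns, demo)   # the point-neuron case
+    print(demo.nsyns(edge=None))                    # -> 1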
diff --git a/bmtk-vb/bmtk/simulator/core/sonata_reader/edge_adaptor.pyc b/bmtk-vb/bmtk/simulator/core/sonata_reader/edge_adaptor.pyc
new file mode 100644
index 0000000..107e4c6
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/sonata_reader/edge_adaptor.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/sonata_reader/network_reader.py b/bmtk-vb/bmtk/simulator/core/sonata_reader/network_reader.py
new file mode 100644
index 0000000..92effcb
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/sonata_reader/network_reader.py
@@ -0,0 +1,244 @@
+import os
+import numpy as np
+import json
+import ast
+
+from bmtk.simulator.core.network_reader import NodesReader, EdgesReader
+from bmtk.simulator.core.sonata_reader.node_adaptor import NodeAdaptor
+from bmtk.simulator.core.sonata_reader.edge_adaptor import EdgeAdaptor
+from bmtk.utils import sonata
+
+
+def load_nodes(nodes_h5, node_types_csv, gid_table=None, selected_nodes=None, adaptor=NodeAdaptor):
+ return SonataNodes.load(nodes_h5, node_types_csv, gid_table, selected_nodes, adaptor)
+
+
+def load_edges(edges_h5, edge_types_csv, selected_populations=None, adaptor=EdgeAdaptor):
+ return SonataEdges.load(edges_h5, edge_types_csv, selected_populations, adaptor)
+
+
+class SonataNodes(NodesReader):
+ def __init__(self, sonata_node_population, prop_adaptor):
+ super(SonataNodes, self).__init__()
+ self._node_pop = sonata_node_population
+ self._pop_name = self._node_pop.name
+ self._prop_adaptors = {}
+ self._adaptor = prop_adaptor
+
+ @property
+ def name(self):
+ return self._pop_name
+
+ @property
+ def adaptor(self):
+ return self._adaptor
+
+ def n_nodes(self):
+ return len(self._node_pop)
+
+ def initialize(self, network):
+        # Determine the model-types available in the node population. Whether a population contains
+        # virtual/external nodes, internal nodes, or a mix of both affects how the nodes are built.
+ model_types = set()
+ for grp in self._node_pop.groups:
+ if self._adaptor.COL_MODEL_TYPE not in grp.all_columns:
+ network.io.log_exception('property {} is missing from nodes.'.format(self._adaptor.COL_MODEL_TYPE))
+
+ model_types.update(set(np.unique(grp.get_values(self._adaptor.COL_MODEL_TYPE))))
+
+ if 'virtual' in model_types:
+ self._has_virtual_nodes = True
+ model_types -= set(['virtual'])
+ else:
+ self._has_virtual_nodes = False
+
+ if model_types:
+ self._has_internal_nodes = True
+
+ self._adaptor.preprocess_node_types(network, self._node_pop)
+ #self._preprocess_node_types(network)
+ self._prop_adaptors = {grp.group_id: self._create_adaptor(grp, network) for grp in self._node_pop.groups}
+
+ def _create_adaptor(self, grp, network):
+ return self._adaptor.create_adaptor(grp, network)
+
+ '''
+ def _preprocess_node_types(self, network):
+ # TODO: The following figures out the actually used node-type-ids. For mem and speed may be better to just
+ # process them all
+ node_type_ids = self._node_pop.type_ids
+ # TODO: Verify all the node_type_ids are in the table
+ node_types_table = self._node_pop.types_table
+
+ # TODO: Convert model_type to a enum
+ if network.has_component('morphologies_dir'):
+ morph_dir = network.get_component('morphologies_dir')
+ if morph_dir is not None and 'morphology_file' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ if node_type['morphology_file'] is None:
+ continue
+ # TODO: Check the file exits
+ # TODO: See if absolute path is stored in csv
+ node_type['morphology_file'] = os.path.join(morph_dir, node_type['morphology_file'])
+
+ if 'dynamics_params' in node_types_table.columns and 'model_type' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ dynamics_params = node_type['dynamics_params']
+ if isinstance(dynamics_params, dict):
+ continue
+
+ model_type = node_type['model_type']
+ if model_type == 'biophysical':
+ params_dir = network.get_component('biophysical_neuron_models_dir')
+ elif model_type == 'point_process':
+ params_dir = network.get_component('point_neuron_models_dir')
+ elif model_type == 'point_soma':
+ params_dir = network.get_component('point_neuron_models_dir')
+ else:
+ # Not sure what to do in this case, throw Exception?
+ params_dir = network.get_component('custom_neuron_models')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ node_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ network.io.log_exception('Could not find node dynamics_params file {}.'.format(params_path))
+
+ # TODO: Use adaptor to validate model_type and model_template values
+ '''
+
+ @classmethod
+ def load(cls, nodes_h5, node_types_csv, gid_table=None, selected_nodes=None, adaptor=NodeAdaptor):
+ sonata_file = sonata.File(data_files=nodes_h5, data_type_files=node_types_csv, gid_table=gid_table)
+ node_populations = []
+ for node_pop in sonata_file.nodes.populations:
+ node_populations.append(cls(node_pop, adaptor))
+
+ return node_populations
+
+ def get_node(self, node_id):
+ return self._node_pop.get_node_id(node_id)
+
+ def __getitem__(self, item):
+ for base_node in self._node_pop[item]:
+ snode = self._prop_adaptors[base_node.group_id].get_node(base_node)
+ yield snode
+
+    def __iter__(self):
+        # Iterate over adaptor-wrapped nodes; returning self would require a __next__ method.
+        return self.get_nodes()
+
+ def filter(self, filter_conditons):
+ for node in self._node_pop.filter(**filter_conditons):
+ yield node
+
+ def get_nodes(self):
+ for node_group in self._node_pop.groups:
+ node_adaptor = self._prop_adaptors[node_group.group_id]
+ if node_adaptor.batch_process:
+ for batch in node_adaptor.get_batches(node_group):
+ yield batch
+ else:
+ for node in node_group:
+ yield node_adaptor.get_node(node)
+
+
+class SonataEdges(EdgesReader):
+ def __init__(self, edge_population, adaptor):
+ self._edge_pop = edge_population
+ self._adaptor_cls = adaptor
+ self._edge_adaptors = {}
+
+ @property
+ def name(self):
+ return self._edge_pop.name
+
+ @property
+ def source_nodes(self):
+ return self._edge_pop.source_population
+
+ @property
+ def target_nodes(self):
+ return self._edge_pop.target_population
+
+ def initialize(self, network):
+ self._adaptor_cls.preprocess_edge_types(network, self._edge_pop)
+ # self._preprocess_edge_types(network)
+ self._edge_adaptors = {grp.group_id: self._adaptor_cls.create_adaptor(grp, network)
+ for grp in self._edge_pop.groups}
+
+ def get_target(self, node_id):
+ for edge in self._edge_pop.get_target(node_id):
+ yield self._edge_adaptors[edge.group_id].get_edge(edge)
+
+ def get_edges(self):
+ for edge_group in self._edge_pop.groups:
+ edge_adaptor = self._edge_adaptors[edge_group.group_id]
+ if edge_adaptor.batch_process:
+ for edge in edge_adaptor.get_batches(edge_group):
+ yield edge
+ else:
+                for edge in edge_group:
+ yield edge_adaptor.get_edge(edge)
+
+ '''
+ def _preprocess_edge_types(self, network):
+ edge_types_table = self._edge_pop.types_table
+ edge_type_ids = np.unique(self._edge_pop.type_ids)
+
+ for et_id in edge_type_ids:
+ edge_type = edge_types_table[et_id]
+ if 'dynamics_params' in edge_types_table.columns:
+ dynamics_params = edge_type['dynamics_params']
+ params_dir = network.get_component('synaptic_models_dir')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ edge_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ self.io.log_exception('Could not find edge dynamics_params file {}.'.format(params_path))
+
+ # Split target_sections
+ if 'target_sections' in edge_type:
+ trg_sec = edge_type['target_sections']
+ if trg_sec is not None:
+ try:
+ edge_type['target_sections'] = ast.literal_eval(trg_sec)
+ except Exception as exc:
+ self.io.log_warning('Unable to split target_sections list {}'.format(trg_sec))
+ edge_type['target_sections'] = None
+
+ # Split target distances
+ if 'distance_range' in edge_type:
+ dist_range = edge_type['distance_range']
+ if dist_range is not None:
+ try:
+ # TODO: Make the distance range has at most two values
+ edge_type['distance_range'] = json.loads(dist_range)
+ except Exception as e:
+ try:
+ edge_type['distance_range'] = [0.0, float(dist_range)]
+ except Exception as e:
+ self.io.log_warning('Unable to parse distance_range {}'.format(dist_range))
+ edge_type['distance_range'] = None
+ '''
+
+ @classmethod
+ def load(cls, edges_h5, edge_types_csv, selected_populations=None, adaptor=EdgeAdaptor):
+ sonata_file = sonata.File(data_files=edges_h5, data_type_files=edge_types_csv)
+ edge_populations = []
+ for edge_pop in sonata_file.edges.populations:
+ edge_populations.append(cls(edge_pop, adaptor))
+
+ return edge_populations
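+
+
+# --- Editor's note: illustrative usage sketch, not part of the original -----
+# module; the .h5/.csv paths and the 'network' object are hypothetical. This
+# mirrors what SimNetwork.from_config() does internally:
+#
+#     node_pops = load_nodes('network/v1_nodes.h5', 'network/v1_node_types.csv')
+#     edge_pops = load_edges('network/v1_v1_edges.h5', 'network/v1_v1_edge_types.csv')
+#     for pop in node_pops:
+#         pop.initialize(network)          # network: the owning SimNetwork
+#         for node in pop.get_nodes():
+#             print(node.node_id, node.model_type)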
diff --git a/bmtk-vb/bmtk/simulator/core/sonata_reader/network_reader.pyc b/bmtk-vb/bmtk/simulator/core/sonata_reader/network_reader.pyc
new file mode 100644
index 0000000..722aa7d
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/sonata_reader/network_reader.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/core/sonata_reader/node_adaptor.py b/bmtk-vb/bmtk/simulator/core/sonata_reader/node_adaptor.py
new file mode 100644
index 0000000..f8d980c
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/core/sonata_reader/node_adaptor.py
@@ -0,0 +1,207 @@
+import os
+import json
+import types
+import numpy as np
+
+
+class SonataBaseNode(object):
+ def __init__(self, node, prop_adaptor):
+ self._node = node
+ self._prop_adaptor = prop_adaptor
+
+ @property
+ def node_id(self):
+ return self._prop_adaptor.node_id(self._node)
+
+ @property
+ def gid(self):
+ return self._prop_adaptor.gid(self._node)
+
+ @property
+ def dynamics_params(self):
+ return self._prop_adaptor.dynamics_params(self._node)
+
+ @property
+ def model_type(self):
+ return self._prop_adaptor.model_type(self._node)
+
+ @property
+ def model_template(self):
+ return self._prop_adaptor.model_template(self._node)
+
+ @property
+ def model_processing(self):
+ return self._prop_adaptor.model_processing(self._node)
+
+ @property
+ def network(self):
+ return self._prop_adaptor.network
+
+ @property
+ def population(self):
+ return self._prop_adaptor.network
+
+ def __getitem__(self, prop_key):
+ return self._node[prop_key]
+
+
+class NodeAdaptor(object):
+ COL_MODEL_TYPE = 'model_type'
+ COL_GID = 'gid'
+ COL_DYNAMICS_PARAM = 'dynamics_params'
+ COL_MODEL_TEMPLATE = 'model_template'
+ COL_MODEL_PROCESSING = 'model_processing'
+
+ def __init__(self, network):
+ self._network = network
+ self._model_template_cache = {}
+ self._model_processing_cache = {}
+
+ @property
+ def batch_process(self):
+ return False
+
+ @batch_process.setter
+ def batch_process(self, flag):
+ pass
+
+ def node_id(self, node):
+ return node.node_id
+
+ def model_type(self, node):
+ return node[self.COL_MODEL_TYPE]
+
+ def model_template(self, node):
+ # TODO: If model-template comes from the types table we should split it in _preprocess_types
+ model_template_str = node[self.COL_MODEL_TEMPLATE]
+ if model_template_str is None:
+ return None
+ elif model_template_str in self._model_template_cache:
+ return self._model_template_cache[model_template_str]
+ else:
+ template_parts = model_template_str.split(':')
+ directive, template = template_parts[0], template_parts[1]
+ self._model_template_cache[model_template_str] = (directive, template)
+ return directive, template
+
+ def model_processing(self, node):
+ model_processing_str = node[self.COL_MODEL_PROCESSING]
+ if model_processing_str is None:
+ return []
+ else:
+ # TODO: Split in the node_types_table when possible
+ return model_processing_str.split(',')
+
+ @staticmethod
+ def preprocess_node_types(network, node_population):
+ # TODO: The following figures out the actually used node-type-ids. For mem and speed may be better to just
+ # process them all
+ #node_type_ids = node_population.type_ids
+ node_type_ids = np.unique(node_population.type_ids)
+ # TODO: Verify all the node_type_ids are in the table
+ node_types_table = node_population.types_table
+
+        # TODO: Convert model_type to an enum
+ if network.has_component('morphologies_dir'):
+ morph_dir = network.get_component('morphologies_dir')
+ if morph_dir is not None and 'morphology' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ if node_type['morphology'] is None:
+ continue
+
+ # TODO: See if absolute path is stored in csv
+ swc_path = os.path.join(morph_dir, node_type['morphology'])
+
+                    # According to the SONATA format the .swc extension is optional, so append it if required.
+ if not os.path.exists(swc_path) and not swc_path.endswith('.swc'):
+ swc_path += '.swc'
+ if not os.path.exists(swc_path):
+ network.io.log_exception('Could not find node morphology file {}.'.format(swc_path))
+
+ node_type['morphology'] = swc_path
+
+ if 'dynamics_params' in node_types_table.columns and 'model_type' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ dynamics_params = node_type['dynamics_params']
+ if isinstance(dynamics_params, dict):
+ continue
+
+ if dynamics_params is None:
+ continue
+
+ model_type = node_type['model_type']
+ if model_type == 'biophysical':
+ params_dir = network.get_component('biophysical_neuron_models_dir')
+ elif model_type == 'point_process':
+ params_dir = network.get_component('point_neuron_models_dir')
+ elif model_type == 'point_soma':
+ params_dir = network.get_component('point_neuron_models_dir')
+ elif model_type == 'population':
+ params_dir = network.get_component('population_models_dir')
+ else:
+ # Not sure what to do in this case, throw Exception?
+ params_dir = network.get_component('custom_neuron_models')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ node_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ network.io.log_exception('Could not find node dynamics_params file {}.'.format(params_path))
+
+ # TODO: Use adaptor to validate model_type and model_template values
+
+ @classmethod
+ def create_adaptor(cls, node_group, network):
+ prop_map = cls(network)
+ return cls.patch_adaptor(prop_map, node_group, network)
+
+ @classmethod
+ def patch_adaptor(cls, adaptor, node_group, network):
+ adaptor.network = network
+
+ # Use node_id if the user hasn't specified a gid table
+ if not node_group.has_gids:
+ adaptor.gid = types.MethodType(NodeAdaptor.node_id, adaptor)
+
+ # dynamics_params
+ if node_group.has_dynamics_params:
+ adaptor.dynamics_params = types.MethodType(group_dynamics_params, adaptor)
+ elif 'dynamics_params' in node_group.all_columns:
+ adaptor.dynamics_params = types.MethodType(types_dynamics_params, adaptor)
+ else:
+ adaptor.dynamics_params = types.MethodType(none_function, adaptor)
+
+ if 'model_template' not in node_group.all_columns:
+ adaptor.model_template = types.MethodType(none_function, adaptor)
+
+ if 'model_processing' not in node_group.all_columns:
+ adaptor.model_processing = types.MethodType(empty_list, adaptor)
+
+ return adaptor
+
+ def get_node(self, sonata_node):
+ return SonataBaseNode(sonata_node, self)
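+
+# Typical usage (sketch): build one adaptor per node group, then wrap each node:
+#     adaptor = NodeAdaptor.create_adaptor(node_group, network)
+#     base_node = adaptor.get_node(sonata_node)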
+
+
+def none_function(self, node):
+ return None
+
+
+def empty_list(self, node):
+ return []
+
+
+def types_dynamics_params(self, node):
+ return node['dynamics_params']
+
+
+def group_dynamics_params(self, node):
+ return node.dynamics_params
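+
+# Note on the two dynamics_params accessors: types_dynamics_params returns the value
+# merged in from the node-types CSV, while group_dynamics_params reads the per-node
+# values stored in the HDF5 node group (see patch_adaptor above).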
+
diff --git a/bmtk-vb/bmtk/simulator/core/sonata_reader/node_adaptor.pyc b/bmtk-vb/bmtk/simulator/core/sonata_reader/node_adaptor.pyc
new file mode 100644
index 0000000..1c82499
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/core/sonata_reader/node_adaptor.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/filternet/__init__.py b/bmtk-vb/bmtk/simulator/filternet/__init__.py
new file mode 100644
index 0000000..9e6712b
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/__init__.py
@@ -0,0 +1,5 @@
+from bmtk.simulator.filternet.filternetwork import FilterNetwork
+from bmtk.simulator.filternet.filtersimulator import FilterSimulator
+from bmtk.simulator.filternet.config import Config
+
+import bmtk.simulator.filternet.default_setters
diff --git a/bmtk-vb/bmtk/simulator/filternet/cell.py b/bmtk-vb/bmtk/simulator/filternet/cell.py
new file mode 100644
index 0000000..240cab5
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/cell.py
@@ -0,0 +1,29 @@
+from bmtk.simulator.filternet.pyfunction_cache import py_modules
+
+
+class Cell(object):
+ def __init__(self, node):
+ self._node = node
+ self._gid = node.gid
+ self._node_id = node.node_id
+ self._lgn_cell_obj = None
+
+ @property
+ def gid(self):
+ return self._gid
+
+ @property
+ def lgn_cell_obj(self):
+ return self._lgn_cell_obj
+
+ def build(self):
+ cell_loaders = self._node.model_processing
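+        # model_processing lists registered cell-processor names for this node; at most one is supported here.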
+        if len(cell_loaders) > 1:
+            raise Exception('Cannot use more than one model_processing method per cell.')
+        elif len(cell_loaders) == 1:
+ model_processing_fnc = py_modules.cell_processor(cell_loaders[0])
+ else:
+ model_processing_fnc = py_modules.cell_processor('default')
+
+ self._lgn_cell_obj = model_processing_fnc(self._node)
diff --git a/bmtk-vb/bmtk/simulator/filternet/cell_models.py b/bmtk-vb/bmtk/simulator/filternet/cell_models.py
new file mode 100644
index 0000000..a415e9f
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/cell_models.py
@@ -0,0 +1 @@
+from bmtk.simulator.filternet.lgnmodel.cellmodel import *
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/config.py b/bmtk-vb/bmtk/simulator/filternet/config.py
new file mode 100644
index 0000000..b10ee10
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/config.py
@@ -0,0 +1,8 @@
+import os
+import json
+
+from bmtk.simulator.core.config import ConfigDict
+
+
+class Config(ConfigDict):
+ pass
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/default_setters/__init__.py b/bmtk-vb/bmtk/simulator/filternet/default_setters/__init__.py
new file mode 100644
index 0000000..6ec46cc
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/default_setters/__init__.py
@@ -0,0 +1 @@
+from bmtk.simulator.filternet.default_setters.cell_loaders import *
diff --git a/bmtk-vb/bmtk/simulator/filternet/default_setters/cell_loaders.py b/bmtk-vb/bmtk/simulator/filternet/default_setters/cell_loaders.py
new file mode 100644
index 0000000..c0c74ad
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/default_setters/cell_loaders.py
@@ -0,0 +1,9 @@
+from bmtk.simulator.filternet.pyfunction_cache import add_cell_processor
+
+
+def default_cell_loader(node):
+    # Placeholder default: a real processor should be registered under the
+    # 'default' key; raising beats silently exiting the interpreter.
+    raise NotImplementedError('No default cell processor for model_template {}'.format(node.model_template))
+
+add_cell_processor(default_cell_loader, 'default', overwrite=False)
diff --git a/bmtk-vb/bmtk/simulator/filternet/filternetwork.py b/bmtk-vb/bmtk/simulator/filternet/filternetwork.py
new file mode 100644
index 0000000..170a9e7
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/filternetwork.py
@@ -0,0 +1,28 @@
+from bmtk.simulator.core.simulator_network import SimNetwork
+from bmtk.simulator.filternet.cell import Cell
+from bmtk.simulator.filternet.pyfunction_cache import py_modules
+
+
+class FilterNetwork(SimNetwork):
+ def __init__(self):
+ super(FilterNetwork, self).__init__()
+
+ self._local_cells = []
+
+ def cells(self):
+ return self._local_cells
+
+ def build(self):
+ self.build_nodes()
+
+ def set_default_processing(self, processing_fnc):
+ py_modules.add_cell_processor('default', processing_fnc)
+
+ def build_nodes(self):
+ for node_pop in self.node_populations:
+ for node in node_pop.get_nodes():
+ cell = Cell(node)
+ cell.build()
+ self._local_cells.append(cell)
+
+
diff --git a/bmtk-vb/bmtk/simulator/filternet/filters.py b/bmtk-vb/bmtk/simulator/filternet/filters.py
new file mode 100644
index 0000000..ae53df5
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/filters.py
@@ -0,0 +1,3 @@
+from bmtk.simulator.filternet.lgnmodel.temporalfilter import *
+from bmtk.simulator.filternet.lgnmodel.spatialfilter import *
+from bmtk.simulator.filternet.lgnmodel.linearfilter import *
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/filtersimulator.py b/bmtk-vb/bmtk/simulator/filternet/filtersimulator.py
new file mode 100644
index 0000000..7d6742a
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/filtersimulator.py
@@ -0,0 +1,199 @@
+import csv
+
+from bmtk.simulator.core.simulator import Simulator
+import bmtk.simulator.utils.simulation_inputs as inputs
+from bmtk.simulator.filternet.config import Config
+from bmtk.simulator.filternet.lgnmodel.movie import *
+from bmtk.simulator.filternet import modules as mods
+from bmtk.simulator.filternet.io_tools import io
+from six import string_types
+
+
+class FilterSimulator(Simulator):
+ def __init__(self, network, dt, tstop):
+ super(FilterSimulator, self).__init__()
+ self._network = network
+ self._dt = dt
+        self._tstop = tstop/1000.0  # tstop is assumed to be in ms; movies are evaluated in seconds
+
+ self.rates_csv = None
+ self._movies = []
+
+ def add_movie(self, movie_type, params):
+ # TODO: Move this into its own factory
+ movie_type = movie_type.lower() if isinstance(movie_type, string_types) else 'movie'
+ if movie_type == 'movie' or not movie_type:
+ raise NotImplementedError
+
+ elif movie_type == 'full_field':
+ raise NotImplementedError
+
+ elif movie_type == 'full_field_flash':
+ raise NotImplementedError
+
+        elif movie_type == 'graiting':  # note: config files use the spelling 'graiting'
+ init_params = FilterSimulator.find_params(['row_size', 'col_size', 'frame_rate'], **params)
+ create_params = FilterSimulator.find_params(['gray_screen_dur', 'cpd', 'temporal_f', 'theta', 'contrast'],
+ **params)
+            gm = GratingMovie(**init_params)
+            grating_movie = gm.create_movie(t_min=0.0, t_max=self._tstop, **create_params)
+            self._movies.append(grating_movie)
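+            # e.g. (illustrative values only):
+            #   params = {'row_size': 120, 'col_size': 240, 'frame_rate': 1000.0,
+            #             'gray_screen_dur': 0.5, 'cpd': 0.04, 'temporal_f': 4.0,
+            #             'theta': 45.0, 'contrast': 0.8}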
+
+ else:
+ raise Exception('Unknown movie type {}'.format(movie_type))
+
+ def run(self):
+ for mod in self._sim_mods:
+ mod.initialize(self)
+
+ io.log_info('Evaluating rates.')
+ for cell in self._network.cells():
+ for movie in self._movies:
+ ts, f_rates = cell.lgn_cell_obj.evaluate(movie, downsample=1, separable=True)
+
+            for mod in self._sim_mods:  # note: with multiple movies, only the last evaluation is saved
+ mod.save(self, cell.gid, ts, f_rates)
+
+ """
+ if self.rates_csv is not None:
+ print 'saving {}'.format(cell.gid)
+ for t, f in zip(t, f_tot):
+ csv_writer.writerow([t, f, cell.gid])
+ csv_fhandle.flush()
+ """
+ io.log_info('Done.')
+ for mod in self._sim_mods:
+ mod.finalize(self)
+
+ """
+ def generate_spikes(LGN, trials, duration, output_file_name):
+ # f_tot = np.loadtxt(output_file_name + "_f_tot.csv", delimiter=" ")
+ # t = f_tot[0, :]
+
+ f = h5.File(output_file_name + "_f_tot.h5", 'r')
+ f_tot = np.array(f.get('firing_rates_Hz'))
+
+ t = np.array(f.get('time'))
+ # For h5 files that don't have time explicitly saved
+ t = np.linspace(0, duration, f_tot.shape[1])
+
+
+ #create output file
+ f = nwb.create_blank_file(output_file_name + '_spikes.nwb', force=True)
+
+ for trial in range(trials):
+ for counter in range(len(LGN.nodes())):
+ try:
+ spike_train = np.array(f_rate_to_spike_train(t*1000., f_tot[counter, :], np.random.randint(10000), 1000.*min(t), 1000.*max(t), 0.1))
+ except:
+ spike_train = 1000.*np.array(pg.generate_inhomogenous_poisson(t, f_tot[counter, :], seed=np.random.randint(10000))) #convert to milliseconds and hence the multiplication by 1000
+
+ nwb.SpikeTrain(spike_train, unit='millisecond').add_to_processing(f, 'trial_%s' % trial)
+ f.close()
+
+ """
+
+
+ @staticmethod
+ def find_params(param_names, **kwargs):
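+        # Returns the subset of kwargs whose keys appear in param_names; absent keys
+        # are skipped, e.g. find_params(['row_size'], row_size=120, theta=45.0) -> {'row_size': 120}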
+ ret_dict = {}
+ for pn in param_names:
+ if pn in kwargs:
+ ret_dict[pn] = kwargs[pn]
+
+ return ret_dict
+
+ @classmethod
+ def from_config(cls, config, network):
+        if not isinstance(config, Config):
+            try:
+                config = Config.load(config, False)
+            except Exception as e:
+                network.io.log_exception('Could not convert {} (type "{}") to a config: {}'.format(config, type(config), e))
+
+ if not config.with_networks:
+ network.io.log_exception('Could not find any network files. Unable to build network.')
+
+ sim = cls(network=network, dt=config.dt, tstop=config.tstop)
+
+ network.io.log_info('Building cells.')
+ network.build_nodes()
+
+ # TODO: Need to create a gid selector
+ for sim_input in inputs.from_config(config):
+ if sim_input.input_type == 'movie':
+ sim.add_movie(sim_input.module, sim_input.params)
+ else:
+ raise Exception('Unable to load input type {}'.format(sim_input.input_type))
+
+
+ """
+ node_set = network.get_node_set(sim_input.node_set)
+ if sim_input.input_type == 'spikes':
+ spikes = spike_trains.SpikesInput.load(name=sim_input.name, module=sim_input.module,
+ input_type=sim_input.input_type, params=sim_input.params)
+ io.log_info('Build virtual cell stimulations for {}'.format(sim_input.name))
+ network.add_spike_trains(spikes, node_set)
+
+ elif sim_input.module == 'IClamp':
+ # TODO: Parse from csv file
+ amplitude = sim_input.params['amp']
+ delay = sim_input.params['delay']
+ duration = sim_input.params['duration']
+ gids = sim_input.params['node_set']
+ sim.attach_current_clamp(amplitude, delay, duration, node_set)
+
+ elif sim_input.module == 'xstim':
+ sim.add_mod(mods.XStimMod(**sim_input.params))
+
+ else:
+ io.log_exception('Can not parse input format {}'.format(sim_input.name))
+ """
+
+
+ rates_csv = config.output.get('rates_csv', None)
+ rates_h5 = config.output.get('rates_h5', None)
+ if rates_csv or rates_h5:
+ sim.add_mod(mods.RecordRates(rates_csv, rates_h5, config.output_dir))
+
+ spikes_csv = config.output.get('spikes_csv', None)
+ spikes_h5 = config.output.get('spikes_h5', None)
+ spikes_nwb = config.output.get('spikes_nwb', None)
+ if spikes_csv or spikes_h5 or spikes_nwb:
+ sim.add_mod(mods.SpikesGenerator(spikes_csv, spikes_h5, spikes_nwb, config.output_dir))
+
+ # Parse the "reports" section of the config and load an associated output module for each report
+ """
+ sim_reports = reports.from_config(config)
+ for report in sim_reports:
+ if isinstance(report, reports.SpikesReport):
+ mod = mods.SpikesMod(**report.params)
+
+ elif isinstance(report, reports.MembraneReport):
+ if report.params['sections'] == 'soma':
+ mod = mods.SomaReport(**report.params)
+
+ else:
+ #print report.params
+ mod = mods.MembraneReport(**report.params)
+
+ elif isinstance(report, reports.ECPReport):
+ mod = mods.EcpMod(**report.params)
+ # Set up the ability for ecp on all relevant cells
+ # TODO: According to spec we need to allow a different subset other than only biophysical cells
+ for gid, cell in network.cell_type_maps('biophysical').items():
+ cell.setup_ecp()
+ else:
+ # TODO: Allow users to register customized modules using pymodules
+ io.log_warning('Unrecognized module {}, skipping.'.format(report.module))
+ continue
+
+ sim.add_mod(mod)
+ """
+ return sim
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/io_tools.py b/bmtk-vb/bmtk/simulator/filternet/io_tools.py
new file mode 100644
index 0000000..dfdcfaa
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/io_tools.py
@@ -0,0 +1 @@
+from bmtk.simulator.core.io_tools import io
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/__init__.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/__init__.py
new file mode 100644
index 0000000..72b9443
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/__init__.py
@@ -0,0 +1,7 @@
+__version__ = '0.1.0'
+
+# from lgnmodel import lgnmodel
+# from lgnmodel.dev import mask
+# from lgnmodel.dev import movie
+# from lgnmodel import cellmodel
+# from lgnmodel.dev import boundcellmodel
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/sOFF_cell_data.csv b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/sOFF_cell_data.csv
new file mode 100755
index 0000000..f6438be
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/sOFF_cell_data.csv
@@ -0,0 +1,29 @@
+Mouse_id,Shank,Clu,Area,Layer,WVF_ratio,WVF_duration,Bl_ctr_row,Bl_ctr_col,Wh_ctr_row,Wh_ctr_col,Bl_rad,Wh_rad,Bl_lat,Wh_lat,Bl_mfr,Wh_mfr,Bl_SI,Wh_SI,Area_Overlap,Area_Union,OverlapIdx,PSTH_correln,Subfield_sep,Bl_stimc,Wh_stimc,Bl_chk_num,Wh_chk_num,Spont,Spont_cyc,Pref_ori,Pref_SF,Pref_TF,Max_rate,Max_rate_cyc,F0_pref,Sharp,CV,OSI,OSI_wnull,DSI_sev,DSI_chen,f1f0b,f1f0,Pval,F0_TF1,F0_TF2,F0_TF4,F0_TF8,F0_TF15,F1_TF1,F1_TF2,F1_TF4,F1_TF8,F1_TF15
+60,3,12,1,NaN,0.227894433917728,0.275,8,13,10,9,3.31738342162619,0,94.5,NaN,21.0980965620245,9.81223461549439,0.312689578226931,NaN,0,56.309018948858,0,NaN,NaN,224,154,6,7,1.15555555555555,1.17037037037037,315,0.02,2,50,22.2222222222222,3.22222222222222,0.605603448275862,0.262695167743211,0.372781065088757,0.620071684587814,0.0265486725663716,0.0517241379310343,1.06592499221608,1.6739255292924,0.0435181045378624,2.16666666666667,3.22222222222222,2.22222222222222,1.61111111111111,0.833333333333333,2.71549870589566,3.4346471971407,3.7301484517374,2.33418898771623,1.50047031747581
+63,3,12,1,NaN,0.264513134151242,0.3,3,9,3,9,4.03491385399393,0,73.5,188.5,127.560438680997,48.7988511553626,0.342208990487044,0.322079894957013,0,51.146792844982,0,-0.436401558435213,0,147,147,5,11,9.35,9.16388888888889,135,0.04,1,75,54.1666666666667,17.625,0.700945626477541,0.110300729852412,0.254262416604892,0.464038359083644,0.231441048034935,0.375886524822695,1.108780641646,2.30965632673797,0.000512599358863601,17.625,11.2916666666667,15.4166666666667,8.875,2.875,19.5422588090107,11.9779602766624,14.173133697861,7.52200682626415,5.00527135935301
+72,2,25,1,NaN,0.23674463616793,0.375,6,14,15,19,11.6876336352714,0,144.5,NaN,23.6240071690253,18.4133737675155,0.356643544987323,NaN,0,454.751365334866,0,NaN,NaN,240,339,9,4,3.3,3.25,270,0.08,8,62.5,37.5,14.6666666666667,0.747422680412371,0.0658453670825114,0.193220338983051,0.422330097087379,0.20136518771331,0.335227272727273,0.352768488949166,0.453191635438344,0.000689821836055592,13.0833333333333,9.75,11.8333333333333,14.6666666666667,8.58333333333333,3.03189515980365,2.35162873257691,3.71804674030783,5.1739378379211,2.90346262297405
+72,3,2,1,NaN,0.17161401270352,0.45,7,14,5,32,3.2114115745907,0,106.5,NaN,10.7361652151892,6.0966299947273,0.357411817046969,NaN,0,37.6978353638313,0,NaN,NaN,241,563,7,6,2.3,0.25,0,0.16,8,37.5,33.3333333333333,6.08333333333333,0.415239726027397,0.345586939365797,0.654390934844193,0.759368836291913,0.309417040358744,0.472602739726027,0.459642275182134,0.479341229832797,0.0120917698906574,3.91666666666667,4.33333333333333,3.625,6.08333333333333,1.33333333333333,1.12042247030086,2.64044245685633,1.30729519345545,2.79615717402465,0.992882005175792
+72,3,15,1,NaN,0.191960207571084,0.525,7,14,14,2,3.8287025341613,0,128.5,NaN,25.8455956185529,12.3750239323341,0.464097667283985,NaN,0,50.2637804851085,0,NaN,NaN,241,32,8,5,0.1,0,180,0.16,8,25,12.5,2.16666666666667,0.432692307692308,0.461412217618803,0.664,0.816593886462882,0,0,0.527894845241589,0.527894845241589,0.0354251640054324,1.375,1.66666666666667,1.54166666666667,2.16666666666667,0.291666666666667,1.17481549663973,1.67067644404671,1.08148048724396,1.14377216469011,0.462933359250596
+72,3,19,1,NaN,0.193829748450602,0.45,7,12,17,18,3.80888656544766,0,95.5,NaN,15.6969845209821,9.88260583506573,0.397834985620538,NaN,0,61.6070931080991,0,NaN,NaN,205,323,6,2,1.9,1.75,45,0.64,15,75,16.6666666666667,3.875,0.75,0.0396701498587432,0.166144200626959,0.396825396825397,0.207792207792208,0.344086021505376,0.516975271992441,0.942719613633275,0.00462256555845095,2.79166666666667,3.20833333333333,3.29166666666667,2.91666666666667,3.875,1.12745038398484,0.965812716058466,1.85618045420095,1.42340203076708,2.00327917897071
+72,3,24,1,NaN,0.214114220695846,0.45,7,12,18,10,2.81304915787261,0,117.5,NaN,18.7368033776989,7.57736847420232,0.338538785120179,NaN,0,30.7695845402083,0,NaN,NaN,205,180,7,10,1.2,1.75,45,0.16,2,37.5,16.6666666666667,4.33333333333333,0.681490384615384,0.104771405575162,0.27217125382263,0.442622950819672,0.350649350649351,0.519230769230769,0.45560461795616,0.764240004313558,0.0375202720639944,3.54166666666667,4.33333333333333,3.08333333333333,2.29166666666667,2.20833333333333,2.00193125072158,1.97428667781003,1.89661235690541,1.16926096300871,1.4021824426304
+72,4,22,1,NaN,0.243258295337443,0.5,16,1,5,26,1.89448348117648,0,84.5,NaN,18.1616089127425,15.941831811114,0.313254251849423,NaN,0,32.7393813430031,0,NaN,NaN,16,455,6,5,0.7,0.5,135,0.02,4,62.5,25,6.33333333333333,0.689144736842105,0.168836493478692,0.208747514910537,0.496839443742099,0.027027027027027,0.0526315789473684,1.49136304384079,1.61919416188429,0.00797095394639537,3.875,4.33333333333333,6.33333333333333,3.70833333333333,5.04166666666667,5.18251916951976,6.44403480524721,9.44529927765837,5.83524176474689,7.47958040965291
+72,4,26,1,NaN,0.251147052076164,0.425,6,8,7,16,3.36590940295369,0,125.5,NaN,31.2988506309783,13.1607172188833,0.435950524637569,NaN,0,36.6789749485927,0,NaN,NaN,132,277,9,4,0,0,270,0.08,4,37.5,20.8333333333333,4.08333333333333,0.514030612244898,0.245707692206953,0.561752988047809,0.686609686609687,0.324324324324324,0.489795918367347,1.42520239084387,1.42520239084387,0.0153220356313431,2.91666666666667,2.54166666666667,4.08333333333333,3.25,0.541666666666667,3.15832004061644,3.12001867713679,5.81957642927912,4.76093191575991,0.932792515297761
+72,4,29,1,NaN,0.173264409742832,0.45,6,9,9,20,4.22853114375315,0,132.5,NaN,28.993229578405,10.4456145698005,0.445186921014291,NaN,0,60.180688526765,0,NaN,NaN,150,351,8,3,1.2,1.25,135,0.04,4,37.5,16.6666666666667,4,0.638020833333333,0.0560388988479427,0.207547169811321,0.405660377358491,0.288590604026846,0.447916666666667,1.43135854910831,2.08197607143027,0.00448418831120022,2.91666666666667,3.75,4,1.91666666666667,1.29166666666667,3.20708787843897,5.32832274286747,5.72543419643325,2.42846036962814,1.69737352881297
+72,5,16,1,NaN,0.32528169876946,0.45,12,17,2,20,9.76242188228036,0,10.5,NaN,16.2352767444367,16.1883204032263,0.318095563667322,NaN,0.203772083047737,434.645853140823,0.00046882325363338,NaN,NaN,300,344,2,5,3.2,2.75,135,0.04,4,87.5,58.3333333333333,16.4583333333333,0.715822784810127,0.0625111146961294,0.202435312024353,0.417130144605117,0.240188383045526,0.387341772151899,1.54763513865801,1.85810297802405,9.82286176845653e-05,13.8333333333333,15.75,16.4583333333333,10.2083333333333,15,14.5196957230697,19.9547738711671,25.4714949904131,17.8859216430096,23.7133703939669
+72,5,25,1,NaN,0.154176850826486,0.525,6,11,8,1,3.71986146157398,0,103.5,NaN,39.9487382339472,18.7840458760451,0.53388333550947,NaN,0,47.8864395162182,0,NaN,NaN,186,8,7,3,0.8,0,0,0.08,2,50,37.5,12.4166666666667,0.690855704697987,0.0835000262684289,0.337822671156005,0.490940465918896,0.37962962962963,0.550335570469799,0.91276594444903,0.91276594444903,0.00130098648313454,4.54166666666667,12.4166666666667,12.25,5.70833333333333,8.375,4.13731107678691,11.3335104769088,13.4014603108746,7.15836649799689,8.40963025038677
+72,6,18,1,NaN,0.192842487635676,0.425,6,9,7,24,2.86634576035391,0,92.5,NaN,46.4315953853779,13.4560643344102,0.321303060480483,NaN,0,25.81113051938,0,NaN,NaN,150,421,6,3,2,1,180,0.02,8,62.5,33.3333333333333,7.54166666666667,0.785714285714286,0.0543590713583877,-0.0189701897018971,0.234215885947047,0.194719471947195,0.325966850828729,1.72875777040663,1.99302647416306,0.0121928458017285,4.79166666666667,7.33333333333333,4.625,7.54166666666667,5.04166666666667,6.49902397742458,11.0959764435025,7.67992950266336,13.0377148518167,8.94178529614187
+72,6,27,1,NaN,0.163265955223293,0.425,6,9,11,31,5.26067852910692,0,187.5,NaN,28.1086462945887,18.259239443644,0.405490551796678,NaN,0,120.293453025847,0,NaN,NaN,150,551,11,10,0.9,1.75,135,0.02,1,62.5,33.3333333333333,6.875,0.756060606060606,0.0708240362608107,0.217712177121771,0.445026178010471,0.195652173913044,0.327272727272727,1.21203148773004,1.62589589817444,0.000917138003879289,6.875,5.54166666666667,3.33333333333333,3.875,2.83333333333333,8.332716478144,5.88975257891834,3.79454087673417,3.75877880958225,2.74849780724932
+73,5,9,1,NaN,0.268914895339528,0.375,6,10,4,21,5.3138386985443,0,94.5,NaN,13.3907614920431,8.32550972859402,0.351692126044824,NaN,0,104.874698741902,0,NaN,NaN,168,364,6,2,0.1,0.25,90,0.04,1,50,33.3333333333333,7.83333333333333,0.575581395348837,0.226393360499366,0.305555555555555,0.602385685884692,-0.0669975186104219,-0.143617021276596,1.35611352253308,1.40082056173747,0.000140317384885218,7.83333333333333,7.04166666666667,2.625,0.5,0.291666666666667,10.6228892598424,10.5808897672901,4.49326770686497,0.887518649793165,0.522100042786368
+73,6,14,1,NaN,0.242726417237974,0.3,9,11,3,6,9.14964940445724,0,65.5,NaN,45.3938924646263,25.8702040095581,0.334086340574054,NaN,0,278.556437526256,0,NaN,NaN,189,93,5,10,4.7,4.25,270,0.02,15,150,141.666666666667,24.0416666666667,0.868381240544629,0.0272134135049013,0.0382366171839857,0.370991468078847,-0.00944206008583688,-0.0190641247833622,1.76909268952665,2.14898206706711,1.04506321347993e-05,6.625,8.375,16.7083333333333,21.7083333333333,24.0416666666667,8.51928922805708,12.8457058640276,27.4766733503038,37.16245657985,42.5319367440365
+75,2,8,1,NaN,0.189102268383661,0.3,7,11,3,26,3.7946689474635,0,68.5,NaN,18.1486917869246,13.3610202594852,0.38722203250154,NaN,0,67.5844075441661,0,NaN,NaN,187,453,5,9,3.3,4,225,0.32,4,50,25,6.29166666666667,0.779801324503311,0.0309498508218965,0.100182149362477,0.352555701179554,0.170542635658915,0.291390728476821,0.425835896651261,1.16911309807892,0.000120970070208297,4.5,4.08333333333333,6.29166666666667,4.54166666666667,5.70833333333333,1.81889501379154,1.53151973185195,2.67921751643085,1.86448756331264,2.16687851095823
+75,3,6,1,NaN,0.156525539257234,0.3,6,10,1,16,4.3619242059316,0,71.5,NaN,31.3866228823021,18.946005773115,0.527174587558903,NaN,0,77.3654675304575,0,NaN,NaN,168,271,5,3,5.8,5.25,90,0.04,15,75,50,11.1666666666667,0.625932835820895,0.0860841200396548,0.271648873072361,0.466550825369244,0.270142180094787,0.425373134328358,1.6825021632129,3.17542661789476,0.000773426379641316,7.20833333333333,6.16666666666667,6.95833333333333,7.04166666666667,11.1666666666667,9.23802191648321,8.63768439604191,11.3629232423904,11.461549916428,18.787940822544
+75,3,10,1,NaN,0.205059107016681,0.3,7,11,13,4,3.35626033560117,0,102.5,NaN,27.7293416501643,9.88926872507762,0.5511360577852,NaN,0,37.8336834191965,0,NaN,NaN,187,67,7,4,7.9,8.75,45,0.02,15,125,91.6666666666667,20,0.761197916666666,0.0682387190512284,0.15872057936029,0.438582360048329,0.0750279955207168,0.139583333333334,1.63914777665678,2.91404049183427,8.49051217147285e-05,7.625,11.0833333333333,12.8333333333333,12.625,20,10.5593851547536,16.3644705428685,19.8691468326841,19.4162516634255,32.7829555331355
+75,3,14,1,NaN,0.193391587044465,0.275,7,8,17,27,5.13169011499309,0,72.5,NaN,22.85892380357,16.9770380143854,0.467112310488397,NaN,0,112.753885953081,0,NaN,NaN,133,485,5,2,6.6,6.25,225,0.08,4,87.5,62.5,17.4166666666667,0.634569377990431,0.0609248992044004,0.207220216606498,0.359019264448336,0.436426116838488,0.607655502392345,1.0870687813283,1.69550280072847,0.0313438418094254,11.75,12.875,17.4166666666667,13.0416666666667,7.08333333333333,9.36215604059938,9.84202467011974,18.9331146081346,16.3988326909231,8.89559308939555
+75,3,15,1,NaN,0.313001745093912,0.3,8,15,4,25,10.6510472403218,0,86.5,NaN,37.0214120367194,22.9181216449019,0.4520114522557,NaN,0,485.385101819709,0,NaN,NaN,260,436,6,7,5.2,5.75,180,0.32,1,87.5,54.1666666666667,24.1666666666667,0.500215517241379,0.348202850860347,0.688500727802038,0.810451727192205,0.135029354207436,0.237931034482759,0.126202913691368,0.165605633350663,9.89709604734052e-05,24.1666666666667,23.5416666666667,15.8333333333333,17,17.1666666666667,3.04990374754138,3.93465250177409,4.45322723594722,4.14212991396283,4.00548296424652
+77,2,25,1,NaN,0.258379026929274,0.375,10,22,7,7,8.9065730588013,0,187.5,NaN,24.7351110224677,22.4579227596474,0.50715649625158,NaN,0,322.503283436885,0,NaN,NaN,388,115,11,4,5.3,5,180,0.04,4,50,37.5,13.375,0.500389408099688,0.287783111281432,0.539568345323741,0.6751269035533,0.296969696969697,0.457943925233645,0.282369399924872,0.450948146148676,0.00118075407152943,12.4583333333333,13.25,13.375,10.5416666666667,12.6666666666667,4.24022664788561,5.37433560144109,3.77669072399516,3.62812819243134,3.8117929952908
+78,1,14,1,NaN,0.161806152197335,0.3,3,4,7,30,6.62335148075923,0,96.5,NaN,68.5583245747765,33.7420784958353,0.531495707189419,NaN,0,149.908329095452,0,NaN,NaN,57,529,6,11,13.2,16.25,225,0.02,2,87.5,58.3333333333333,20.5,0.715193089430894,0.0761481984225665,0.111236589497459,0.356763383735186,0.185542168674699,0.313008130081301,0.791461156384882,3.81763616609178,0.0017595645200145,13.1666666666667,20.5,14.9583333333333,14.2083333333333,11.75,13.4953037568487,16.2249537058901,14.9736415138084,14.2612878116158,12.0939263761713
+78,2,7,1,NaN,0.239215201127631,0.275,7,9,7,22,6.017889160438,0,59.5,NaN,56.0686653175444,22.7928361604366,0.366315406681982,NaN,0,123.350034271563,0,NaN,NaN,151,385,4,4,10.4,8.25,225,0.08,8,87.5,70.8333333333333,23.375,0.665552584670232,0.162892660368509,0.322333529758397,0.549902152641879,0.133333333333333,0.235294117647059,1.27533261529656,1.9709685872765,2.15623430262214e-05,9.375,11.8333333333333,22.2083333333333,23.375,11.5833333333333,8.63429439311284,13.7068058472998,25.8353763950164,29.810899882557,15.0089329977661
+78,2,10,1,NaN,0.255564925100401,0.3,3,31,16,24,3.96464206223105,0,187.5,NaN,14.4639652570043,12.2962025411232,0.406956185701321,NaN,0,179.794901275787,0,NaN,NaN,543,430,11,2,3,3.75,180,0.32,4,50,29.1666666666667,8.5,0.915441176470588,0.0360452798335642,0.0381679389312977,0.349397590361446,0.0408163265306121,0.0784313725490193,0.358570589289018,0.641652633464558,0.00353540111464831,5.58333333333333,6.08333333333333,8.5,4.33333333333333,2.41666666666667,3.03472779299531,2.19925771875221,3.04785000895665,2.34220359090903,1.89677282603666
+78,2,12,1,NaN,0.22823020436422,0.275,5,10,13,7,5.30569487966127,0,62.5,NaN,56.1729585863476,20.8519488568425,0.38673327032744,NaN,0,95.9087270878015,0,NaN,NaN,167,121,5,3,4.2,4.5,225,0.16,1,50,29.1666666666667,10.9583333333333,0.708650190114068,0.0776988864537472,0.190045248868778,0.450920245398773,0.112050739957717,0.201520912547529,0.746702318947234,1.2669852250524,0.000150046958143631,10.9583333333333,8.25,6.45833333333333,4.16666666666667,6.33333333333333,8.18261291179678,8.00310352210064,8.01003028585456,4.33601505186198,5.82912768418014
+63,2,2,1,NaN,0.119049915500518,0.35,5,7,5,7,5.36848635382528,0,63.5,NaN,61.5955705313946,41.2808571747207,0.366689375305713,NaN,0,90.5427289008778,0,NaN,NaN,113,113,5,11,6.30333333333333,6.41111111111111,270,0.08,2,62.5,41.6666666666667,16.2083333333333,0.686083123425693,0.0849905885078977,0.0843205574912892,0.318818040435459,0.223270440251572,0.365038560411311,0.754052323967631,1.24748945572757,0.00361592014864013,14.4166666666667,16.2083333333333,15.4583333333333,11.5833333333333,10.5833333333333,11.3388618807977,12.221931417642,16.030682212326,12.9797678284226,13.1982179165743
+63,2,6,1,NaN,0.246787403028116,0.275,4,9,4,9,6.01429531768067,0,63.5,NaN,47.5346848059286,54.4310078449639,0.437604558761872,NaN,0,113.636898312955,0,NaN,NaN,148,148,5,11,9.81166666666667,10.1472222222222,0,0.16,4,75,45.8333333333333,23.625,0.845017636684303,0.0868065796926466,0.163673678809646,0.463639355051004,0.0197841726618705,0.0388007054673722,0.158324117572715,0.277524035440218,0.000692696526636554,15.7083333333333,20.4583333333333,23.625,19.2916666666667,14.6666666666667,4.3983451939541,3.38407253347712,3.74040727765539,3.93974714619665,4.81246566442344
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/sON_cell_data.csv b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/sON_cell_data.csv
new file mode 100755
index 0000000..08ee244
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/sON_cell_data.csv
@@ -0,0 +1,23 @@
+Mouse_id,Shank,Clu,Area,Layer,WVF_ratio,WVF_duration,Bl_ctr_row,Bl_ctr_col,Wh_ctr_row,Wh_ctr_col,Bl_rad,Wh_rad,Bl_lat,Wh_lat,Bl_mfr,Wh_mfr,Bl_SI,Wh_SI,Area_Overlap,Area_Union,OverlapIdx,PSTH_correln,Subfield_sep,Bl_stimc,Wh_stimc,Bl_chk_num,Wh_chk_num,Spont,Spont_cyc,Pref_ori,Pref_SF,Pref_TF,Max_rate,Max_rate_cyc,F0_pref,Sharp,CV,OSI,OSI_wnull,DSI_sev,DSI_chen,f1f0b,f1f0,Pval,F0_TF1,F0_TF2,F0_TF4,F0_TF8,F0_TF15,F1_TF1,F1_TF2,F1_TF4,F1_TF8,F1_TF15
+60,2,10,1,NaN,0.195334635936007,0.3,10,17,7,10,0,8.66541108694164,NaN,61.5,14.3369503257721,23.3418634859177,NaN,0.369894205590993,0,795.186592079952,0,NaN,NaN,298,169,6,5,4.55999999999999,4.54814814814815,225,0.64,15,50,27.7777777777778,9.11111111111111,0.510670731707317,0.184133379263178,0.413793103448276,0.575,0.301587301587302,0.463414634146341,0.362847818857151,0.724517560380351,0.00303825454171586,5.55555555555556,4.61111111111111,5.55555555555556,5.5,9.11111111111111,2.18477310973599,2.21429766905465,1.28442570797043,1.66852860111851,3.30594679403182
+72,2,6,1,NaN,0.164650901025171,0.55,14,26,7,11,0,2.67920423277157,NaN,153.5,13.6987630666981,14.536343558531,NaN,0.508970866157443,0,39.1242399451655,0,NaN,NaN,464,187,7,11,2.3,1.75,270,0.04,8,37.5,25,2.66666666666667,0.46484375,0.103773857438895,0.326424870466321,0.46058091286307,0.454545454545455,0.625,1.68037570610877,4.88836569049824,0.0382100920720876,1.04166666666667,2.29166666666667,1.16666666666667,2.66666666666667,1.58333333333333,1.27921707995628,3.50166981622075,2.0656675945114,4.48100188295672,2.67678022646792
+72,2,26,1,NaN,0.3344343145235,0.55,14,13,8,11,0,22.338050703905,NaN,176.5,22.1850478435311,24.8512546002087,NaN,0.523543835583338,409.717734981316,3234.27050213368,0.126680107526882,NaN,NaN,230,188,10,10,5.8,3.5,180,0.32,1,62.5,41.6666666666667,18.8333333333333,0.673119469026549,0.0931761102566501,0.21098459477562,0.399286078531361,0.317784256559767,0.482300884955752,0.304905314300113,0.374503266477313,6.43124629642561e-05,18.8333333333333,14.5833333333333,11.125,17.125,14,5.7423834193188,3.92013733095239,1.51569285463246,2.65128842556021,2.54752025533966
+72,3,20,1,NaN,0.267229517066826,0.525,11,28,4,29,0,5.20905138663336,NaN,8.49999999999999,9.61684879280928,11.3614898494724,NaN,0.351299958425315,0,117.98403608464,0,NaN,NaN,497,508,5,2,1,0.25,180,0.04,8,62.5,50,8.75,0.726785714285714,0.0796959761336222,0.0383189122373301,0.288197621225984,0.193181818181818,0.323809523809524,1.55465310904619,1.60037820048872,0.000505355333596263,4.66666666666667,3.125,5.54166666666667,8.75,8.66666666666667,6.30360793581716,3.76268647580404,8.91959591843672,13.6032147041541,11.6831246427099
+72,3,25,1,NaN,0.167781666216991,0.45,7,3,7,14,0,2.91496304979481,NaN,98.5,11.5325019439142,26.7238773557078,NaN,0.434989832625236,0,33.7582417582418,0,NaN,NaN,43,241,9,6,6.4,4.5,0,0.08,2,75,45.8333333333333,20.3333333333333,0.61859631147541,0.143142512106825,0.369824561403509,0.539250897896357,0.301333333333333,0.463114754098361,1.13193139299252,1.45363820994829,0.000107554353819693,6.70833333333333,20.3333333333333,16.5833333333333,8.33333333333333,10.4166666666667,6.38270556956685,23.0159383241812,23.8107421763215,13.6121294525348,13.2136843916077
+72,3,28,1,NaN,0.285846367748608,0.475,6,23,8,13,0,3.20467198358483,NaN,163.5,11.5653476737154,27.6895556751833,NaN,0.54583551614614,0,41.2298848033254,0,NaN,NaN,402,224,2,10,3.3,1.25,270,0.04,8,100,66.6666666666667,13.9583333333333,0.786940298507462,0.023712659554386,0.112033195020747,0.341538461538462,0.229357798165138,0.373134328358209,1.82736605362479,2.00710697693215,1.97344460780919e-05,10.125,9.91666666666667,12.1666666666667,13.9583333333333,7.16666666666667,13.6431561330579,14.7953235714228,20.2497869884287,25.5069844985128,11.6064123684217
+72,4,6,1,NaN,0.196289462787722,0.45,8,31,7,11,0,3.0384403606745,NaN,76.5,9.73233924408585,13.2693675358585,NaN,0.480610176730089,0,54.4750702014284,0,NaN,NaN,548,187,4,5,1.1,1,225,0.08,1,37.5,20.8333333333333,4.16666666666667,0.555,0.170130788000192,0.36518771331058,0.566433566433566,0.19047619047619,0.32,0.996095214877805,1.31065159852343,0.000711129623582446,4.16666666666667,2.25,3.08333333333333,1.54166666666667,1.79166666666667,4.15039672865752,2.62884608494724,3.55879221873478,2.2213841981537,2.26591428588672
+72,4,9,1,NaN,0.214294572215156,0.475,18,7,6,9,0,4.27177138381067,NaN,135.5,9.52936353633489,16.4833587567275,NaN,0.427509241026204,0,64.1882061600371,0,NaN,NaN,126,150,3,6,0.1,0.25,315,0.02,8,50,20.8333333333333,3.5,0.388392857142857,0.258528435369155,0.615384615384615,0.68503937007874,0.570093457943925,0.726190476190476,1.74310505967557,1.877190064266,0.00505336318565361,1.04166666666667,1.875,2.16666666666667,3.5,1.04166666666667,1.24763858363152,2.38776711999282,3.05709788515879,6.10086770886451,1.7155806622439
+72,4,21,1,NaN,0.217844151005732,0.475,3,31,8,11,0,3.18776066773677,NaN,170.5,12.2025615058367,20.7168108660928,NaN,0.435841801543017,0,45.4411745196453,0,NaN,NaN,543,188,7,10,1.7,2.5,135,0.16,4,62.5,37.5,12.5,0.608333333333334,0.0958757327317662,0.160541586073501,0.369186046511628,0.273885350318471,0.43,0.2603860195393,0.325482524424125,0.00547165806238821,8.375,6.83333333333333,12.5,5.875,4.45833333333333,3.59489882839589,3.85625472720877,3.25482524424125,2.26673443467376,2.10461606793623
+72,4,24,1,NaN,0.10878335818925,0.525,18,29,7,12,0,2.53404897104881,NaN,145.5,10.3511882503664,12.4980165446304,NaN,0.438140238886373,0,30.973356623256,0,NaN,NaN,522,205,5,9,0.1,0,135,0.02,8,37.5,16.6666666666667,3.04166666666667,0.571917808219178,0.0984843365472965,0.247863247863248,0.460122699386503,0.226890756302521,0.36986301369863,1.77737719143541,1.77737719143541,0.00472255324225911,1.66666666666667,1.20833333333333,1.54166666666667,3.04166666666667,0.625,3.07106646108599,1.92539901200301,2.56057988567349,5.40618895728271,1.12497421428348
+72,5,15,1,NaN,0.208480053180709,0.45,8,5,9,12,0,3.84279423632037,NaN,152.5,16.6943191530683,32.5814332173706,NaN,0.40223320762938,0,55.3580825613019,0,NaN,NaN,80,207,7,9,7.9,6.75,180,0.64,1,87.5,50,22.0833333333333,0.75872641509434,0.0549956127678157,0.184357541899441,0.422924901185771,0.177777777777778,0.30188679245283,0.161878539451814,0.233140287797449,0.00279839051556877,22.0833333333333,11.8333333333333,21.0416666666667,15.0416666666667,13.2083333333333,3.57481774622756,3.11748344851772,3.57759609685268,3.40194551628711,3.06055137359331
+72,5,19,1,NaN,0.428997620088616,0.15,10,28,7,11,0,5.33819561166472,NaN,112.5,23.9938894732253,35.949116935107,NaN,0.49103810252798,0,113.229354146859,0,NaN,NaN,496,187,4,7,3,3.5,90,0.02,4,62.5,37.5,11.125,0.410112359550562,0.170485431073922,0.457025920873124,0.55431131019037,0.538904899135447,0.700374531835206,0.875806996894982,1.27781676596153,0.0117853881341222,4.70833333333333,4.79166666666667,11.125,3.91666666666667,0.958333333333333,2.50967638970477,2.53217635520425,9.74335284045668,5.73578914722165,0.959235740480376
+72,6,2,1,NaN,0.190965268808285,0.375,13,21,7,9,0,3.98910700374141,NaN,120.5,13.3659124777888,33.0006033745198,NaN,0.447981807753197,0,52.8448935370465,0,NaN,NaN,373,151,5,8,2.9,2.75,135,0.08,8,62.5,45.8333333333333,14.7916666666667,0.602816901408451,0.162490299501509,0.303948576675849,0.525953721075672,0.163934426229508,0.28169014084507,0.605162911628848,0.743366206326093,0.000537158073086421,3.66666666666667,4.75,5.16666666666667,14.7916666666667,11.25,2.23704014273044,4.48402994329032,6.85878410708804,8.95136806784337,10.3292341381655
+72,6,6,1,NaN,0.197515761108188,0.375,13,11,8,9,0,3.70821867413724,NaN,100.5,9.40118730320646,14.0110191514325,NaN,0.400352438170497,0,66.2259269905145,0,NaN,NaN,193,152,8,7,2,1.5,90,0.04,8,37.5,29.1666666666667,5,0.78125,0.0987026961243827,0.212121212121212,0.493506493506494,0.0434782608695652,0.0833333333333334,1.68961352013503,2.41373360019289,0.00198568320815603,2.20833333333333,2.79166666666667,3,5,2.33333333333333,3.09566678759879,4.06940890689401,5.40946818090603,8.44806760067513,3.39229267789208
+73,4,30,1,NaN,0.331675635535987,0.375,10,27,9,14,0,21.623444609417,NaN,126.5,17.1084521879945,20.9154487535644,NaN,0.525931485916512,323.454219824441,3480.35925442766,0.0929370206288179,NaN,NaN,478,243,4,8,1.4,1,45,0.02,2,62.5,41.6666666666667,11.5833333333333,0.602517985611511,0.164829106773475,0.300584795321637,0.481352992194276,0.302107728337236,0.464028776978417,1.28689372636422,1.40848998397344,3.78393090054118e-05,10.25,11.5833333333333,5.83333333333333,6.08333333333333,3.45833333333333,13.730833515557,14.9065189970522,8.154312506586,8.07544576276451,3.1756889386636
+73,5,15,1,NaN,0.269035257341049,0.425,9,2,8,11,0,21.5824105195833,NaN,76.5,25.5259179241373,34.2880657737409,NaN,0.644064874608546,57.2599553364141,1944.66491255224,0.0294446384910933,NaN,NaN,27,188,4,5,2.3,2,90,0.02,8,100,83.3333333333333,10.9583333333333,0.735245901639344,0.172830579196914,0.101570680628272,0.38139870223504,0.0981210855949896,0.178707224334601,1.84476433113939,2.25661869344028,3.26507399354889e-05,4.375,5.375,7.875,10.9583333333333,6.66666666666667,5.48434521400257,7.56330018968726,13.1400678542315,20.2155424620692,12.0456918874321
+73,5,20,1,NaN,0.35689874574847,0.375,7,22,9,14,0,48.3327076539571,NaN,52.5,33.2042213652416,40.0433630233324,NaN,0.517788682219814,5856.13797068123,8744.33555177217,0.669706455797479,NaN,NaN,385,243,4,4,17.2,16,90,0.02,15,187.5,195.833333333333,36.9166666666667,0.693707674943567,0.106838257011537,0.270250896057348,0.498027613412229,0.166556945358789,0.285553047404063,1.73169924739543,3.05634568365011,1.87970305511782e-06,18.0833333333333,19.0833333333333,24.9166666666667,29.625,36.9166666666667,24.3957816777468,25.610858029386,38.0768763948049,53.4565594288953,63.9285638830148
+73,6,10,1,NaN,0.253528460654241,0.4,1,26,7,11,0,26.5284743983064,NaN,182.5,27.0272474277681,43.0864002273731,NaN,0.433202727809693,76.6183032259491,2659.90492404979,0.0288049029622063,NaN,NaN,451,187,7,11,2.8,4.25,270,0.02,2,62.5,33.3333333333333,8.91666666666667,0.604771784232365,0.242878254862887,0.515044247787611,0.718974358974359,0.0214797136038186,0.0420560747663551,0.824283051631535,1.57496940222454,0.00271873587544628,8.875,8.91666666666667,8.41666666666667,6.875,4.25,8.14317431824067,7.34985721038119,7.45736442349215,8.44350960466603,3.32440853257175
+74,1,15,1,NaN,0.183727313954152,0.15,14,21,7,21,0,28.4981661351972,NaN,58.5,28.4534386038884,41.8461851356699,NaN,0.604351837481679,1143.02553784244,5694.27501271364,0.200732408478761,NaN,NaN,374,367,10,4,6,6.5,45,0.04,1,87.5,58.3333333333333,19.9166666666667,0.622907949790795,0.0651809699658942,0.325017325017325,0.450028232636928,0.489096573208723,0.656903765690377,1.10528441215473,1.64076381680112,0.000190181231333335,19.9166666666667,13.875,5.41666666666667,14.2916666666667,15.9583333333333,22.0135812087484,15.8633444518911,6.53086993544498,21.5396018940375,25.7521269169952
+74,2,14,1,NaN,0.231251511914634,0.325,2,18,7,10,0,5.46824379552042,NaN,153.5,16.1446156076403,18.2826899152246,NaN,0.340041370352017,0,119.750060804387,0,NaN,NaN,308,169,5,9,1.9,1.5,315,0.08,8,62.5,20.8333333333333,5.375,0.531976744186047,0.223628682474149,0.402173913043478,0.60431654676259,0.15695067264574,0.271317829457364,0.322173076149067,0.446885234658383,0.00149819575287729,3.04166666666667,2.125,3.125,5.375,3.375,1.62585277528586,1.79821781640358,1.39978294787706,1.73168028430123,1.53350083156145
+74,3,20,1,NaN,0.377663793477819,0.275,12,11,10,16,0,8.41605810757602,NaN,53.5,35.373197432754,40.4894314308,NaN,0.460420129411097,0.815088332190948,604.727618458001,0.00134786027181849,NaN,NaN,192,280,9,4,6.6,5,45,0.02,4,100,54.1666666666667,17.125,0.811739659367397,0.107705702431509,0.262672811059908,0.52286282306163,0.073107049608355,0.13625304136253,1.25678507377187,1.77504695986336,1.77457002212552e-05,9.5,11.5416666666667,17.125,13.375,13.2916666666667,10.8379556037253,12.8763796531722,21.5224443883432,23.1589205101968,21.360342369141
+78,1,17,1,NaN,0.322199590418021,0.3,2,23,4,3,0,37.4243183273526,NaN,29.5,32.3773236372484,33.0802094356025,NaN,0.608504560227227,2204.67809052115,6874.72668980918,0.320693198434968,NaN,NaN,398,40,8,3,21.7,21.25,225,0.08,4,75,50,18.8333333333333,0.768252212389381,0.0876143581084309,0.21505376344086,0.477638640429338,0.0944309927360775,0.172566371681416,1.09063082238159,-8.49939882269789,0.0312489529126974,14.625,14.1666666666667,18.8333333333333,16.5416666666667,18.75,13.0475087788804,17.0262378450584,20.5402138215199,21.0534416364737,24.8113490972229
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/sus_sus_cells_v3.csv b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/sus_sus_cells_v3.csv
new file mode 100755
index 0000000..240114e
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/sus_sus_cells_v3.csv
@@ -0,0 +1,9 @@
+Mouse_id,Shank,Clu,Area,Layer,WVF_ratio,WVF_duration,Bl_ctr_row,Bl_ctr_col,Wh_ctr_row,Wh_ctr_col,Bl_rad,Wh_rad,Bl_lat,Wh_lat,Bl_mfr,Wh_mfr,Bl_SI,Wh_SI,Area_Overlap,Area_Union,OverlapIdx,PSTH_correln,Subfield_sep,Bl_stimc,Wh_stimc,Bl_chk_num,Wh_chk_num,Spont,Spont_cyc,Pref_ori,Pref_SF,Pref_TF,Max_rate,Max_rate_cyc,F0_pref,Sharp,CV,OSI,OSI_wnull,DSI_sev,DSI_chen,f1f0b,f1f0,Pval,F0_TF1,F0_TF2,F0_TF4,F0_TF8,F0_TF15,F1_TF1,F1_TF2,F1_TF4,F1_TF8,F1_TF15
+60,3,17,1,NaN,0.21988379,0.275,8,11,8,12,6.327865943,6.701237761,193.5,78.5,37.37712406,45.7133433,0.444857153,0.456371041,104.5350786,162.3384262,0.643933054,0.35553935,4,188,206,11,5,10.43555556,10.72222222,225,0.02,15,66.66666667,66.66666667,15.72222222,0.747699387,0.109007838,0.161025641,0.497234173,-0.070607553,-0.151943463,1.587591025,4.992091778,0.001231846,10.44444444,11.44444444,12.44444444,13.44444444,14.44444444,15.44444444,16.44444444,17.44444444,18.44444444,19.44444444
+61,3,2,1,NaN,0.217966289,0.275,10,9,9,11,16.52292262,8.516929279,120.5,87.5,31.04554547,33.76316153,0.510116736,0.418572618,222.5870387,862.9747717,0.257929949,0.668366904,8.94427191,154,189,7,6,2.620952381,2.650793651,180,0.04,8,100,80.95238095,15.23809524,0.428125,0.261312339,0.610062893,0.698736638,0.464530892,0.634375,0.803898246,0.973193337,0.006674439,5.666666667,3.619047619,8.19047619,15.23809524,3.904761905,2.677247676,1.879182797,6.742832439,12.24987804,3.509487327
+73,5,14,1,NaN,0.403757221,0.1,9,12,10,11,23.60903496,10.13939494,99.5,182.5,24.94225614,16.10278371,0.510265909,0.542734315,98.21814403,1975.842041,0.049709512,0.474228965,5.656854249,207,190,7,11,2.2,2,90,0.02,4,87.5,45.83333333,19.75,0.850140713,0.041302918,0.029315961,0.369089626,-0.022680412,-0.046413502,0.326612259,0.36341364,0.000343899,18.83333333,16.5,19.75,18.625,16.95833333,6.325000537,3.901611464,6.450592115,13.91496496,9.869628164
+73,5,24,1,NaN,0.314723274,0.4,9,10,9,10,4.541621803,1.727334002,98.5,131.5,29.1787525,10.99404131,0.451261624,0.411681223,9.37351582,64.79952241,0.144654088,0.382149377,0,171,171,6,8,0.6,0.5,45,0.16,4,37.5,16.66666667,5.291666667,0.431102362,0.486649606,0.687707641,0.828153565,0.016,0.031496063,0.630096291,0.695845469,0.029047724,2.125,5,5.291666667,4.75,0.625,1.586805628,2.579259347,3.334259537,2.30995181,0.564028931
+75,2,14,1,NaN,0.142546732,0.425,8,8,7,5,29.57267929,25.99540155,112.5,169.5,43.1770015,30.45673398,0.643575616,0.507588764,847.4201694,4023.004312,0.210643615,0.578750606,12.64911064,134,79,7,10,11.9,15,90,0.32,4,75,50,25.33333333,0.577919408,0.299900872,0.559974343,0.745643307,0.033135089,0.064144737,0.240499114,0.589610732,0.008356979,21.66666667,16.54166667,25.33333333,16,14.20833333,7.120547858,4.453865703,6.092644228,4.714501425,3.586799092
+75,3,12,1,NaN,0.045806832,0.35,7,9,8,9,3.652408635,1.029283045,74.5,105.5,50.48434961,19.19976844,0.57805952,0.496553402,0.067924028,45.16947841,0.001503759,0.463704924,4,151,152,5,7,3.9,2.75,90,0.16,4,50,45.83333333,17.58333333,0.441646919,0.181271486,0.464006938,0.547915143,0.595463138,0.746445498,0.836846909,0.991992685,0.001800185,11.04166667,11.5,17.58333333,11,8.041666667,6.885552531,6.495778681,14.71455815,11.77583318,5.816827922
+78,2,8,1,NaN,0.337036581,0.3,6,11,7,10,4.865683305,4.845646089,148.5,108.5,25.39909748,20.21502353,0.4358264,0.550653522,31.38090079,116.7614036,0.268760908,0.209763145,5.656854249,186,169,9,5,3,2.25,180,0.16,15,50,33.33333333,13.70833333,0.488981763,0.27877874,0.602923264,0.728106756,0.27027027,0.425531915,0.35278986,0.42206496,0.001579724,5.708333333,7.375,7.25,7.541666667,13.70833333,2.465575603,3.101341176,3.666361028,2.655445907,4.836161001
+78,3,13,1,NaN,0.146747351,0.3,5,7,5,8,5.36647229,1.800870186,79.5,109.5,40.72018458,16.63837297,0.497626777,0.479883721,10.18860415,90.47480487,0.112612613,0.398203514,4,113,131,5,7,4.1,2.25,315,0.08,4,62.5,37.5,13.5,0.602623457,0.21507263,0.367088608,0.613899614,0.033492823,0.064814815,1.364038002,1.636845602,6.12E-06,9.083333333,11.41666667,13.5,11.70833333,5.666666667,6.518626565,11.17129891,18.41451302,18.08773131,7.404301371
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/tOFF_cell_data.csv b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/tOFF_cell_data.csv
new file mode 100755
index 0000000..5a0c882
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/tOFF_cell_data.csv
@@ -0,0 +1,18 @@
+Mouse_id,Shank,Clu,Area,Layer,WVF_ratio,WVF_duration,Bl_ctr_row,Bl_ctr_col,Wh_ctr_row,Wh_ctr_col,Bl_rad,Wh_rad,Bl_lat,Wh_lat,Bl_mfr,Wh_mfr,Bl_SI,Wh_SI,Area_Overlap,Area_Union,OverlapIdx,PSTH_correln,Subfield_sep,Bl_stimc,Wh_stimc,Bl_chk_num,Wh_chk_num,Spont,Spont_cyc,Pref_ori,Pref_SF,Pref_TF,Max_rate,Max_rate_cyc,F0_pref,Sharp,CV,OSI,OSI_wnull,DSI_sev,DSI_chen,f1f0b,f1f0,Pval,F0_TF1,F0_TF2,F0_TF4,F0_TF8,F0_TF15,F1_TF1,F1_TF2,F1_TF4,F1_TF8,F1_TF15
+62,2,2,1,NaN,0.215549039041187,0.275,9,10,1,1,4.40139943774984,0,89.5,NaN,11.4607553064083,7.23264495463483,0.21704329596622,NaN,0,64.052358104672,0,NaN,NaN,171,1,6,3,0.834285714285718,0.742857142857142,135,0.32,4,42.8571428571429,9.52380952380952,2.38095238095238,0.565,0.22221025715911,0.428571428571429,0.63302752293578,0.123595505617978,0.22,0.981766325561374,1.42698593831595,0.0185094155784453,2,2.04761904761905,2.38095238095238,1.38095238095238,1,1.78402771385951,2.02612115745981,2.33753887038422,1.62320220368668,1.0868034839115
+62,3,3,1,NaN,0.218390570366483,0.275,6,9,12,2,5.60685057436199,0,66.5,NaN,21.0423061972472,10.8957804159064,0.265842714788767,NaN,0,107.319963738475,0,NaN,NaN,150,30,5,4,4.40571428571431,4.32063492063492,225,0.08,4,57.1428571428571,28.5714285714286,9,0.680555555555555,0.111063879629385,0.18125,0.427947598253275,0.155963302752294,0.26984126984127,0.921889826679877,1.7731056028748,0.00675805091247906,5.19047619047619,5.47619047619048,9,7.42857142857143,8.95238095238095,3.3031912443396,5.33219727104051,8.2970084401189,6.89913212674369,9.68056390590482
+62,3,5,1,NaN,0.197926357567497,0.275,4,8,2,4,5.57785442804055,0,75.5,NaN,27.7666141318318,13.313964158145,0.283153286968822,NaN,0,104.874698741902,0,NaN,NaN,130,56,5,10,2.79238095238097,2.64444444444444,45,0.08,2,71.4285714285714,23.8095238095238,5.57142857142857,0.757478632478632,0.0486345265149977,0.0758620689655171,0.354735152487961,0.109004739336493,0.196581196581197,0.846904980659619,1.61205882978051,0.00170523629049793,5.38095238095238,5.57142857142857,5.42857142857143,2.80952380952381,1.66666666666667,4.62218839714155,4.71847060653216,5.71702372992491,3.36081934119775,1.9838377906711
+72,2,3,1,NaN,0.219228375065107,0.45,9,20,13,6,3.18097096744501,0,171.5,NaN,12.0243263448861,10.7547248446559,0.22550229499408,NaN,0,53.9316779799677,0,NaN,NaN,351,103,10,6,0.8,0.25,315,0.64,15,37.5,12.5,2.95833333333333,0.60387323943662,0.0556996302464804,0.285067873303167,0.399239543726236,0.543478260869565,0.704225352112676,0.634369758786599,0.692926967289978,0.0188630922948254,2.29166666666667,0.375,1.33333333333333,2.83333333333333,2.95833333333333,1.18786881647464,0.352855348578216,1.28629593926641,1.88896647489622,1.87667720307702
+72,3,6,1,NaN,0.303651340054094,0.475,5,11,10,5,2.88514167165197,0,175.5,NaN,7.61894386746631,5.11392365433839,0.19984893684438,NaN,0,33.554469675194,0,NaN,NaN,185,82,10,4,0.1,0,0,0.02,15,25,8.33333333333333,0.791666666666667,0.592105263157895,0.191162783712058,0.310344827586207,0.5,0.266666666666667,0.421052631578947,1.10025474012538,1.10025474012538,0.00671478395873842,0.333333333333333,0.208333333333333,0.416666666666667,0.625,0.791666666666667,0.657769746816372,0.402959104280664,0.832019116885747,1.15589846012821,0.87103500259926
+72,3,8,1,NaN,0.204060705496308,0.45,9,14,7,7,2.94448241594942,0,91.5,NaN,23.6539725775758,5.40307877080598,0.227148188798464,NaN,0,27.2375351007142,0,NaN,NaN,243,115,6,2,0.2,0,225,0.02,15,50,37.5,5.875,0.554078014184397,0.131560480388299,0.247787610619469,0.492537313432836,0.128,0.226950354609929,1.63830360823029,1.63830360823029,0.00851226121028642,0.958333333333333,1.41666666666667,3,5.25,5.875,1.41866533560815,2.07280982427571,4.97146086971592,8.95908439080849,9.62503369835296
+72,4,11,1,NaN,0.121467735132855,0.425,2,13,8,3,2.53404897104881,0,51.5,NaN,8.72315355165845,7.84027106480733,0.250936754016357,NaN,0,37.6978353638313,0,NaN,NaN,218,44,4,9,0.1,0,90,0.02,4,37.5,12.5,1.75,0.476190476190476,0.212316096893288,0.541284403669725,0.6,0.68,0.80952380952381,1.42202602918566,1.42202602918566,0.00127938050810905,0.708333333333333,1.08333333333333,1.75,1.125,0.333333333333333,0.890715366202271,1.5105587568751,2.4885455510749,1.15101886396704,0.446888264581374
+72,4,13,1,NaN,0.165064781163654,0.4,5,9,14,1,2.72322696550062,0,102.5,NaN,38.6534392818895,6.76555110911782,0.246768639760459,NaN,0,23.2979414951246,0,NaN,NaN,149,14,7,2,0.2,0.25,45,0.04,8,37.5,16.6666666666667,3.79166666666667,0.581043956043956,0.271826494989175,0.510373443983402,0.708641975308642,0.0520231213872832,0.0989010989010988,1.53364475775656,1.64190203477467,0.00668181748704225,2.66666666666667,3.41666666666667,2.45833333333333,3.79166666666667,2.875,3.78601437983779,5.32693847854503,3.57133286263097,5.81506970649364,4.30216788976989
+74,4,26,1,NaN,0.213066995798835,0.375,5,16,7,5,7.32403078236779,0,73.5,NaN,10.6567039774559,8.21846108392401,0.226105620480015,NaN,0,187.945784597696,0,NaN,NaN,275,79,5,4,3.5,1.5,180,0.16,8,87.5,58.3333333333333,21.9583333333333,0.688242784380306,0.17555884492751,0.340966921119593,0.571192052980132,0.110642781875659,0.199240986717268,0.246462806198488,0.264533398913652,0.000186389460257988,11.1666666666667,15.625,10.4583333333333,21.9583333333333,6.54166666666667,3.93733712996556,3.61454481579941,4.59466872918316,5.41191245277514,2.77995569376241
+78,1,2,1,NaN,0.161143475730707,0.275,4,8,11,20,4.15372962981313,0,67.5,NaN,17.4880308364894,10.3810798326495,0.284779069546012,NaN,0,62.4221814402901,0,NaN,NaN,130,353,5,5,0.9,1.75,90,0.64,15,37.5,20.8333333333333,4.04166666666667,0.716494845360825,0.0814333195875668,0.371024734982332,0.484057971014493,0.515625,0.680412371134021,0.639572350375804,1.12797305429914,0.0248302824856542,2.625,3.29166666666667,2.04166666666667,3.58333333333333,4.04166666666667,1.86726284810314,1.71094863790475,1.19978031351727,2.27556720947382,2.58493824943554
+60,3,13,1,NaN,0.327845840428655,0.275,5,11,5,12,13.7959664558697,0,70.5,NaN,21.4829912811113,26.345156682018,0.271986804553519,NaN,0,597.935215689743,0,NaN,NaN,185,203,5,5,3.25333333333332,3.28148148148148,225,0.08,15,66.6666666666667,27.7777777777778,6.66666666666667,0.8125,0.106649406887618,0.126760563380282,0.413249211356467,0.0714285714285714,0.133333333333333,0.380618892553829,0.749577687742771,0.0225767012180496,3.44444444444445,3.22222222222222,4.66666666666667,3.83333333333333,6.66666666666667,1.4754221906457,2.22143660105183,2.05276157715755,2.03956046517732,2.5374592836922
+74,4,27,1,NaN,0.171967475142122,0.425,6,13,5,14,7.83326780213671,0,70.5,NaN,45.5289049512588,48.0564686452002,0.284859348176703,NaN,0,192.768390563159,0,NaN,NaN,222,239,5,7,4.4,5,180,0.04,4,50,33.3333333333333,13.375,0.470015576323987,0.229015558550861,0.42825361512792,0.610900832702498,0.206766917293233,0.342679127725857,0.37341006160471,0.596341441667224,0.0273395799205358,5.91666666666667,7.95833333333333,13.375,6.58333333333333,5.41666666666667,5.24514293869152,3.83526909785319,4.994359573963,3.69495923488874,1.75006614065008
+74,4,30,1,NaN,0.21589970754208,0.375,6,18,6,18,9.14964940445724,0,69.5,NaN,18.8875477576379,20.901651232031,0.23827178541323,NaN,0,263.001835186946,0,NaN,NaN,312,312,5,5,5.5,3.75,135,0.04,1,62.5,29.1666666666667,9.83333333333334,0.511122881355932,0.275602863630443,0.568106312292359,0.709821428571429,0.232375979112272,0.377118644067797,0.456952101507818,0.738634903807158,0.0336237550414378,9.83333333333334,4,4.58333333333333,5.20833333333333,1.45833333333333,4.49336233149354,1.84107264032154,1.91663363575812,3.67203571593219,1.5244219695287
+60,2,17,1,NaN,0.210579601832644,0.275,6,11,6,11,3.94276800806811,0,64.5,NaN,68.3907979469579,33.0741293106546,0.291222198592647,NaN,0,48.8373759037743,0,NaN,NaN,186,186,5,10,10.8777777777778,10.7888888888889,225,0.02,4,116.666666666667,66.6666666666667,16.6111111111111,0.70443143812709,0.110744129662319,0.202010050251256,0.46747149564051,0.0932358318098721,0.17056856187291,1.61675665077938,4.61269311625032,0.00359490460352649,12,10.3888888888889,16.6111111111111,13.6111111111111,14.3333333333333,17.0709984900058,15.2742718731787,26.8561243657241,21.8665174082843,23.9662722866475
+62,2,11,1,NaN,0.16565594104601,0.275,8,8,8,8,5.33414384970586,0,73.5,NaN,36.9172103761967,20.7529629579445,0.279188248021387,NaN,0,89.3880204302739,0,NaN,NaN,134,134,5,11,3.61333333333336,3.39365079365079,135,0.08,8,42.8571428571429,33.3333333333333,6.95238095238095,0.780821917808219,0.0658077460005423,0.17741935483871,0.441095890410959,0.110266159695818,0.198630136986302,1.55334081094115,3.03462656196353,7.45592685020831e-06,2.90476190476191,4.14285714285714,6.80952380952381,6.95238095238095,3.57142857142857,2.59084283971845,5.36811590918796,10.7597210527297,10.7994170665432,4.90613730007544
+62,3,4,1,NaN,0.137829754624094,0.275,6,10,6,10,4.15633140246399,0,70.5,NaN,39.7539863338688,27.282756259574,0.268876729571066,NaN,0,54.2712981183806,0,NaN,NaN,168,168,5,11,4.02666666666669,3.87936507936508,270,0.16,2,71.4285714285714,33.3333333333333,9.95238095238095,0.633771929824561,0.220457590731079,0.312401883830455,0.599268069533394,-0.0434782608695652,-0.0909090909090909,0.971607039042687,1.59225722289536,0.00223569037496728,5.28571428571429,9.95238095238095,7.38095238095238,9.76190476190476,6.61904761904762,3.26139269755511,9.6698033885677,10.6971171002793,12.7083262519171,5.85903230667237
+62,3,9,1,NaN,0.149611066777769,0.275,4,9,4,9,4.26417261458555,0,71.5,NaN,54.7282727298168,33.5884915562644,0.25687180983123,NaN,0,57.1241072810489,0,NaN,NaN,148,148,5,11,3.04952380952383,3.14285714285714,225,0.08,4,71.4285714285714,38.0952380952381,14.9047619047619,0.579472843450479,0.168516811168717,0.306889352818372,0.543956043956044,0.113879003558719,0.204472843450479,0.980894394884076,1.24299573116889,0.00103948076226042,12.1428571428571,10.3809523809524,14.9047619047619,12.2380952380952,5.71428571428572,9.28207380412442,13.8969989848769,14.6199974094627,13.1194669863453,7.60851795281775
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/tON_cell_data.csv b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/tON_cell_data.csv
new file mode 100755
index 0000000..c4713a6
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/tON_cell_data.csv
@@ -0,0 +1,2 @@
+Mouse_id,Shank,Clu,Area,Layer,WVF_ratio,WVF_duration,Bl_ctr_row,Bl_ctr_col,Wh_ctr_row,Wh_ctr_col,Bl_rad,Wh_rad,Bl_lat,Wh_lat,Bl_mfr,Wh_mfr,Bl_SI,Wh_SI,Area_Overlap,Area_Union,OverlapIdx,PSTH_correln,Subfield_sep,Bl_stimc,Wh_stimc,Bl_chk_num,Wh_chk_num,Spont,Spont_cyc,Pref_ori,Pref_SF,Pref_TF,Max_rate,Max_rate_cyc,F0_pref,Sharp,CV,OSI,OSI_wnull,DSI_sev,DSI_chen,f1f0b,f1f0,Pval,F0_TF1,F0_TF2,F0_TF4,F0_TF8,F0_TF15,F1_TF1,F1_TF2,F1_TF4,F1_TF8,F1_TF15
+72,3,27,1,NaN,0.132502493887977,0.475,12,11,7,15,0,3.32064054899067,NaN,143.5,12.0701457420764,16.5396350519869,NaN,0.298956651109785,0,53.6599818692374,0,NaN,NaN,192,259,2,9,2.6,3.75,0,0.32,8,62.5,29.1666666666667,10.9166666666667,0.508587786259542,0.227164140602856,0.536656891495601,0.657266811279826,0.3717277486911,0.541984732824427,0.385321305601915,0.586942918998266,0.00329535217373217,9.25,8.83333333333334,10.7083333333333,10.9166666666667,5.45833333333333,3.86117629231897,4.97689599926721,7.40574622372637,4.20642425282091,2.24653624184082
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/trans_sus_cells_v3.csv b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/trans_sus_cells_v3.csv
new file mode 100755
index 0000000..a18e315
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cell_metrics/trans_sus_cells_v3.csv
@@ -0,0 +1,6 @@
+Mouse_id,Shank,Clu,Area,Layer,WVF_ratio,WVF_duration,Bl_ctr_row,Bl_ctr_col,Wh_ctr_row,Wh_ctr_col,Bl_rad,Wh_rad,Bl_lat,Wh_lat,Bl_mfr,Wh_mfr,Bl_SI,Wh_SI,Area_Overlap,Area_Union,OverlapIdx,PSTH_correln,Subfield_sep,Bl_stimc,Wh_stimc,Bl_chk_num,Wh_chk_num,Spont,Spont_cyc,Pref_ori,Pref_SF,Pref_TF,Max_rate,Max_rate_cyc,F0_pref,Sharp,CV,OSI,OSI_wnull,DSI_sev,DSI_chen,f1f0b,f1f0,Pval,F0_TF1,F0_TF2,F0_TF4,F0_TF8,F0_TF15,F1_TF1,F1_TF2,F1_TF4,F1_TF8,F1_TF15
+74,1,16,1,NaN,0.180751985,0.175,8,17,8,18,3.49511124,4.859013414,80.5,95.5,34.18268004,35.83521367,0.345587999,0.460766164,37.83368342,74.71643045,0.506363636,0.607703791,4,296,314,5,6,5,6.25,0,0.16,1,125,79.16666667,36,0.50072338,0.258886811,0.56238698,0.691915977,0.300225734,0.461805556,0.245893542,0.297551849,6.79E-07,36,21.04166667,22.41666667,23.54166667,12.04166667,8.852167517,7.717443623,8.798522874,16.1074593,8.09474585
+74,4,27,1,NaN,0.171967475,0.425,6,13,5,14,7.833267802,6.991768818,70.5,109.5,45.52890495,48.05646865,0.284859348,0.552612747,142.8442302,203.5003869,0.701935915,0.00279346,5.656854249,222,239,5,7,4.4,5,180,0.04,4,50,33.33333333,13.375,0.470015576,0.229015559,0.428253615,0.610900833,0.206766917,0.342679128,0.373410062,0.596341442,0.02733958,5.916666667,7.958333333,13.375,6.583333333,5.416666667,5.245142939,3.835269098,4.994359574,3.694959235,1.750066141
+75,1,2,1,NaN,0.179020336,0.3,7,12,7,13,5.870579009,5.231830094,64.5,87.5,14.71608404,10.38016037,0.416686052,0.30332155,31.99221704,162.2705021,0.197153621,0.435460466,4,205,223,5,6,1,0.5,270,0.16,2,37.5,12.5,3.291666667,0.674157303,0.064616927,0.244094488,0.451428571,0.244094488,0.392405063,1.063534332,1.254018092,0.001120253,3.166666667,3.291666667,3.25,1.75,2.291666667,2.326214764,3.500800508,2.853026994,2.125669008,2.369890296
+78,3,7,1,NaN,0.25704279,0.3,6,6,8,5,8.514390322,3.457795816,69.5,68.5,24.97441569,17.92925808,0.255980641,0.343553303,14.87536206,250.4358901,0.059397884,0.774372866,8.94427191,96,80,5,5,3.3,2.5,225,0.02,15,50,25,5.708333333,0.682432432,0.051506564,0.033962264,0.37254902,-0.021428571,-0.04379562,1.492219197,2.654987403,0.005427301,3.208333333,3.458333333,3.791666667,5.125,5.708333333,4.43147768,4.621665853,5.537427652,8.081975,8.518084585
+78,3,11,1,NaN,0.2538379,0.25,6,9,6,10,5.969189721,2.135885692,59.5,83.5,78.43921026,33.31181738,0.268908037,0.485015497,1.290556526,124.9802109,0.010326087,0.398062208,4,150,168,4,6,11.2,13,315,0.04,8,75,50,21.83333333,0.654341603,0.112935538,0.195664575,0.454123113,0.116080937,0.208015267,1.650983868,4.080733712,0.000335046,10.29166667,13.125,19.16666667,21.83333333,17.29166667,10.79237938,19.16833868,29.71094753,36.04648112,25.37906864
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cellmodel.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cellmodel.py
new file mode 100644
index 0000000..bc64495
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cellmodel.py
@@ -0,0 +1,358 @@
+#import isee_engine
+import os
+import itertools
+import matplotlib.pyplot as plt
+import numpy as np
+from . import utilities as util
+import importlib
+from .kernel import Kernel2D, Kernel3D
+from .linearfilter import SpatioTemporalFilter
+import json
+from .spatialfilter import GaussianSpatialFilter
+from .transferfunction import ScalarTransferFunction
+from .temporalfilter import TemporalFilterCosineBump
+from .cursor import LNUnitCursor, MultiLNUnitCursor
+from .movie import Movie
+from .lgnmodel1 import LGNModel, heat_plot
+from .transferfunction import MultiTransferFunction
+from .lnunit import LNUnit, MultiLNUnit
+from sympy.abc import x as symbolic_x
+from sympy.abc import y as symbolic_y
+
+
+
+class OnUnit(LNUnit):
+
+ def __init__(self, linear_filter, transfer_function):
+ assert linear_filter.amplitude > 0
+ super(OnUnit, self).__init__(linear_filter, transfer_function)
+
+class OffUnit(LNUnit):
+
+ def __init__(self, linear_filter, transfer_function):
+ assert linear_filter.amplitude < 0
+ super(OffUnit, self).__init__(linear_filter, transfer_function)
+
+class LGNOnOffCell(MultiLNUnit):
+ """A cell model for a OnOff cell"""
+ def __init__(self, on_filter, off_filter, transfer_function=MultiTransferFunction((symbolic_x, symbolic_y), 'Heaviside(x)*(x)+Heaviside(y)*(y)')):
+ """Summary
+
+ :param on_filter:
+ :param off_filter:
+ :param transfer_function:
+ """
+ self.on_filter = on_filter
+ self.off_filter = off_filter
+ self.on_unit = OnUnit(self.on_filter, ScalarTransferFunction('s'))
+ self.off_unit = OffUnit(self.off_filter, ScalarTransferFunction('s'))
+ super(LGNOnOffCell, self).__init__([self.on_unit, self.off_unit], transfer_function)
+
+class TwoSubfieldLinearCell(MultiLNUnit):
+
+ def __init__(self, dominant_filter, nondominant_filter,subfield_separation=10, onoff_axis_angle=45, dominant_subfield_location=(30,40),
+ transfer_function = MultiTransferFunction((symbolic_x, symbolic_y), 'Heaviside(x)*(x)+Heaviside(y)*(y)')):
+
+ self.subfield_separation = subfield_separation
+ self.onoff_axis_angle = onoff_axis_angle
+ self.dominant_subfield_location = dominant_subfield_location
+ self.dominant_filter = dominant_filter
+ self.nondominant_filter = nondominant_filter
+ self.transfer_function= transfer_function
+
+        self.dominant_unit = LNUnit(self.dominant_filter, ScalarTransferFunction('s'), amplitude=self.dominant_filter.amplitude)
+        self.nondominant_unit = LNUnit(self.nondominant_filter, ScalarTransferFunction('s'), amplitude=self.nondominant_filter.amplitude)
+
+ super(TwoSubfieldLinearCell, self).__init__([self.dominant_unit, self.nondominant_unit], self.transfer_function)
+
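+        # Place the dominant subfield at its configured location, then offset
+        # the nondominant subfield by subfield_separation along the ON/OFF
+        # axis (onoff_axis_angle, in degrees).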
+ self.dominant_filter.spatial_filter.translate = self.dominant_subfield_location
+ hor_offset = np.cos(self.onoff_axis_angle*np.pi/180.)*self.subfield_separation + self.dominant_subfield_location[0]
+ vert_offset = np.sin(self.onoff_axis_angle*np.pi/180.)*self.subfield_separation+ self.dominant_subfield_location[1]
+ rel_translation = (hor_offset,vert_offset)
+ self.nondominant_filter.spatial_filter.translate = rel_translation
+
+
+class LGNOnCell(object):
+
+ def __init__(self, **kwargs):
+
+ self.position = kwargs.pop('position', None)
+ self.weights = kwargs.pop('weights', None)
+ self.kpeaks = kwargs.pop('kpeaks', None)
+ self.amplitude = kwargs.pop('amplitude', None)
+ self.sigma = kwargs.pop('sigma', None)
+ self.transfer_function_str = kwargs.pop('transfer_function_str', 's') # 'Heaviside(s)*s')
+ self.metadata = kwargs.pop('metadata', {})
+
+ temporal_filter = TemporalFilterCosineBump(self.weights, self.kpeaks)
+ spatial_filter = GaussianSpatialFilter(translate=self.position, sigma=self.sigma, origin=(0,0)) # all distances measured from BOTTOM LEFT
+ spatiotemporal_filter = SpatioTemporalFilter(spatial_filter, temporal_filter, amplitude=self.amplitude)
+ transfer_function = ScalarTransferFunction(self.transfer_function_str)
+ self.unit = OnUnit(spatiotemporal_filter, transfer_function)
+
+class LGNOffCell(OffUnit):
+
+ def __init__(self, **kwargs):
+
+ lattice_unit_center = kwargs.pop('lattice_unit_center', None)
+ weights = kwargs.pop('weights', None)
+ kpeaks = kwargs.pop('kpeaks', None)
+ amplitude = kwargs.pop('amplitude', None)
+ sigma = kwargs.pop('sigma', None)
+ width = kwargs.pop('width', 5)
+ transfer_function_str = kwargs.pop('transfer_function_str', 'Heaviside(s)*s')
+
+ dxi = np.random.uniform(-width*1./2,width*1./2)
+ dyi = np.random.uniform(-width*1./2,width*1./2)
+ temporal_filter = TemporalFilterCosineBump(weights, kpeaks)
+ spatial_filter = GaussianSpatialFilter(translate=(dxi,dyi), sigma=sigma, origin=lattice_unit_center) # all distances measured from BOTTOM LEFT
+ spatiotemporal_filter = SpatioTemporalFilter(spatial_filter, temporal_filter, amplitude=amplitude)
+ transfer_function = ScalarTransferFunction(transfer_function_str)
+        super(LGNOffCell, self).__init__(spatiotemporal_filter, transfer_function)
+
+if __name__ == "__main__":
+
+ movie_file = '/data/mat/iSee_temp_shared/movies/TouchOfEvil.npy'
+ m_data = np.load(movie_file, 'r')
+ m = Movie(m_data[1000:], frame_rate=30.)
+
+ # Create second cell:
+ transfer_function = ScalarTransferFunction('s')
+ temporal_filter = TemporalFilterCosineBump((.4,-.3), (20,60))
+ cell_list = []
+ for xi in np.linspace(0,m.data.shape[2], 5):
+ for yi in np.linspace(0,m.data.shape[1], 5):
+ spatial_filter_on = GaussianSpatialFilter(sigma=(2,2), origin=(0,0), translate=(xi, yi))
+ on_linear_filter = SpatioTemporalFilter(spatial_filter_on, temporal_filter, amplitude=20)
+ spatial_filter_off = GaussianSpatialFilter(sigma=(4,4), origin=(0,0), translate=(xi, yi))
+ off_linear_filter = SpatioTemporalFilter(spatial_filter_off, temporal_filter, amplitude=-20)
+ on_off_cell = LGNOnOffCell(on_linear_filter, off_linear_filter)
+ cell_list.append(on_off_cell)
+
+ lgn = LGNModel(cell_list) #Here include a list of all cells
+ y = lgn.evaluate(m, downsample=100) #Does the filtering + non-linearity on movie object m
+ heat_plot(y, interpolation='none', colorbar=True)
+
+
+
+
+
+#
+# def imshow(self, ii, image_shape, fps, ax=None, show=True, relative_spatial_location=(0,0)):
+#
+# if ax is None:
+# _, ax = plt.subplots(1,1)
+#
+# curr_kernel = self.get_spatio_temporal_kernel(image_shape, fps, relative_spatial_location=relative_spatial_location)
+#
+# cax = curr_kernel.imshow(ii, ax=ax, show=False)
+#
+# if show == True:
+# plt.show()
+#
+# return ax
+#
+#
+# class OnOffCellModel(CellModel):
+#
+# def __init__(self, dc_offset=0, on_subfield=None, off_subfield=None, on_weight = 1, off_weight = -1, t_max=None):
+#
+# super(self.__class__, self).__init__(dc_offset, t_max)
+#
+# if isinstance(on_subfield, dict):
+# curr_module, curr_class = on_subfield.pop('class')
+# self.on_subfield = getattr(importlib.import_module(curr_module), curr_class)(**on_subfield)
+# else:
+# self.on_subfield = on_subfield
+#
+# super(self.__class__, self).add_subfield(on_subfield, on_weight)
+#
+# if isinstance(off_subfield, dict):
+# curr_module, curr_class = off_subfield.pop('class')
+# self.off_subfield = getattr(importlib.import_module(curr_module), curr_class)(**off_subfield)
+# else:
+# self.off_subfield = off_subfield
+#
+# super(self.__class__, self).add_subfield(off_subfield, off_weight)
+#
+#
+# def to_dict(self):
+#
+# return {'dc_offset':self.dc_offset,
+# 'on_subfield':self.on_subfield.to_dict(),
+# 'off_subfield':self.off_subfield.to_dict(),
+# 't_max':self.t_max,
+# 'class':(__name__, self.__class__.__name__)}
+#
+# class SingleSubfieldCellModel(CellModel):
+#
+# def __init__(self, subfield, weight = 1, dc_offset=0, t_max=None):
+#
+# super(SingleSubfieldCellModel, self).__init__(dc_offset, t_max)
+#
+# if isinstance(subfield, dict):
+# curr_module, curr_class = subfield.pop('class')
+# subfield = getattr(importlib.import_module(curr_module), curr_class)(**subfield)
+#
+# super(self.__class__, self).add_subfield(subfield, weight)
+#
+# def to_dict(self):
+#
+# assert len(self.subfield_list) == 1
+# subfield = self.subfield_list[0]
+# weight = self.subfield_weight_dict[subfield]
+#
+# return {'dc_offset':self.dc_offset,
+# 'subfield':subfield.to_dict(),
+# 'weight':weight,
+# 't_max':self.t_max,
+# 'class':(__name__, self.__class__.__name__)}
+#
+# class OnCellModel(SingleSubfieldCellModel):
+#
+# def __init__(self, on_subfield, weight = 1, dc_offset=0 , t_max=None):
+# assert weight > 0
+# super(OnCellModel, self).__init__(on_subfield, weight, dc_offset, t_max)
+#
+# def to_dict(self):
+# data_dict = super(OnCellModel, self).to_dict()
+# data_dict['on_subfield'] = data_dict.pop('subfield')
+# return data_dict
+#
+# class OffCellModel(SingleSubfieldCellModel):
+#
+# def __init__(self, on_subfield, weight = -1, dc_offset=0 , t_max=None):
+# assert weight < 0
+# super(OffCellModel, self).__init__(on_subfield, weight, dc_offset, t_max)
+#
+# def to_dict(self):
+# data_dict = super(OffCellModel, self).to_dict()
+# data_dict['off_subfield'] = data_dict.pop('subfield')
+# return data_dict
+
+
+# class OffCellModel(CellModel):
+#
+# def __init__(self, off_subfield, dc_offset=0, off_weight = 1, t_max=None):
+#
+# assert off_weight < 0.
+# self.weight = off_weight
+#
+#
+#
+#
+# super(self.__class__, self).__init__(dc_offset, t_max)
+#
+# if isinstance(on_subfield, dict):
+# curr_module, curr_class = on_subfield.pop('class')
+# self.subfield = getattr(importlib.import_module(curr_module), curr_class)(**on_subfield)
+# else:
+# self.subfield = on_subfield
+#
+# super(self.__class__, self).add_subfield(self.subfield, self.weight)
+#
+# def to_dict(self):
+#
+# return {'dc_offset':self.dc_offset,
+# 'on_subfield':self.subfield.to_dict(),
+# 'on_weight':self.weight,
+# 't_max':self.t_max,
+# 'class':(__name__, self.__class__.__name__)}
+
+
+
+
+
+
+# if __name__ == "__main__":
+#
+# t = np.arange(0,.5,.001)
+# example_movie = movie.Movie(file_name=os.path.join(isee_engine.movie_directory, 'TouchOfEvil.npy'), frame_rate=30.1, memmap=True)
+#
+# temporal_filter_on = TemporalFilterExponential(weight=1, tau=.05)
+# on_subfield = Subfield(scale=(5,15), weight=.5, rotation=30, temporal_filter=temporal_filter_on, translation=(0,0))
+#
+# temporal_filter_off = TemporalFilterExponential(weight=2, tau=.01)
+# off_subfield = Subfield(scale=(5,15), weight=.5, rotation=-30, temporal_filter=temporal_filter_off)
+#
+# cell = OnOffCellModel(on_subfield=on_subfield, off_subfield=off_subfield, dc_offset=0., t_max=.5)
+# curr_kernel = cell.get_spatio_temporal_kernel((100,150), 30.1)
+# curr_kernel.imshow(0)
+#
+# print cell.to_dict()
+
+
+
+# f = cell.get_spatio_temporal_filter(example_movie.movie_data.shape[1:], t,threshold=.5)
+# print len(f.t_ind_list)
+#
+#
+
+# for ii in range(example_movie.number_of_frames-curr_filter.t_max):
+# print ii, example_movie.number_of_frames, curr_filter.map(example_movie, ii)
+
+
+# off_subfield = Subfield(scale=(15,15), weight=.2, translation=(30,30))
+
+
+#
+# curr_filter = cell.get_spatio_temporal_filter((100,150))
+#
+
+#
+# # print touch_of_evil(40.41, mask=m)
+# print curr_filter.t_max
+# for ii in range(example_movie.number_of_frames-curr_filter.t_max):
+# print ii, example_movie.number_of_frames, curr_filter.map(example_movie, ii)
+
+# cell.visualize_spatial_filter((100,150))
+# show_volume(spatio_temporal_filter, vmin=spatio_temporal_filter.min(), vmax=spatio_temporal_filter.max())
+
+
+
+# def get_spatial_filter(self, image_shape, relative_spatial_location=(0,0), relative_threshold=default_relative_threshold):
+#
+# # Initialize:
+# translation_matrix = util.get_translation_matrix(relative_spatial_location)
+#
+# # On-subunit:
+# on_filter_pre_spatial = self.on_subfield.get_spatial_filter(image_shape)
+# on_filter_spatial = util.apply_transformation_matrix(on_filter_pre_spatial, translation_matrix)
+#
+# # Off-subunit:
+# off_filter_pre_spatial = self.off_subfield.get_spatial_filter(image_shape)
+# off_filter_spatial = util.apply_transformation_matrix(off_filter_pre_spatial, translation_matrix)
+#
+# spatial_filter = on_filter_spatial - off_filter_spatial
+#
+# tmp = np.abs(spatial_filter)
+# spatial_filter[np.where(tmp/tmp.max() < relative_threshold )] = 0
+#
+# return spatial_filter
+
+# kernel = float(self.dc_offset)/len(nonzero_ind_tuple[0])+spatio_temporal_filter[nonzero_ind_tuple]
+
+# def rectifying_filter_factory(kernel, movie, dc_offset=0):
+#
+# def rectifying_filter(t):
+#
+# fi = movie.frame_rate*float(t)
+# fim, fiM = np.floor(fi), np.ceil(fi)
+#
+# print t, fim, fiM
+#
+# try:
+# s1 = (movie.movie_data[int(fim)+kernel.t_ind_list, kernel.row_ind_list, kernel.col_ind_list]*kernel.kernel).sum()
+# s2 = (movie.movie_data[int(fiM)+kernel.t_ind_list, kernel.row_ind_list, kernel.col_ind_list]*kernel.kernel).sum()
+# except IndexError:
+# return None
+#
+# # Linear interpolation:
+# s_pre = dc_offset + s1*((1-(fi-fim))*.5) + s2*((fi-fim)*.5)
+#
+# if s_pre < 0:
+# return 0
+# else:
+# return float(s_pre)
+#
+# return rectifying_filter
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cursor.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cursor.py
new file mode 100644
index 0000000..8406fd1
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/cursor.py
@@ -0,0 +1,266 @@
+from .movie import Movie
+import numpy as np
+from .linearfilter import SpatioTemporalFilter
+from .spatialfilter import GaussianSpatialFilter
+from .temporalfilter import TemporalFilterCosineBump
+from .utilities import convert_tmin_tmax_framerate_to_trange
+import matplotlib.pyplot as plt
+from .kernel import Kernel3D
+import scipy.signal as spsig
+import time
+
+class KernelCursor(object):
+
+
+ def __init__(self, kernel, movie):
+
+ self.movie = movie
+ self.kernel = kernel
+ self.cache = {}
+
+ # print self.kernel.t_range.min(), self.kernel.t_range.max(), type(kernel), len(self.kernel)
+
+ # This ensures that the kernel frame rate matches the movie frame rate:
+ np.testing.assert_almost_equal(np.diff(self.kernel.t_range), np.ones_like(self.kernel.t_range[1:])*(1./movie.frame_rate))
+
+ @property
+ def row_range(self):
+ return self.movie.row_range
+
+ @property
+ def col_range(self):
+ return self.movie.col_range
+
+ @property
+ def t_range(self):
+ return self.movie.t_range
+
+ @property
+ def frame_rate(self):
+ return self.movie.frame_rate
+
+    def evaluate(self, t_min=None, t_max=None, downsample=1):
+
+
+ # print 'EVALUATE'
+ if t_max is None:
+ t_max = self.t_range[-1]
+
+ if t_min is None:
+ t_min = self.t_range[0]
+
+ t_range = convert_tmin_tmax_framerate_to_trange(t_min, t_max, self.movie.frame_rate)[::int(downsample)]
+ y_vals = np.array([self(t) for t in t_range])
+
+ return t_range, y_vals
+
+ def __call__(self, t):
+
+
+
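+        # Zero outside the movie; otherwise take the kernel/movie dot product
+        # at the frame(s) bracketing t, linearly interpolating when t falls
+        # between frames.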
+ if t < self.t_range[0] or t > self.t_range[-1]:
+ curr_rate = 0
+ else:
+# print 'zero'
+
+ ti = t*self.frame_rate
+ til, tir = int(np.floor(ti)), int(np.ceil(ti))
+
+ tl, tr = float(til)/self.frame_rate, float(tir)/self.frame_rate
+ if np.abs(tl-t)<1e-12:
+ curr_rate = self.apply_dot_product(til)
+ # print 'a'
+
+ elif np.abs(tr-t)<1e-12:
+ curr_rate = self.apply_dot_product(tir)
+ # print 'b'
+ else:
+ wa, wb = (1-(t-tl)/(tr-tl)), (1-(tr-t)/(tr-tl))
+ cl = self.apply_dot_product(til)
+ cr = self.apply_dot_product(tir)
+ curr_rate = cl*wa+cr*wb
+ # print 'c'
+
+        if np.isnan(curr_rate):
+            raise RuntimeError('KernelCursor produced a NaN rate at t={}'.format(t))
+
+ return curr_rate
+
+ def apply_dot_product(self, ti_offset):
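+        # Sparse dot product of the kernel with the movie at an integer frame
+        # offset; taps that fall outside the movie are dropped, and results
+        # are memoized per offset.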
+
+ try:
+ return self.cache[ti_offset]
+
+ except KeyError:
+ t_inds = self.kernel.t_inds + ti_offset + 1 # Offset by one nhc 14 Apr '17
+ min_ind, max_ind = 0, self.movie.data.shape[0]
+ allowed_inds = np.where(np.logical_and(min_ind <= t_inds, t_inds < max_ind))
+ t_inds = t_inds[allowed_inds]
+ row_inds = self.kernel.row_inds[allowed_inds]
+ col_inds = self.kernel.col_inds[allowed_inds]
+ kernel_vector = self.kernel.kernel[allowed_inds]
+ result = np.dot(self.movie[t_inds, row_inds, col_inds],kernel_vector)
+            self.cache[ti_offset] = result
+ return result
+
+class FilterCursor(KernelCursor):
+
+ def __init__(self, spatiotemporal_filter, movie, threshold=0):
+
+ self.spatiotemporal_filter = spatiotemporal_filter
+ kernel = self.spatiotemporal_filter.get_spatiotemporal_kernel(movie.row_range, movie.col_range, t_range=movie.t_range, threshold=threshold, reverse=True)
+
+ super(FilterCursor, self).__init__(kernel, movie)
+
+class LNUnitCursor(KernelCursor):
+
+ def __init__(self, lnunit, movie, threshold=0):
+
+ # print 'LNUnitCursor'
+
+ self.lnunit = lnunit
+
+ kernel = lnunit.get_spatiotemporal_kernel(movie.row_range, movie.col_range, movie.t_range, reverse=True, threshold=threshold)
+
+ kernel.apply_threshold(threshold)
+
+ super(LNUnitCursor, self).__init__(kernel, movie)
+
+ def __call__(self, t):
+ return self.lnunit.transfer_function(super(LNUnitCursor, self).__call__(t))
+
+class MultiLNUnitCursor(object):
+
+ def __init__(self, multi_lnunit, movie, threshold=0):
+
+ self.multi_lnunit = multi_lnunit
+ self.lnunit_cursor_list = [LNUnitCursor(lnunit, movie, threshold=threshold) for lnunit in multi_lnunit.lnunit_list]
+ self.movie = movie
+
+    def evaluate(self, **kwargs):
+
+        multi_e = [unit_cursor.evaluate(**kwargs) for unit_cursor in self.lnunit_cursor_list]
+        t_list, y_list = zip(*multi_e)
+
+        return t_list[0], self.multi_lnunit.transfer_function(*y_list)
+
+class MultiLNUnitMultiMovieCursor(MultiLNUnitCursor):
+
+ def __init__(self, multi_lnunit, movie_list, threshold=0.):
+
+ assert len(multi_lnunit.lnunit_list) == len(movie_list)
+
+ self.multi_lnunit = multi_lnunit
+ self.lnunit_movie_list = movie_list
+ self.lnunit_cursor_list = [lnunit.get_cursor(movie, threshold=threshold) for lnunit, movie in zip(multi_lnunit.lnunit_list, movie_list)]
+# for lnunit, movie, curr_cursor in zip(multi_lnunit.lnunit_list, movie_list, self.lnunit_cursor_list):
+# print lnunit, movie, curr_cursor
+
+class SeparableKernelCursor(object):
+
+ def __init__(self, spatial_kernel, temporal_kernel, movie):
+ '''Assumes temporal kernel is not reversed'''
+
+ self.movie = movie
+ self.spatial_kernel = spatial_kernel
+ self.temporal_kernel = temporal_kernel
+
+ def evaluate(self, threshold=0):
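+        # Separable evaluation: collapse space first (dot each frame with the
+        # spatial kernel over its nonzero bounding box), then convolve the
+        # resulting 1-D signal with the temporal kernel, zero-padded so the
+        # output stays aligned with the movie frames.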
+
+ full_spatial_kernel = np.array([self.spatial_kernel.full()])
+ full_temporal_kernel = self.temporal_kernel.full()
+
+ nonzero_inds = np.where(np.abs(full_spatial_kernel[0,:,:])>=threshold)
+ rm, rM = nonzero_inds[0].min(), nonzero_inds[0].max()
+ cm, cM = nonzero_inds[1].min(), nonzero_inds[1].max()
+
+ convolution_answer_sep_spatial = (self.movie.data[:,rm:rM+1, cm:cM+1] * full_spatial_kernel[:,rm:rM+1, cm:cM+1]).sum(axis=1).sum(axis=1)
+ sig_tmp = np.zeros(len(full_temporal_kernel) + len(convolution_answer_sep_spatial) - 1)
+ sig_tmp[len(full_temporal_kernel)-1:] = convolution_answer_sep_spatial
+ convolution_answer_sep = spsig.convolve(sig_tmp, full_temporal_kernel[::-1], mode='valid')
+ t = np.arange(len(convolution_answer_sep))/self.movie.frame_rate
+ return t, convolution_answer_sep
+
+
+class SeparableSpatioTemporalFilterCursor(SeparableKernelCursor):
+
+ def __init__(self, spatiotemporal_filter, movie):
+
+ self.spatial_filter = spatiotemporal_filter.spatial_filter
+ self.temporal_filter = spatiotemporal_filter.temporal_filter
+
+ spatial_kernel = self.spatial_filter.get_kernel(movie.row_range, movie.col_range, threshold=-1)
+ temporal_kernel = self.temporal_filter.get_kernel(t_range=movie.t_range, threshold=0, reverse=True)
+ spatial_kernel.kernel *= spatiotemporal_filter.amplitude
+
+ super(SeparableSpatioTemporalFilterCursor, self).__init__(spatial_kernel,
+ temporal_kernel,
+ movie)
+
+
+class SeparableLNUnitCursor(SeparableSpatioTemporalFilterCursor):
+ def __init__(self, lnunit, movie):
+ self.lnunit = lnunit
+
+ super(SeparableLNUnitCursor, self).__init__(self.lnunit.linear_filter, movie)
+
+ def evaluate(self, downsample = 1):
+
+ assert downsample == 1
+
+ t, y = super(SeparableLNUnitCursor, self).evaluate()
+
+ return t, [self.lnunit.transfer_function(yi) for yi in y]
+
+class SeparableMultiLNUnitCursor(object):
+
+ def __init__(self, multilnunit, movie):
+
+ self.multilnunit = multilnunit
+
+ self.lnunit_cursor_list = []
+ for lnunit in self.multilnunit.lnunit_list:
+ self.lnunit_cursor_list.append(SeparableLNUnitCursor(lnunit, movie))
+
+ def evaluate(self, *args, **kwargs):
+
+ assert kwargs.get('downsample', 1) == 1
+
+ y_list = []
+ for cursor in self.lnunit_cursor_list:
+ t, y = cursor.evaluate(*args, **kwargs)
+ y_list.append(y)
+
+ return t, self.multilnunit.transfer_function(*y_list)
+
+# if __name__ == "__main__":
+# spatial_filter_1 = GaussianSpatialFilter(sigma=(2.,2.), amplitude=10)
+# temporal_filter = TemporalFilterCosineBump((.4,-.3), (40,80))
+# curr_filter = SpatioTemporalFilter(spatial_filter_1, temporal_filter)
+#
+# movie_file = '/data/mat/iSee_temp_shared/movies/TouchOfEvil.npy'
+# m_data = np.load(movie_file, 'r')
+# movie = Movie(m_data[:,:,:], frame_rate=30.)
+# cursor = FilterCursor(curr_filter, movie, threshold=-1)
+# cursor.evaluate()
+
+
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/fitfuns.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/fitfuns.py
new file mode 100644
index 0000000..5b67919
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/fitfuns.py
@@ -0,0 +1,190 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Nov 13 17:07:50 2014
+
+@author: rami
+"""
+import os
+from math import *
+import numpy as np
+import numpy.fft as npft
+from random import *
+import scipy.io as sio
+#import statsmodels.api as sm
+from scipy import stats
+import matplotlib.pyplot as plt
+
+def makeFitStruct_GLM(dtsim,kbasprs,nkt,flag_exp):
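+    # Assemble a GLM stimulus-filter struct: a temporal basis (raised-cosine
+    # when flag_exp == 0, exponential otherwise), zero-initialized basis
+    # weights 'kt', and the simulation timestep 'dt'.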
+
+ gg = {}
+ gg['k'] = []
+ gg['dc'] = 0
+ gg['kt'] = np.zeros((nkt,1))
+ gg['ktbas'] = []
+ gg['kbasprs'] = kbasprs
+ gg['dt'] = dtsim
+
+    if flag_exp == 0:
+        ktbas = makeBasis_StimKernel(kbasprs, nkt)
+    else:
+        ktbas = makeBasis_StimKernel_exp(kbasprs, nkt)
+
+ gg['ktbas'] = ktbas
+ gg['k'] = gg['ktbas']*gg['kt']
+
+ return gg
+
+def makeBasis_StimKernel(kbasprs,nkt):
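+    # Build a raised-cosine ('cosine bump') temporal basis: bumps centered at
+    # nlin(kpeaks) on a log-stretched time axis with spacing derived from the
+    # (currently hard-coded) ylim range, optional neye identity columns
+    # prepended, per-bump delays applied, and columns normalized to unit norm.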
+
+ neye = kbasprs['neye']
+ ncos = kbasprs['ncos']
+ kpeaks = kbasprs['kpeaks']
+ kdt = 1
+ b = kbasprs['b']
+ delays_raw = kbasprs['delays']
+ delays = delays_raw[0].astype(int)
+
+ ylim = np.array([100.,200.]) # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!HARD-CODED FOR NOW
+# yrnge = nlin(kpeaks + b*np.ones(np.shape(kpeaks)))
+ yrnge = nlin(ylim + b*np.ones(np.shape(kpeaks)))
+ db = (yrnge[-1]-yrnge[0])/(ncos-1)
+ ctrs = nlin(np.array(kpeaks))#yrnge
+ mxt = invnl(yrnge[ncos-1]+2*db)-b
+ kt0 = np.arange(0,mxt,kdt) #-delay
+ nt = len(kt0)
+ e1 = np.tile(nlin(kt0+b*np.ones(np.shape(kt0))),(ncos,1))
+ e2 = np.transpose(e1)
+ e3 = np.tile(ctrs,(nt,1))
+
+ kbasis0 = []
+ for kk in range(ncos):
+ kbasis0.append(ff(e2[:,kk],e3[:,kk],db))
+
+
+ #Concatenate identity vectors
+ nkt0 = np.size(kt0,0)
+ a1 = np.concatenate((np.eye(neye), np.zeros((nkt0,neye))),axis=0)
+ a2 = np.concatenate((np.zeros((neye,ncos)),np.array(kbasis0).T),axis=0)
+ kbasis = np.concatenate((a1,a2),axis=1)
+ kbasis = np.flipud(kbasis)
+ nkt0 = np.size(kbasis,0)
+
+ if nkt0 < nkt:
+ kbasis = np.concatenate((np.zeros((nkt-nkt0,ncos+neye)),kbasis),axis=0)
+ elif nkt0 > nkt:
+ kbasis = kbasis[-1-nkt:-1,:]
+
+
+ kbasis = normalizecols(kbasis)
+
+# plt.figure()
+# plt.plot(kbasis[:,0],'b')
+# plt.plot(kbasis[:,1],'r')
+# plt.show()
+#
+# print kpeaks
+# print nkt0, nkt
+# print delays[0][0], delays[0][1]
+# print sev
+ kbasis2_0 = np.concatenate((kbasis[:,0],np.zeros((delays[0],))),axis=0)
+ kbasis2_1 = np.concatenate((kbasis[:,1],np.zeros((delays[1],))),axis=0)
+
+# plt.figure()
+# plt.plot(kbasis2_0,'b')
+# plt.plot(kbasis2_1,'r')
+# plt.show(block=False)
+
+ len_diff = delays[1]-delays[0]
+ kbasis2_1 = kbasis2_1[len_diff:]
+
+ kbasis2 = np.zeros((len(kbasis2_0),2))
+ kbasis2[:,0] = kbasis2_0
+ kbasis2[:,1] = kbasis2_1
+ # print(np.shape(kbasis2_0))
+ # print(len(kbasis2_0), len(kbasis2_1))
+
+
+# plt.figure()
+# plt.plot(kbasis[:,0],'b')
+# plt.plot(kbasis[:,1],'r')
+# plt.plot(kbasis2_0,'m')
+# plt.plot(kbasis2_1,'k')
+# plt.show(block=False)
+
+ kbasis2 = normalizecols(kbasis2)
+
+ return kbasis2
+
+
+def makeBasis_StimKernel_exp(kbasprs,nkt):
+ ks = kbasprs['ks']
+ b = kbasprs['b']
+ x0 = np.arange(0,nkt)
+ kbasis = np.zeros((nkt,len(ks)))
+ for ii in range(len(ks)):
+ kbasis[:,ii] = invnl(-ks[ii]*x0) #(1.0/ks[ii])*
+
+ kbasis = np.flipud(kbasis)
+ #kbasis = normalizecols(kbasis)
+
+ return kbasis
+
+def nlin(x):
+ eps = 1e-20
+ #x.clip(0.)
+
+ return np.log(x+eps)
+
+def invnl(x):
+ eps = 1e-20
+ return np.exp(x)-eps
+
+def ff(x,c,dc):
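+    # Raised-cosine bump: 1 at x == c, falling to 0 where |x - c| >= 2*dc;
+    # arguments beyond +/-pi are clamped.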
+ rowsize = np.size(x,0)
+ m = []
+ for i in range(rowsize):
+ xi = x[i]
+ ci = c[i]
+ val=(np.cos(np.max([-pi,np.min([pi,(xi-ci)*pi/dc/2])]))+1)/2
+ m.append(val)
+
+ return np.array(m)
+
+def normalizecols(A):
+    # Scale each column of A to unit Euclidean norm.
+    B = A/np.tile(np.sqrt(np.sum(A**2, axis=0)), (np.size(A, 0), 1))
+
+    return B
+
+def sameconv(A,B):
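+    # FFT-based linear convolution of A with the time-reversed B, truncated to
+    # len(A) samples (a causal, 'same'-length filter output).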
+
+ am = np.size(A)
+ bm = np.size(B)
+ nn = am+bm-1
+
+ q = npft.fft(A,nn)*npft.fft(np.flipud(B),nn)
+ p = q
+ G = npft.ifft(p)
+ G = G[range(am)]
+
+ return G
+
+# kbasprs = {}
+# kbasprs['neye'] = 0
+# kbasprs['ncos'] = 2
+# kbasprs['kpeaks'] = 40,80
+# kbasprs['b'] = .3
+#
+# nkt = 400
+#
+# filter_data = makeBasis_StimKernel(kbasprs, nkt)
+#
+# print filter_data
+#
+# print [x for x in filter_data.T]
+#
+# import matplotlib.pyplot as plt
+# plt.plot(filter_data[:,0]+filter_data[:,1])
+# plt.show()
+
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/kernel.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/kernel.py
new file mode 100644
index 0000000..820b1a3
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/kernel.py
@@ -0,0 +1,475 @@
+#from matplotlib import _cntr as cntr
+import matplotlib as mpl
+from mpl_toolkits.mplot3d import Axes3D
+from matplotlib import cm
+import scipy.interpolate as spinterp
+import h5py
+import numpy as np
+import bisect
+import matplotlib.pyplot as plt
+
+def find_l_r_in_t_range(t_range, t):
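+    # Locate t on the grid t_range: return (i,) when t coincides with grid
+    # point i, (l, r) when it falls strictly between neighbors, and None when
+    # it lies outside the grid.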
+
+ for tl in range(len(t_range)-1):
+ tr = tl+1
+ test_val = (t_range[tl]-t)*(t_range[tr]-t)
+ if np.abs(test_val) < 1e-16:
+
+ if np.abs(t_range[tl]-t) < 1e-16:
+ return (tl,)
+ else:
+ return (tr,)
+
+    elif test_val < 0:
+        return tl, tr
+
+def get_contour(X, Y, Z, c):
+    # matplotlib removed the private _cntr module this code once used; trace
+    # the single level c through the public contour API instead.
+    contour_obj = plt.contour(X, Y, Z, levels=[c])
+    segs = contour_obj.allsegs[0]
+    if len(segs) > 0:
+        seg = segs[0]
+        return seg[:, 0], seg[:, 1]
+    else:
+        return [], []
+
+def plot_single_contour(ax, x_contour, y_contour, t, color):
+ t_contour = t+np.zeros_like(x_contour)
+ ax.plot(x_contour, t_contour, y_contour, zdir='z', color=color)
+
+
+class Kernel1D(object):
+
+ def rescale(self):
+ #self.kernel /= np.abs(self.kernel).sum()
+ if np.abs(self.kernel.sum())!=0:
+ self.kernel /= np.abs(self.kernel.sum())
+
+ def normalize(self):
+# self.kernel /= np.abs(self.kernel).sum()
+ self.kernel /= np.abs(self.kernel.sum())
+# self.kernel /= self.kernel.sum()
+
+
+ def __init__(self, t_range, kernel_array, threshold=0., reverse=False):
+ assert len(t_range) == len(kernel_array)
+
+ kernel_array = np.array(kernel_array)
+ inds_to_keep = np.where(np.abs(kernel_array) > threshold)
+
+ if reverse == True:
+ self.t_range = -np.array(t_range)[::-1]
+
+ t_inds_tmp = inds_to_keep[0]
+ max_t_ind = t_inds_tmp.max()
+ reversed_t_inds = max_t_ind - t_inds_tmp
+ self.t_inds = reversed_t_inds - max_t_ind - 1 # Had an off by one error here should be "- 1" nhc 14 Apr '17 change made in cursor evalutiate too
+
+ else:
+ self.t_range = np.array(t_range)
+ self.t_inds = inds_to_keep[0]
+
+ self.kernel = kernel_array[inds_to_keep]
+ assert len(self.t_inds) == len(self.kernel)
+
+ def __len__(self):
+ return len(self.kernel)
+
+ def imshow(self, ax=None, show=True, save_file_name=None, ylim=None, xlim=None,color='b'):
+
+ if ax is None:
+ _, ax = plt.subplots(1,1)
+
+ t_vals = self.t_range[self.t_inds]
+
+ ax.plot(t_vals, self.kernel, color)
+ ax.set_xlabel('Time (Seconds)')
+
+ if not ylim is None:
+ ax.set_ylim(ylim)
+
+ if not xlim is None:
+ ax.set_xlim(xlim)
+ else:
+ a,b=(t_vals[0], t_vals[-1])
+ ax.set_xlim(min(a,b), max(a,b))
+
+        if not save_file_name is None:
+            plt.savefig(save_file_name, transparent=True)
+
+ if show == True:
+ plt.show()
+
+ return ax, (t_vals, self.kernel)
+
+ def full(self, truncate_t=True):
+ data = np.zeros(len(self.t_range))
+ data[self.t_inds] = self.kernel
+
+
+ if truncate_t == True:
+ ind_min = np.where(np.abs(data) > 0)[0].min()
+ return data[ind_min:]
+ else:
+ return data
+
+
+class Kernel2D(object):
+
+ def rescale(self):
+ #self.kernel /= np.abs(self.kernel).sum()
+ if np.abs(self.kernel.sum())!=0:
+ self.kernel /= np.abs(self.kernel.sum())
+
+ def normalize(self):
+# self.kernel /= np.abs(self.kernel).sum()
+ self.kernel /= np.abs(self.kernel.sum())
+
+ @classmethod
+ def from_dense(cls, row_range, col_range, kernel_array, threshold=0.):
+ col_range = np.array(col_range).copy()
+ row_range = np.array(row_range).copy()
+ kernel_array = np.array(kernel_array).copy()
+ inds_to_keep = np.where(np.abs(kernel_array) > threshold)
+ kernel = kernel_array[inds_to_keep]
+ if len(inds_to_keep) == 1:
+ col_inds, row_inds = np.array([]), np.array([])
+ else:
+ col_inds, row_inds = inds_to_keep
+
+ return cls(row_range, col_range, row_inds, col_inds, kernel)
+
+ @classmethod
+ def copy(cls, instance):
+ return cls(instance.row_range.copy(),
+ instance.col_range.copy(),
+ instance.row_inds.copy(),
+ instance.col_inds.copy(),
+ instance.kernel.copy())
+
+
+ def __init__(self, row_range, col_range, row_inds, col_inds, kernel):
+
+
+ self.col_range = np.array(col_range)
+ self.row_range = np.array(row_range)
+ self.row_inds = np.array(row_inds)
+ self.col_inds = np.array(col_inds)
+
+ self.kernel = np.array(kernel)
+
+ assert len(self.row_inds) == len(self.col_inds)
+ assert len(self.row_inds) == len(self.kernel)
+
+ def __mul__(self, constant):
+
+ new_copy = Kernel2D.copy(self)
+ new_copy.kernel *= constant
+ return new_copy
+
+ def __add__(self, other):
+
+
+ if len(other) == 0:
+ return self
+
+ try:
+ np.testing.assert_almost_equal(self.row_range, other.row_range)
+ np.testing.assert_almost_equal(self.col_range, other.col_range)
+        except AssertionError:
+ raise Exception('Kernels must exist on same grid to be added')
+
+ row_range = self.row_range.copy()
+ col_range = self.col_range.copy()
+
+ kernel_dict = {}
+ for key, ker in zip(zip(self.row_inds, self.col_inds), self.kernel):
+ kernel_dict[key] = kernel_dict.setdefault(key, 0) + ker
+ for key, ker in zip(zip(other.row_inds, other.col_inds), other.kernel):
+ kernel_dict[key] = kernel_dict.setdefault(key, 0) + ker
+
+ key_list, kernel_list = zip(*kernel_dict.items())
+ row_inds_list, col_inds_list = zip(*key_list)
+ row_inds = np.array(row_inds_list)
+ col_inds = np.array(col_inds_list)
+ kernel = np.array(kernel_list)
+
+ return Kernel2D(row_range, col_range, row_inds, col_inds, kernel)
+
+ def apply_threshold(self, threshold):
+
+ inds_to_keep = np.where(np.abs(self.kernel) > threshold)
+ self.row_inds = self.row_inds[inds_to_keep]
+ self.col_inds = self.col_inds[inds_to_keep]
+ self.kernel = self.kernel[inds_to_keep]
+
+ def full(self):
+ data = np.zeros((len(self.row_range), len(self.col_range)))
+ data[self.row_inds, self.col_inds] = self.kernel
+ return data
+
+ def imshow(self, ax=None, show=True, save_file_name=None, clim=None, colorbar=True):
+
+ from mpl_toolkits.axes_grid1 import make_axes_locatable
+
+ if ax is None:
+ _, ax = plt.subplots(1,1)
+
+ if colorbar == True:
+ divider = make_axes_locatable(ax)
+ cax = divider.append_axes("right", size = "5%", pad = 0.05)
+
+ data = self.full()
+
+ if not clim is None:
+ im = ax.imshow(data, extent=(self.col_range[0], self.col_range[-1], self.row_range[0], self.row_range[-1]), origin='lower', clim=clim, interpolation='none')
+ else:
+ im = ax.imshow(data, extent=(self.col_range[0], self.col_range[-1], self.row_range[0], self.row_range[-1]), origin='lower', interpolation='none')
+
+ if colorbar == True:
+ plt.colorbar(im,cax=cax)
+
+ if not save_file_name is None:
+ plt.savefig(save_file_name, transparent=True)
+
+ if show == True:
+ plt.show()
+
+ return ax, data
+
+ def __len__(self):
+ return len(self.kernel)
+
+class Kernel3D(object):
+
+ def rescale(self):
+ #self.kernel /= np.abs(self.kernel).sum()
+ if np.abs(self.kernel.sum())!=0:
+ self.kernel /= np.abs(self.kernel.sum())
+
+ def normalize(self):
+ #self.kernel /= np.abs(self.kernel).sum()
+# print self.kernel.sum()
+ self.kernel /= (self.kernel.sum())*np.sign(self.kernel.sum())
+# print self.kernel.sum()
+# sys.exit()
+
+ @classmethod
+ def copy(cls, instance):
+ return cls(instance.row_range.copy(),
+ instance.col_range.copy(),
+ instance.t_range.copy(),
+ instance.row_inds.copy(),
+ instance.col_inds.copy(),
+ instance.t_inds.copy(),
+ instance.kernel.copy())
+
+ def __len__(self):
+ return len(self.kernel)
+
+ def __init__(self, row_range, col_range, t_range, row_inds, col_inds, t_inds, kernel):
+
+ self.col_range = np.array(col_range)
+ self.row_range = np.array(row_range)
+ self.t_range = np.array(t_range)
+ self.col_inds = np.array(col_inds)
+ self.row_inds = np.array(row_inds)
+ self.t_inds = np.array(t_inds)
+ self.kernel = np.array(kernel)
+
+ assert len(self.row_inds) == len(self.col_inds)
+ assert len(self.row_inds) == len(self.t_inds)
+ assert len(self.row_inds) == len(self.kernel)
+
+ def apply_threshold(self, threshold):
+
+ inds_to_keep = np.where(np.abs(self.kernel) > threshold)
+ self.row_inds = self.row_inds[inds_to_keep]
+ self.col_inds = self.col_inds[inds_to_keep]
+ self.t_inds = self.t_inds[inds_to_keep]
+ self.kernel = self.kernel[inds_to_keep]
+
+ def __add__(self, other):
+
+
+ if len(other) == 0:
+ return self
+
+ try:
+ if not (len(self.row_range) == 0 or len(other.row_range) == 0):
+ np.testing.assert_almost_equal(self.row_range, other.row_range)
+ if not (len(self.col_range) == 0 or len(other.col_range) == 0):
+ np.testing.assert_almost_equal(self.col_range, other.col_range)
+ if not (len(self.t_range) == 0 or len(other.t_range) == 0):
+ np.testing.assert_almost_equal(self.t_range, other.t_range)
+        except AssertionError:
+ raise Exception('Kernels must exist on same grid to be added')
+
+ if len(self.row_range) == 0:
+ row_range = other.row_range.copy()
+ else:
+ row_range = self.row_range.copy()
+ if len(self.col_range) == 0:
+ col_range = other.col_range.copy()
+ else:
+ col_range = self.col_range.copy()
+ if len(self.t_range) == 0:
+ t_range = other.t_range.copy()
+ else:
+ t_range = self.t_range.copy()
+
+ kernel_dict = {}
+ for key, ker in zip(zip(self.row_inds, self.col_inds, self.t_inds), self.kernel):
+ kernel_dict[key] = kernel_dict.setdefault(key, 0) + ker
+ for key, ker in zip(zip(other.row_inds, other.col_inds, other.t_inds), other.kernel):
+ kernel_dict[key] = kernel_dict.setdefault(key, 0) + ker
+
+ key_list, kernel_list = zip(*kernel_dict.items())
+ row_inds_list, col_inds_list, t_inds_list = zip(*key_list)
+ row_inds = np.array(row_inds_list)
+ col_inds = np.array(col_inds_list)
+ t_inds = np.array(t_inds_list)
+ kernel = np.array(kernel_list)
+
+ return Kernel3D(row_range, col_range, t_range, row_inds, col_inds, t_inds, kernel)
+
+ def __mul__(self, constant):
+
+ new_copy = Kernel3D.copy(self)
+ new_copy.kernel *= constant
+ return new_copy
+
+ def t_slice(self, t):
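+        # Return the 2D kernel at time t: None outside the temporal support,
+        # the exact slice when t lies on the grid, otherwise a linear blend of
+        # the two neighboring slices.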
+
+ ind_list = find_l_r_in_t_range(self.t_range, t)
+
+ if ind_list is None:
+ return None
+
+ elif len(ind_list) == 1:
+
+ t_ind_i = ind_list[0]
+ inds_i = np.where(self.t_range[self.t_inds] == self.t_range[t_ind_i])
+ row_inds = self.row_inds[inds_i]
+ col_inds = self.col_inds[inds_i]
+ kernel = self.kernel[inds_i]
+ return Kernel2D(self.row_range, self.col_range, row_inds, col_inds, kernel)
+
+ else:
+ t_ind_l, t_ind_r = ind_list
+ t_l, t_r = self.t_range[t_ind_l], self.t_range[t_ind_r]
+
+ inds_l = np.where(self.t_range[self.t_inds] == self.t_range[t_ind_l])
+ inds_r = np.where(self.t_range[self.t_inds] == self.t_range[t_ind_r])
+ row_inds_l = self.row_inds[inds_l]
+ col_inds_l = self.col_inds[inds_l]
+ kernel_l = self.kernel[inds_l]
+ kl = Kernel2D(self.row_range, self.col_range, row_inds_l, col_inds_l, kernel_l)
+ row_inds_r = self.row_inds[inds_r]
+ col_inds_r = self.col_inds[inds_r]
+ kernel_r = self.kernel[inds_r]
+ kr = Kernel2D(self.row_range, self.col_range, row_inds_r, col_inds_r, kernel_r)
+ wa, wb = (1-(t-t_l)/(t_r-t_l)), (1-(t_r-t)/(t_r-t_l))
+
+ return kl*wa + kr*wb
+
+ def full(self, truncate_t=True):
+
+ data = np.zeros((len(self.t_range), len(self.row_range), len(self.col_range)))
+ data[self.t_inds, self.row_inds, self.col_inds] = self.kernel
+
+        if truncate_t == True:
+            ind_min = np.where(np.abs(data) > 0)[0].min()
+            return data[ind_min:, :, :]
+ else:
+ return data
+
+
+ # if truncate_t == True:
+ # ind_min = np.where(np.abs(data) > 0)[0].min()
+ # return data[ind_min:]
+ # else:
+ # return data
+
+ def imshow(self, ax=None, t_range=None, cmap=cm.bwr, N=10, show=True, save_file_name=None, kvals=None):
+
+        if ax is None:
+            fig = plt.figure()
+            ax = fig.add_subplot(projection='3d')
+
+ if t_range is None:
+ t_range = self.t_range
+
+ slice_list_sparse = [self.t_slice(t) for t in t_range]
+ slice_list = []
+ slice_t_list = []
+ for curr_slice, curr_t in zip(slice_list_sparse, t_range):
+ if not curr_slice is None:
+ slice_list.append(curr_slice.full())
+ slice_t_list.append(curr_t)
+ all_slice_max = max(map(np.max, slice_list))
+ all_slice_min = min(map(np.min, slice_list))
+ upper_bound = max(np.abs(all_slice_max), np.abs(all_slice_min))
+ lower_bound = -upper_bound
+ norm = mpl.colors.Normalize(vmin=lower_bound, vmax=upper_bound)
+ color_mapper = cm.ScalarMappable(norm=norm, cmap=cmap).to_rgba
+
+ if kvals is None:
+ kvals = np.linspace(lower_bound, upper_bound, N)
+
+ X, Y = np.meshgrid(self.row_range, self.col_range)
+
+ contour_dict = {}
+ for kval in kvals:
+ for t_val, curr_slice in zip(slice_t_list, slice_list):
+ x_contour, y_contour = get_contour(Y, X, curr_slice.T, kval)
+ contour_dict[kval, t_val] = x_contour, y_contour
+ color = color_mapper(kval)
+ color = color[0], color[1], color[2], np.abs(kval)/upper_bound
+ plot_single_contour(ax, x_contour, y_contour, t_val, color)
+
+ ax.set_zlim(self.row_range[0], self.row_range[-1])
+ ax.set_ylim(self.t_range[0], self.t_range[-1])
+ ax.set_xlim(self.col_range[0], self.col_range[-1])
+
+ if not save_file_name is None:
+ plt.savefig(save_file_name, transparent=True)
+
+ if show == True:
+ plt.show()
+
+ return ax, contour_dict
+
+def merge_spatial_temporal(spatial_kernel, temporal_kernel, threshold=0):
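+    # Combine a sparse 2D spatial kernel and a sparse 1D temporal kernel into
+    # a separable sparse Kernel3D via an outer product, then threshold.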
+
+ t_range = temporal_kernel.t_range
+
+ spatiotemporal_kernel = np.ones(( len(temporal_kernel), len(spatial_kernel)))
+ spatiotemporal_kernel *= spatial_kernel.kernel[None, :]
+ spatiotemporal_kernel *= temporal_kernel.kernel[:,None]
+ spatiotemporal_kernel = spatiotemporal_kernel.reshape((np.prod(spatiotemporal_kernel.shape)))
+
+ spatial_coord_array = np.empty((len(spatial_kernel),2))
+ spatial_coord_array[:,0] = spatial_kernel.col_inds
+ spatial_coord_array[:,1] = spatial_kernel.row_inds
+
+    spatiotemporal_coord_array = np.zeros((len(spatial_kernel)*len(temporal_kernel),3))
+    spatiotemporal_coord_array[:,0:2] = np.kron(np.ones((len(temporal_kernel),1)),spatial_coord_array)
+    spatiotemporal_coord_array[:,2] = np.kron(temporal_kernel.t_inds, np.ones(len(spatial_kernel)))
+
+    col_inds, row_inds, t_inds = map(lambda x: x.astype(int), spatiotemporal_coord_array.T)
+ kernel = Kernel3D(spatial_kernel.row_range, spatial_kernel.col_range, t_range, row_inds, col_inds, t_inds, spatiotemporal_kernel)
+ kernel.apply_threshold(threshold)
+
+ return kernel
+
+
+
+# Candidate for print
+# for ri, ci, ti, k in zip(kernel.row_inds, kernel.col_inds, kernel.t_inds, kernel.kernel):
+# print ri, ci, ti, k
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/lattice_unit_constructor.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/lattice_unit_constructor.py
new file mode 100644
index 0000000..4de580d
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/lattice_unit_constructor.py
@@ -0,0 +1,254 @@
+import itertools
+import os
+
+import matplotlib.pyplot as plt
+import numpy as np
+import scipy.io as sio
+import scipy.stats as sps
+from sympy.abc import x as symbolic_x
+from sympy.abc import y as symbolic_y
+
+from .cellmodel import LGNOnCell, LGNOffCell, LGNOnOffCell, TwoSubfieldLinearCell
+from .cursor import LNUnitCursor, MultiLNUnitCursor
+from .kernel import Kernel3D
+from .lgnmodel1 import LGNModel, heat_plot
+from .linearfilter import SpatioTemporalFilter
+from .lnunit import LNUnit, MultiLNUnit
+from .make_cell_list import multi_cell_random_generator, make_single_unit_cell_list, make_on_off_cell_list
+from .movie import Movie, FullFieldFlashMovie
+from .spatialfilter import GaussianSpatialFilter
+from .temporalfilter import TemporalFilterCosineBump
+from .transferfunction import MultiTransferFunction, ScalarTransferFunction
+
+def make_lattice_unit(lattice_unit_center=None):
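+    # Assemble one lattice unit: a mosaic of transient and sustained ON and
+    # OFF cells plus overlapping and spatially separated ON/OFF-subfield
+    # cells, all centered on lattice_unit_center.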
+ cell_list = []
+ tON_cell_list = make_tON_cell_list(lattice_unit_center)
+ tOFF_cell_list = make_tOFF_cell_list(lattice_unit_center)
+ sON_cell_list = make_sON_cell_list(lattice_unit_center)
+ sOFF_cell_list = make_sOFF_cell_list(lattice_unit_center)
+ overlap_onoff_cell_list = make_overlapping_onoff_cell_list(lattice_unit_center)
+ separate_onoff_cell_list = make_separate_onoff_cell_list(lattice_unit_center)
+
+ cell_list = tON_cell_list + tOFF_cell_list + sON_cell_list + sOFF_cell_list + overlap_onoff_cell_list + separate_onoff_cell_list
+
+ return cell_list
+
+
+def make_tON_cell_list(lattice_unit_center):
+ tON_cell_list = []
+
+ single_unit_cell_config = {}
+ single_unit_cell_config['lattice_unit_center']=lattice_unit_center
+ single_unit_cell_config['width'] = 5.
+ sz = [3,6,9]
+ ncells = [5,3,2]
+ amp_dist = sps.rv_discrete(values=([20,25], [.5,.5]))
+# kpeaks_dist = sps.multivariate_normal(mean=[40., 80.], cov=[[5.0, 0], [0, 5]])
+# wts = (.4,-.2)
+ kpeaks_dist = sps.multivariate_normal(mean=[15., 35.], cov=[[5.0, 0], [0, 5]])
+ wts = (4.,-2.5)
+ delays = (0.,0.)
+ single_unit_cell_config['amplitude'] = amp_dist
+ single_unit_cell_config['kpeaks'] = kpeaks_dist
+ single_unit_cell_config['weights'] = wts
+ single_unit_cell_config['delays'] = delays
+ for num_cells, sig in zip(ncells,sz):
+ single_unit_cell_config['number_of_cells'] = num_cells
+ single_unit_cell_config['sigma'] = (sig,sig)
+# print single_unit_cell_config
+ tON_cell_list += multi_cell_random_generator(make_single_unit_cell_list, **single_unit_cell_config)
+
+ #print len(tON_cell_list)
+ return tON_cell_list
+
+def make_tOFF_cell_list(lattice_unit_center):
+ tOFF_cell_list = []
+
+ single_unit_cell_config = {}
+ single_unit_cell_config['lattice_unit_center']=lattice_unit_center
+ single_unit_cell_config['width'] = 5.
+ sz = [3,6,9]
+ ncells = [10,5,5]
+ amp_dist = sps.rv_discrete(values=([-20,-25], [.5,.5]))
+# kpeaks_dist = sps.multivariate_normal(mean=[40., 80.], cov=[[5.0, 0], [0, 5]])
+# wts = (.4,-.2)
+ kpeaks_dist = sps.multivariate_normal(mean=[15., 35.], cov=[[5.0, 0], [0, 5]])
+ wts = (4.,-2.5)
+ delays = (0.,0.)
+ single_unit_cell_config['amplitude'] = amp_dist
+ single_unit_cell_config['kpeaks'] = kpeaks_dist
+ single_unit_cell_config['weights'] = wts
+ single_unit_cell_config['delays'] = delays
+ for num_cells, sig in zip(ncells,sz):
+ single_unit_cell_config['number_of_cells'] = num_cells
+ single_unit_cell_config['sigma'] = (sig,sig)
+ tOFF_cell_list += multi_cell_random_generator(make_single_unit_cell_list, **single_unit_cell_config)
+
+ #print len(tOFF_cell_list)
+ return tOFF_cell_list
+
+def make_sON_cell_list(lattice_unit_center):
+ sON_cell_list = []
+
+ single_unit_cell_config = {}
+ single_unit_cell_config['lattice_unit_center']=lattice_unit_center
+ single_unit_cell_config['width'] = 5.
+ sz = [3,6,9]
+ ncells = [5,3,2]
+ amp_dist = sps.rv_discrete(values=([20,25], [.5,.5]))
+# kpeaks_dist = sps.multivariate_normal(mean=[100., 160.], cov=[[5.0, 0], [0, 5]])
+# wts = (.4,-.1)
+ kpeaks_dist = sps.multivariate_normal(mean=[80., 120.], cov=[[5.0, 0], [0, 5]])
+ wts = (4.,-.85)
+ delays = (0.,0.)
+ single_unit_cell_config['amplitude'] = amp_dist
+ single_unit_cell_config['kpeaks'] = kpeaks_dist
+ single_unit_cell_config['weights'] = wts
+ single_unit_cell_config['delays'] = delays
+ for num_cells, sig in zip(ncells,sz):
+ single_unit_cell_config['number_of_cells'] = num_cells
+ single_unit_cell_config['sigma'] = (sig,sig)
+ sON_cell_list += multi_cell_random_generator(make_single_unit_cell_list, **single_unit_cell_config)
+
+ #print len(sON_cell_list)
+ return sON_cell_list
+
+def make_sOFF_cell_list(lattice_unit_center):
+ sOFF_cell_list = []
+
+ single_unit_cell_config = {}
+ single_unit_cell_config['lattice_unit_center']=lattice_unit_center
+ single_unit_cell_config['width'] = 5.
+ sz = [3,6,9]
+ ncells = [10,5,5]
+ amp_dist = sps.rv_discrete(values=([-20,-25], [.5,.5]))
+# kpeaks_dist = sps.multivariate_normal(mean=[100., 160.], cov=[[5.0, 0], [0, 5]])
+ kpeaks_dist = sps.multivariate_normal(mean=[80., 120.], cov=[[5.0, 0], [0, 5]])
+# wts = (.4,-.1)
+ wts = (4.,-.85)
+ delays = (0.,0.)
+ single_unit_cell_config['amplitude'] = amp_dist
+ single_unit_cell_config['kpeaks'] = kpeaks_dist
+ single_unit_cell_config['weights'] = wts
+ single_unit_cell_config['delays'] = delays
+ for num_cells, sig in zip(ncells,sz):
+ single_unit_cell_config['number_of_cells'] = num_cells
+ single_unit_cell_config['sigma'] = (sig,sig)
+ sOFF_cell_list += multi_cell_random_generator(make_single_unit_cell_list, **single_unit_cell_config)
+
+ #print len(sOFF_cell_list)
+ return sOFF_cell_list
+
+def make_overlapping_onoff_cell_list(lattice_unit_center):
+ overlap_onoff_cell_list = []
+
+ two_unit_cell_config = {}
+ two_unit_cell_config['lattice_unit_center']=lattice_unit_center
+ two_unit_cell_config['width']=5.
+
+ ncells = 4
+ sz = 9
+ ang_dist = sps.rv_discrete(values=(np.arange(0,180,45), 1./ncells*np.ones(ncells)))
+ amp_on_dist = sps.rv_discrete(values=([20,25], [.5,.5]))
+ amp_off_dist = sps.rv_discrete(values=([-20,-25], [.5,.5]))
+# kpeak_on_dist = sps.multivariate_normal(mean=[40., 80.], cov=[[5.0, 0], [0, 5]])
+# kpeak_off_dist = sps.multivariate_normal(mean=[50., 90.], cov=[[5.0, 0], [0, 5]])
+# wts_on = wts_off = (.4,-.2)
+ kpeak_on_dist = sps.multivariate_normal(mean=[15., 35.], cov=[[5.0, 0], [0, 5]])
+ kpeak_off_dist = sps.multivariate_normal(mean=[20., 40.], cov=[[5.0, 0], [0, 5]])
+ wts_on = wts_off = (4.,-2.5)
+ delays_on = delays_off = (0.,0.)
+ subfield_sep = 2.
+
+ two_unit_cell_config['number_of_cells'] = ncells
+ two_unit_cell_config['ang'] = ang_dist
+ two_unit_cell_config['amplitude_on'] = amp_on_dist
+ two_unit_cell_config['amplitude_off'] = amp_off_dist
+ two_unit_cell_config['kpeaks_on'] = kpeak_on_dist
+ two_unit_cell_config['kpeaks_off'] = kpeak_off_dist
+ two_unit_cell_config['weights_on'] = wts_on
+ two_unit_cell_config['weights_off'] = wts_off
+ two_unit_cell_config['sigma_on'] = (sz,sz)
+ two_unit_cell_config['sigma_off'] = (sz,sz)
+ two_unit_cell_config['subfield_separation'] = subfield_sep
+ two_unit_cell_config['dominant_subunit']='on'
+ two_unit_cell_config['delays_on']=delays_on
+ two_unit_cell_config['delays_off']=delays_off
+
+ overlap_onoff_cell_list += multi_cell_random_generator(make_on_off_cell_list, **two_unit_cell_config)
+
+ #print len(overlap_onoff_cell_list)
+ return overlap_onoff_cell_list
+
+def make_separate_onoff_cell_list(lattice_unit_center):
+ separate_onoff_cell_list = []
+
+ two_unit_cell_config = {}
+ two_unit_cell_config['lattice_unit_center']=lattice_unit_center
+ two_unit_cell_config['width']=5.
+
+ ncells = 8
+ sz = 6
+ ang_dist = np.arange(0,360,45)
+ subfield_sep = 4.
+
+# kpeak_dom_dist = sps.multivariate_normal(mean=[40., 80.], cov=[[5.0, 0], [0, 5]])
+# kpeak_nondom_dist = sps.multivariate_normal(mean=[100., 160.], cov=[[5.0, 0], [0, 5]])
+# wts_dom = (.4,-.2)
+# wts_nondom = (.4,-.1)
+
+ kpeak_dom_dist = sps.multivariate_normal(mean=[15., 35.], cov=[[5.0, 0], [0, 5]])
+ kpeak_nondom_dist = sps.multivariate_normal(mean=[80., 120.], cov=[[5.0, 0], [0, 5]])
+ wts_dom = (4.,-2.5)
+ wts_nondom = (4,-.85)
+ delays_dom = delays_nondom = (0.,0.)
+
+ two_unit_cell_config['number_of_cells'] = ncells
+ two_unit_cell_config['ang'] = ang_dist
+ two_unit_cell_config['sigma_on'] = (sz,sz)
+ two_unit_cell_config['sigma_off'] = (sz,sz)
+ two_unit_cell_config['subfield_separation'] = subfield_sep
+
+ #On-dominant
+ dom_subunit = 'on'
+ if dom_subunit=='on':
+ two_unit_cell_config['dominant_subunit'] = dom_subunit
+ amp_dom_dist = sps.rv_discrete(values=([20,25], [.5,.5]))
+ amp_nondom_dist = sps.rv_discrete(values=([-10,-15], [.5,.5]))
+ two_unit_cell_config['amplitude_on'] = amp_dom_dist
+ two_unit_cell_config['amplitude_off'] = amp_nondom_dist
+ two_unit_cell_config['kpeaks_on'] = kpeak_dom_dist
+ two_unit_cell_config['kpeaks_off'] = kpeak_nondom_dist
+ two_unit_cell_config['weights_on'] = wts_dom
+ two_unit_cell_config['weights_off'] = wts_nondom
+ two_unit_cell_config['delays_on'] = delays_dom
+ two_unit_cell_config['delays_off'] = delays_nondom
+ separate_onoff_cell_list += multi_cell_random_generator(make_on_off_cell_list, **two_unit_cell_config)
+
+ #Off-dominant
+ dom_subunit = 'off'
+ if dom_subunit=='off':
+ two_unit_cell_config['dominant_subunit'] = dom_subunit
+ amp_dom_dist = sps.rv_discrete(values=([-20,-25], [.5,.5]))
+ amp_nondom_dist = sps.rv_discrete(values=([10,15], [.5,.5]))
+ two_unit_cell_config['amplitude_off'] = amp_dom_dist
+ two_unit_cell_config['amplitude_on'] = amp_nondom_dist
+ two_unit_cell_config['kpeaks_off'] = kpeak_dom_dist
+ two_unit_cell_config['kpeaks_on'] = kpeak_nondom_dist
+ two_unit_cell_config['weights_off'] = wts_dom
+ two_unit_cell_config['weights_on'] = wts_nondom
+ two_unit_cell_config['delays_off'] = delays_dom
+ two_unit_cell_config['delays_on'] = delays_nondom
+ separate_onoff_cell_list += multi_cell_random_generator(make_on_off_cell_list, **two_unit_cell_config)
+
+ #print len(separate_onoff_cell_list)
+ return separate_onoff_cell_list
+
+if __name__ == "__main__":
+ lattice_unit_center = (40,30)
+ lattice_cell_list = make_lattice_unit(lattice_unit_center)
+ print(len(lattice_cell_list))
+
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/lgnmodel1.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/lgnmodel1.py
new file mode 100644
index 0000000..1b04710
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/lgnmodel1.py
@@ -0,0 +1,87 @@
+import numpy as np
+import matplotlib.pyplot as plt
+
+def line_plot(evaluate_result, ax=None, show=True, save_file_name=None, xlabel=None, ylabel=None, plotstyle=None):
+
+ if ax is None:
+ _, ax = plt.subplots(1,1)
+
+ if not plotstyle is None:
+ for ((t_range, y_vals), curr_plotstyle) in zip(evaluate_result, plotstyle):
+ ax.plot(t_range, y_vals, curr_plotstyle)
+ else:
+ for t_range, y_vals in evaluate_result:
+ ax.plot(t_range, y_vals)
+
+ if xlabel is None:
+ ax.set_xlabel('Time (Seconds)')
+ else:
+ ax.set_xlabel(xlabel)
+
+    if ylabel is None:
+        ax.set_ylabel('Firing Rate (Hz)')
+    else:
+        ax.set_ylabel(ylabel)
+
+ if not save_file_name is None:
+ plt.savefig(save_file_name, transparent=True)
+
+
+
+
+ if show == True:
+ plt.show()
+
+def heat_plot(evaluate_result, ax=None, show=True, save_file_name=None, colorbar=True, **kwargs):
+
+ if ax is None:
+ _, ax = plt.subplots(1,1)
+
+ data = np.empty((len(evaluate_result), len(evaluate_result[0][0])))
+ for ii, (t_vals, y_vals) in enumerate(evaluate_result):
+ data[ii,:] = y_vals
+
+ cax = ax.pcolor(t_vals, np.arange(len(evaluate_result)), data, **kwargs)
+ ax.set_ylim([0,len(evaluate_result)-1])
+ ax.set_xlim([t_vals[0], t_vals[-1]])
+ ax.set_ylabel('Neuron id')
+ ax.set_xlabel('Time (Seconds)')
+
+ if colorbar == True:
+ plt.colorbar(cax)
+
+ if not save_file_name is None:
+ plt.savefig(save_file_name, transparent=True)
+
+ if show == True:
+ plt.show()
+
+
+
+
+class LGNModel(object):
+
+ def __init__(self, cell_list):
+ self.cell_list = cell_list
+
+ def evaluate(self, movie, **kwargs):
+ return [cell.evaluate(movie, **kwargs) for cell in self.cell_list]
+
+# def plot(self):
+# if show == True:
+# plt.show()
+
+
+# show = kwargs.pop('show', False)
+# data = [cell.evaluate_movie(movie, **kwargs) for cell in self.cell_list]
+# t_list, y_list, kernel_list = zip(*data)
+
+# if show == True:
+# for y in y_list:
+# plt.plot(t_list[0], y)
+# plt.show()
+#
+# return t_list[0], y_list, kernel_list
+
+ def __len__(self):
+ return len(self.cell_list)
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/linearfilter.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/linearfilter.py
new file mode 100644
index 0000000..af7fef2
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/linearfilter.py
@@ -0,0 +1,128 @@
+import numpy as np
+from .kernel import Kernel3D
+import matplotlib.pyplot as plt
+
+class SpatioTemporalFilter(object):
+
+ def __init__(self, spatial_filter, temporal_filter, amplitude=1.):
+
+ self.spatial_filter = spatial_filter
+ self.temporal_filter = temporal_filter
+ self.amplitude = amplitude
+
+ def get_spatiotemporal_kernel(self, row_range, col_range, t_range=None, threshold=0, reverse=False):
+
+ spatial_kernel = self.spatial_filter.get_kernel(row_range, col_range, threshold=0)
+ temporal_kernel = self.temporal_filter.get_kernel(t_range=t_range, threshold=0, reverse=reverse)
+
+ t_range = temporal_kernel.t_range
+
+ spatiotemporal_kernel = np.ones(( len(temporal_kernel), len(spatial_kernel)))
+ spatiotemporal_kernel *= spatial_kernel.kernel[None, :]
+
+ spatiotemporal_kernel *= temporal_kernel.kernel[:,None]
+ spatiotemporal_kernel = spatiotemporal_kernel.reshape((np.prod(spatiotemporal_kernel.shape)))
+
+ spatial_coord_array = np.empty((len(spatial_kernel),2))
+ spatial_coord_array[:,0] = spatial_kernel.col_inds
+ spatial_coord_array[:,1] = spatial_kernel.row_inds
+
+        spatiotemporal_coord_array = np.zeros((len(spatial_kernel)*len(temporal_kernel), 3))
+        spatiotemporal_coord_array[:, 0:2] = np.kron(np.ones((len(temporal_kernel), 1)), spatial_coord_array)
+        spatiotemporal_coord_array[:, 2] = np.kron(temporal_kernel.t_inds, np.ones(len(spatial_kernel)))
+
+        col_inds, row_inds, t_inds = map(lambda x: x.astype(int), spatiotemporal_coord_array.T)
+ kernel = Kernel3D(spatial_kernel.row_range, spatial_kernel.col_range, t_range, row_inds, col_inds, t_inds, spatiotemporal_kernel)
+ kernel.apply_threshold(threshold)
+
+
+ kernel.kernel *= self.amplitude
+
+
+ return kernel
+
+ def t_slice(self, t, *args, **kwargs):
+
+ k = self.get_spatiotemporal_kernel(*args, **kwargs)
+ return k.t_slice(t)
+
+ def show_temporal_filter(self, *args, **kwargs):
+
+ self.temporal_filter.imshow(*args, **kwargs)
+
+ def show_spatial_filter(self, *args, **kwargs):
+
+ self.spatial_filter.imshow(*args, **kwargs)
+
+ def to_dict(self):
+
+ return {'class':(__name__, self.__class__.__name__),
+ 'spatial_filter':self.spatial_filter.to_dict(),
+ 'temporal_filter':self.temporal_filter.to_dict(),
+ 'amplitude':self.amplitude}
+
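+# Sketch of composing a full filter from its parts (parameter values are
+# illustrative; see make_cell_list.py for realistic configurations):
+#
+# from .spatialfilter import GaussianSpatialFilter
+# from .temporalfilter import TemporalFilterCosineBump
+# spatial = GaussianSpatialFilter(sigma=(2., 2.), translate=(20., 15.))
+# temporal = TemporalFilterCosineBump((.4, -.3), (20, 60), (0, 0))
+# stf = SpatioTemporalFilter(spatial, temporal, amplitude=20.)
+# kernel = stf.get_spatiotemporal_kernel(np.arange(60), np.arange(80))
+#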
+# class OnOffSpatioTemporalFilter(SpatioTemporalFilter):
+#
+# def __init__(self, on_spatiotemporal_filter, off_spatiotemporal_filter):
+#
+# self.on_spatiotemporal_filter = on_spatiotemporal_filter
+# self.off_spatiotemporal_filter = off_spatiotemporal_filter
+#
+# def get_spatiotemporal_kernel(self, col_range, row_range, t_range=None, threshold=0, reverse=False):
+#
+# on_kernel = self.on_spatiotemporal_filter.get_spatiotemporal_kernel(col_range, row_range, t_range, threshold, reverse)
+# off_kernel = self.off_spatiotemporal_filter.get_spatiotemporal_kernel(col_range, row_range, t_range, threshold, reverse)
+#
+# return on_kernel + off_kernel*(-1)
+#
+# def to_dict(self):
+#
+# return {'class':(__name__, self.__class__.__name__),
+# 'on_filter':self.on_spatiotemporal_filter.to_dict(),
+# 'off_filter':self.off_spatiotemporal_filter.to_dict()}
+#
+# class TwoSubfieldLinearFilter(OnOffSpatioTemporalFilter):
+#
+# def __init__(self, dominant_spatiotemporal_filter, nondominant_spatiotemporal_filter, subfield_separation=10, onoff_axis_angle=45, dominant_subfield_location=(30,40)):
+#
+# self.subfield_separation = subfield_separation
+# self.onoff_axis_angle = onoff_axis_angle
+# self.dominant_subfield_location = dominant_subfield_location
+# self.dominant_spatiotemporal_filter = dominant_spatiotemporal_filter
+# self.nondominant_spatiotemporal_filter = nondominant_spatiotemporal_filter
+#
+# dom_amp = dominant_spatiotemporal_filter.spatial_filter.amplitude
+# nondom_amp = nondominant_spatiotemporal_filter.spatial_filter.amplitude
+# if dom_amp < 0 and nondom_amp > 0:
+# super(TwoSubfieldLinearFilter, self).__init__(self.nondominant_spatiotemporal_filter, self.dominant_spatiotemporal_filter)
+# elif dom_amp > 0 and nondom_amp < 0:
+# super(TwoSubfieldLinearFilter, self).__init__(self.dominant_spatiotemporal_filter, self.nondominant_spatiotemporal_filter)
+# else:
+# raise ValueError('Subfields are not of opposite polarity')
+#
+# self.dominant_spatiotemporal_filter.spatial_filter.translate = self.dominant_subfield_location
+# hor_offset = np.cos(self.onoff_axis_angle*np.pi/180.)*self.subfield_separation + self.dominant_subfield_location[0]
+# vert_offset = np.sin(self.onoff_axis_angle*np.pi/180.)*self.subfield_separation+ self.dominant_subfield_location[1]
+# rel_translation = (hor_offset,vert_offset)
+# self.nondominant_spatiotemporal_filter.spatial_filter.translate = rel_translation
+# self.nondominant_spatiotemporal_filter.spatial_filter.origin=self.dominant_spatiotemporal_filter.spatial_filter.origin
+#
+#
+# def to_dict(self):
+#
+# raise NotImplementedError
+#
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/lnunit.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/lnunit.py
new file mode 100644
index 0000000..ebc9952
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/lnunit.py
@@ -0,0 +1,380 @@
+import os
+import itertools
+import matplotlib.pyplot as plt
+import numpy as np
+from . import utilities as util
+import importlib
+from .kernel import Kernel2D, Kernel3D
+from .linearfilter import SpatioTemporalFilter
+import json
+from .spatialfilter import GaussianSpatialFilter
+from .transferfunction import ScalarTransferFunction
+from .temporalfilter import TemporalFilterCosineBump
+from .cursor import LNUnitCursor, MultiLNUnitCursor, MultiLNUnitMultiMovieCursor, SeparableLNUnitCursor, SeparableMultiLNUnitCursor
+from .movie import Movie
+from .lgnmodel1 import LGNModel, heat_plot
+from .transferfunction import MultiTransferFunction, ScalarTransferFunction
+
+
+class LNUnit(object):
+
+ def __init__(self, linear_filter, transfer_function, amplitude=1.):
+
+ self.linear_filter = linear_filter
+ self.transfer_function = transfer_function
+ self.amplitude = amplitude
+
+ def evaluate(self, movie, **kwargs):
+ return self.get_cursor(movie, separable=kwargs.pop('separable', False)).evaluate(**kwargs)
+
+ def get_spatiotemporal_kernel(self, *args, **kwargs):
+ return self.linear_filter.get_spatiotemporal_kernel(*args, **kwargs)
+
+ def get_cursor(self, movie, threshold=0, separable = False):
+ if separable:
+ return SeparableLNUnitCursor(self, movie)
+ else:
+ return LNUnitCursor(self, movie, threshold=threshold)
+
+ def show_temporal_filter(self, *args, **kwargs):
+ self.linear_filter.show_temporal_filter(*args, **kwargs)
+
+ def show_spatial_filter(self, *args, **kwargs):
+ self.linear_filter.show_spatial_filter(*args, **kwargs)
+
+ def to_dict(self):
+ return {'class':(__name__, self.__class__.__name__),
+ 'linear_filter':self.linear_filter.to_dict(),
+ 'transfer_function':self.transfer_function.to_dict()}
+
+class MultiLNUnit(object):
+
+ def __init__(self, lnunit_list, transfer_function):
+
+ self.lnunit_list = lnunit_list
+ self.transfer_function = transfer_function
+
+ def get_spatiotemporal_kernel(self, *args, **kwargs):
+
+ k = Kernel3D([],[],[],[],[],[],[])
+ for unit in self.lnunit_list:
+ k = k+unit.get_spatiotemporal_kernel(*args, **kwargs)
+
+ return k
+
+ def show_temporal_filter(self, *args, **kwargs):
+
+ ax = kwargs.pop('ax', None)
+ show = kwargs.pop('show', None)
+ save_file_name = kwargs.pop('save_file_name', None)
+
+
+ if ax is None:
+ _, ax = plt.subplots(1,1)
+
+ kwargs.update({'ax':ax, 'show':False, 'save_file_name':None})
+ for unit in self.lnunit_list:
+ if unit.linear_filter.amplitude < 0:
+ color='b'
+ else:
+ color='r'
+ unit.linear_filter.show_temporal_filter(color=color, **kwargs)
+
+        if save_file_name is not None:
+            plt.savefig(save_file_name, transparent=True)
+
+        if show:
+            plt.show()
+
+ return ax
+
+ def show_spatial_filter(self, *args, **kwargs):
+
+ ax = kwargs.pop('ax', None)
+ show = kwargs.pop('show', True)
+ save_file_name = kwargs.pop('save_file_name', None)
+ colorbar = kwargs.pop('colorbar', True)
+
+ k = Kernel2D(args[0],args[1],[],[],[])
+ for lnunit in self.lnunit_list:
+ k = k + lnunit.linear_filter.spatial_filter.get_kernel(*args, **kwargs)
+ k.imshow(ax=ax, show=show, save_file_name=save_file_name, colorbar=colorbar)
+
+ def get_cursor(self, *args, **kwargs):
+
+ threshold = kwargs.get('threshold', 0.)
+ separable = kwargs.get('separable', False)
+
+ if len(args) == 1:
+ movie = args[0]
+ if separable:
+ return SeparableMultiLNUnitCursor(self, movie)
+ else:
+ return MultiLNUnitCursor(self, movie, threshold=threshold)
+ elif len(args) > 1:
+ movie_list = args
+ if separable:
+ raise NotImplementedError
+ else:
+ return MultiLNUnitMultiMovieCursor(self, movie_list, threshold=threshold)
+ else:
+            raise ValueError('get_cursor requires at least one movie argument')
+
+
+ def evaluate(self, movie, **kwargs):
+        separable = kwargs.pop('separable', False)
+        return self.get_cursor(movie, separable=separable).evaluate(**kwargs)
+
+from sympy.abc import x, y
+
+if __name__ == "__main__":
+
+ movie_file = '/data/mat/iSee_temp_shared/movies/TouchOfEvil.npy'
+ m_data = np.load(movie_file, 'r')
+ m = Movie(m_data[1000:], frame_rate=30.)
+
+ # Create second cell:
+ transfer_function = ScalarTransferFunction('s')
+    temporal_filter = TemporalFilterCosineBump((.4,-.3), (20,60), (0, 0))  # delays argument is required by TemporalFilterCosineBump
+ cell_list = []
+ for xi in np.linspace(0,m.data.shape[2], 5):
+ for yi in np.linspace(0,m.data.shape[1], 5):
+ spatial_filter_on = GaussianSpatialFilter(sigma=(2,2), origin=(0,0), translate=(xi, yi))
+ on_linear_filter = SpatioTemporalFilter(spatial_filter_on, temporal_filter, amplitude=20)
+ on_lnunit = LNUnit(on_linear_filter, transfer_function)
+ spatial_filter_off = GaussianSpatialFilter(sigma=(4,4), origin=(0,0), translate=(xi, yi))
+ off_linear_filter = SpatioTemporalFilter(spatial_filter_off, temporal_filter, amplitude=-20)
+ off_lnunit = LNUnit(off_linear_filter, transfer_function)
+
+ multi_transfer_function = MultiTransferFunction((x, y), 'x+y')
+
+ multi_unit = MultiLNUnit([on_lnunit, off_lnunit], multi_transfer_function)
+ cell_list.append(multi_unit)
+
+ lgn = LGNModel(cell_list) #Here include a list of all cells
+ y = lgn.evaluate(m, downsample=10) #Does the filtering + non-linearity on movie object m
+ heat_plot(y, interpolation='none', colorbar=False)
+
+
+
+
+
+#
+# def imshow(self, ii, image_shape, fps, ax=None, show=True, relative_spatial_location=(0,0)):
+#
+# if ax is None:
+# _, ax = plt.subplots(1,1)
+#
+# curr_kernel = self.get_spatio_temporal_kernel(image_shape, fps, relative_spatial_location=relative_spatial_location)
+#
+# cax = curr_kernel.imshow(ii, ax=ax, show=False)
+#
+# if show == True:
+# plt.show()
+#
+# return ax
+#
+#
+# class OnOffCellModel(CellModel):
+#
+# def __init__(self, dc_offset=0, on_subfield=None, off_subfield=None, on_weight = 1, off_weight = -1, t_max=None):
+#
+# super(self.__class__, self).__init__(dc_offset, t_max)
+#
+# if isinstance(on_subfield, dict):
+# curr_module, curr_class = on_subfield.pop('class')
+# self.on_subfield = getattr(importlib.import_module(curr_module), curr_class)(**on_subfield)
+# else:
+# self.on_subfield = on_subfield
+#
+# super(self.__class__, self).add_subfield(on_subfield, on_weight)
+#
+# if isinstance(off_subfield, dict):
+# curr_module, curr_class = off_subfield.pop('class')
+# self.off_subfield = getattr(importlib.import_module(curr_module), curr_class)(**off_subfield)
+# else:
+# self.off_subfield = off_subfield
+#
+# super(self.__class__, self).add_subfield(off_subfield, off_weight)
+#
+#
+# def to_dict(self):
+#
+# return {'dc_offset':self.dc_offset,
+# 'on_subfield':self.on_subfield.to_dict(),
+# 'off_subfield':self.off_subfield.to_dict(),
+# 't_max':self.t_max,
+# 'class':(__name__, self.__class__.__name__)}
+#
+# class SingleSubfieldCellModel(CellModel):
+#
+# def __init__(self, subfield, weight = 1, dc_offset=0, t_max=None):
+#
+# super(SingleSubfieldCellModel, self).__init__(dc_offset, t_max)
+#
+# if isinstance(subfield, dict):
+# curr_module, curr_class = subfield.pop('class')
+# subfield = getattr(importlib.import_module(curr_module), curr_class)(**subfield)
+#
+# super(self.__class__, self).add_subfield(subfield, weight)
+#
+# def to_dict(self):
+#
+# assert len(self.subfield_list) == 1
+# subfield = self.subfield_list[0]
+# weight = self.subfield_weight_dict[subfield]
+#
+# return {'dc_offset':self.dc_offset,
+# 'subfield':subfield.to_dict(),
+# 'weight':weight,
+# 't_max':self.t_max,
+# 'class':(__name__, self.__class__.__name__)}
+#
+# class OnCellModel(SingleSubfieldCellModel):
+#
+# def __init__(self, on_subfield, weight = 1, dc_offset=0 , t_max=None):
+# assert weight > 0
+# super(OnCellModel, self).__init__(on_subfield, weight, dc_offset, t_max)
+#
+# def to_dict(self):
+# data_dict = super(OnCellModel, self).to_dict()
+# data_dict['on_subfield'] = data_dict.pop('subfield')
+# return data_dict
+#
+# class OffCellModel(SingleSubfieldCellModel):
+#
+# def __init__(self, on_subfield, weight = -1, dc_offset=0 , t_max=None):
+# assert weight < 0
+# super(OffCellModel, self).__init__(on_subfield, weight, dc_offset, t_max)
+#
+# def to_dict(self):
+# data_dict = super(OffCellModel, self).to_dict()
+# data_dict['off_subfield'] = data_dict.pop('subfield')
+# return data_dict
+
+
+# class OffCellModel(CellModel):
+#
+# def __init__(self, off_subfield, dc_offset=0, off_weight = 1, t_max=None):
+#
+# assert off_weight < 0.
+# self.weight = off_weight
+#
+#
+#
+#
+# super(self.__class__, self).__init__(dc_offset, t_max)
+#
+# if isinstance(on_subfield, dict):
+# curr_module, curr_class = on_subfield.pop('class')
+# self.subfield = getattr(importlib.import_module(curr_module), curr_class)(**on_subfield)
+# else:
+# self.subfield = on_subfield
+#
+# super(self.__class__, self).add_subfield(self.subfield, self.weight)
+#
+# def to_dict(self):
+#
+# return {'dc_offset':self.dc_offset,
+# 'on_subfield':self.subfield.to_dict(),
+# 'on_weight':self.weight,
+# 't_max':self.t_max,
+# 'class':(__name__, self.__class__.__name__)}
+
+
+
+
+
+
+# if __name__ == "__main__":
+#
+# t = np.arange(0,.5,.001)
+# example_movie = movie.Movie(file_name=os.path.join(isee_engine.movie_directory, 'TouchOfEvil.npy'), frame_rate=30.1, memmap=True)
+#
+# temporal_filter_on = TemporalFilterExponential(weight=1, tau=.05)
+# on_subfield = Subfield(scale=(5,15), weight=.5, rotation=30, temporal_filter=temporal_filter_on, translation=(0,0))
+#
+# temporal_filter_off = TemporalFilterExponential(weight=2, tau=.01)
+# off_subfield = Subfield(scale=(5,15), weight=.5, rotation=-30, temporal_filter=temporal_filter_off)
+#
+# cell = OnOffCellModel(on_subfield=on_subfield, off_subfield=off_subfield, dc_offset=0., t_max=.5)
+# curr_kernel = cell.get_spatio_temporal_kernel((100,150), 30.1)
+# curr_kernel.imshow(0)
+#
+# print cell.to_dict()
+
+
+
+# f = cell.get_spatio_temporal_filter(example_movie.movie_data.shape[1:], t,threshold=.5)
+# print len(f.t_ind_list)
+#
+#
+
+# for ii in range(example_movie.number_of_frames-curr_filter.t_max):
+# print ii, example_movie.number_of_frames, curr_filter.map(example_movie, ii)
+
+
+# off_subfield = Subfield(scale=(15,15), weight=.2, translation=(30,30))
+
+
+#
+# curr_filter = cell.get_spatio_temporal_filter((100,150))
+#
+
+#
+# # print touch_of_evil(40.41, mask=m)
+# print curr_filter.t_max
+# for ii in range(example_movie.number_of_frames-curr_filter.t_max):
+# print ii, example_movie.number_of_frames, curr_filter.map(example_movie, ii)
+
+# cell.visualize_spatial_filter((100,150))
+# show_volume(spatio_temporal_filter, vmin=spatio_temporal_filter.min(), vmax=spatio_temporal_filter.max())
+
+
+
+# def get_spatial_filter(self, image_shape, relative_spatial_location=(0,0), relative_threshold=default_relative_threshold):
+#
+# # Initialize:
+# translation_matrix = util.get_translation_matrix(relative_spatial_location)
+#
+# # On-subunit:
+# on_filter_pre_spatial = self.on_subfield.get_spatial_filter(image_shape)
+# on_filter_spatial = util.apply_transformation_matrix(on_filter_pre_spatial, translation_matrix)
+#
+# # Off-subunit:
+# off_filter_pre_spatial = self.off_subfield.get_spatial_filter(image_shape)
+# off_filter_spatial = util.apply_transformation_matrix(off_filter_pre_spatial, translation_matrix)
+#
+# spatial_filter = on_filter_spatial - off_filter_spatial
+#
+# tmp = np.abs(spatial_filter)
+# spatial_filter[np.where(tmp/tmp.max() < relative_threshold )] = 0
+#
+# return spatial_filter
+
+# kernel = float(self.dc_offset)/len(nonzero_ind_tuple[0])+spatio_temporal_filter[nonzero_ind_tuple]
+
+# def rectifying_filter_factory(kernel, movie, dc_offset=0):
+#
+# def rectifying_filter(t):
+#
+# fi = movie.frame_rate*float(t)
+# fim, fiM = np.floor(fi), np.ceil(fi)
+#
+# print t, fim, fiM
+#
+# try:
+# s1 = (movie.movie_data[int(fim)+kernel.t_ind_list, kernel.row_ind_list, kernel.col_ind_list]*kernel.kernel).sum()
+# s2 = (movie.movie_data[int(fiM)+kernel.t_ind_list, kernel.row_ind_list, kernel.col_ind_list]*kernel.kernel).sum()
+# except IndexError:
+# return None
+#
+# # Linear interpolation:
+# s_pre = dc_offset + s1*((1-(fi-fim))*.5) + s2*((fi-fim)*.5)
+#
+# if s_pre < 0:
+# return 0
+# else:
+# return float(s_pre)
+#
+# return rectifying_filter
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/make_cell_list.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/make_cell_list.py
new file mode 100644
index 0000000..aa05481
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/make_cell_list.py
@@ -0,0 +1,294 @@
+import os
+import itertools
+import scipy.io as sio
+import scipy.stats as sps
+import numpy as np
+import matplotlib.pyplot as plt
+from sympy.abc import x as symbolic_x
+from sympy.abc import y as symbolic_y
+
+from .linearfilter import SpatioTemporalFilter
+from .spatialfilter import GaussianSpatialFilter
+from .temporalfilter import TemporalFilterCosineBump
+from .transferfunction import MultiTransferFunction, ScalarTransferFunction
+from .cursor import LNUnitCursor, MultiLNUnitCursor
+from .movie import Movie, FullFieldFlashMovie
+from .lgnmodel1 import LGNModel, heat_plot
+from .cellmodel import LGNOnCell, LGNOffCell, LGNOnOffCell, TwoSubfieldLinearCell, OnUnit, OffUnit
+from .lnunit import LNUnit, MultiLNUnit
+from .kernel import Kernel3D
+
+# def multi_cell_tensor_generator(cell_creation_function, **kwargs):
+#
+# sew_param_dict = {}
+# static_param_dict = {}
+# for key, val in kwargs.items():
+# if isinstance(val, (list, np.ndarray)):
+# sew_param_dict[key]=val
+# else:
+# static_param_dict[key]=val
+#
+# cell_list = []
+# loop_keys, loop_lists = zip(*sew_param_dict.items())
+# for param_tuple in itertools.product(*loop_lists):
+# param_dict = dict(zip(loop_keys, param_tuple))
+# print param_dict
+# param_dict.update(static_param_dict)
+# cell_list += cell_creation_function(**param_dict)
+#
+# return cell_list
+
+def multi_cell_random_generator(cell_creation_function=None, **kwargs):
+
+ sew_param_dict = {}
+ static_param_dict = {}
+ range_key_dict = {}
+ for key, val in kwargs.items():
+ if isinstance(val, (sps.rv_continuous, sps.rv_discrete)) or type(val) == type(sps.multivariate_normal()):
+ sew_param_dict[key]=val
+ elif isinstance(val, np.ndarray):
+ range_key_dict[key] = val
+ else:
+ static_param_dict[key]=val
+
+ number_of_cells = static_param_dict.pop('number_of_cells', 1)
+
+ for key, val in range_key_dict.items():
+ assert len(val) == number_of_cells
+
+ cell_list = []
+ loop_keys, loop_lists = zip(*sew_param_dict.items())
+ value_instance_list = zip(*map(lambda x: x.rvs(size=number_of_cells), loop_lists))
+ for ii, curr_value_instance in enumerate(value_instance_list):
+ param_dict = dict(zip(loop_keys, curr_value_instance))
+ param_dict.update(static_param_dict)
+ param_dict['number_of_cells'] = 1
+ for range_key in range_key_dict:
+ param_dict[range_key] = range_key_dict[range_key][ii]
+
+ if cell_creation_function is None:
+ cell_list.append(param_dict)
+ else:
+ cell_list += cell_creation_function(**param_dict)
+
+ return cell_list
+
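+# Usage sketch: frozen scipy distributions are sampled once per cell, numpy
+# arrays are indexed per cell, and everything else is passed through unchanged
+# (values illustrative):
+#
+# amp_dist = sps.rv_discrete(values=([20, 25], [.5, .5]))
+# cells = multi_cell_random_generator(make_single_unit_cell_list,
+#                                     number_of_cells=5,
+#                                     lattice_unit_center=(40, 30),
+#                                     weights=(.4, -.2), kpeaks=(40, 80),
+#                                     delays=(0, 0), amplitude=amp_dist,
+#                                     sigma=(4, 4))
+#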
+
+def make_single_unit_cell_list(number_of_cells=None,
+ lattice_unit_center=None,
+ weights=None,
+ kpeaks=None,
+ delays=None,
+ amplitude=None,
+ sigma=None,
+ width=5,
+ transfer_function_str = 'Heaviside(s)*s'):
+
+ cell_list = []
+ for _ in range(number_of_cells):
+ dxi = np.random.uniform(-width*1./2,width*1./2)
+ dyi = np.random.uniform(-width*1./2,width*1./2)
+ temporal_filter = TemporalFilterCosineBump(weights, kpeaks,delays)
+ spatial_filter = GaussianSpatialFilter(translate=(dxi,dyi), sigma=sigma, origin=lattice_unit_center) # all distances measured from BOTTOM LEFT
+ spatiotemporal_filter = SpatioTemporalFilter(spatial_filter, temporal_filter, amplitude=amplitude)
+ transfer_function = ScalarTransferFunction(transfer_function_str)
+ if amplitude > 0.:
+ cell = OnUnit(spatiotemporal_filter, transfer_function)
+ elif amplitude < 0.:
+ cell = OffUnit(spatiotemporal_filter, transfer_function)
+ else:
+            raise ValueError('amplitude must be nonzero (positive for On units, negative for Off units)')
+
+
+ cell_list.append(cell)
+
+ return cell_list
+
+def make_on_off_cell_list(number_of_cells=None,
+ lattice_unit_center=None,
+ weights_on=None,
+ weights_off=None,
+ kpeaks_on=None,
+ kpeaks_off=None,
+ delays_on = None,
+ delays_off = None,
+ amplitude_on=None,
+ amplitude_off=None,
+ sigma_on=None,
+ sigma_off=None,
+ subfield_separation=None,
+ ang=None,
+ dominant_subunit=None,
+ width=5,
+ transfer_function_str = 'Heaviside(x)*x + Heaviside(y)*y'):
+
+ cell_list = []
+ for _ in range(number_of_cells):
+
+ dxi = np.random.uniform(-width*1./2,width*1./2)
+ dyi = np.random.uniform(-width*1./2,width*1./2)
+
+ dominant_subfield_location = (lattice_unit_center[0]+dxi, lattice_unit_center[1]+dyi)
+# hor_offset = np.cos(ang*np.pi/180.)*subfield_separation
+# vert_offset = np.sin(ang*np.pi/180.)*subfield_separation
+# nondominant_subfield_translation = (hor_offset,vert_offset)
+
+ if dominant_subunit == 'on':
+ on_translate = dominant_subfield_location#(0,0)
+ off_translate = dominant_subfield_location#nondominant_subfield_translation
+
+ elif dominant_subunit == 'off':
+
+ off_translate = dominant_subfield_location#(0,0)
+ on_translate = dominant_subfield_location#nondominant_subfield_translation
+
+ else:
+            raise ValueError("dominant_subunit must be 'on' or 'off'")
+
+ on_origin = off_origin = (0,0)#dominant_subfield_location
+
+ temporal_filter_on = TemporalFilterCosineBump(weights_on, kpeaks_on,delays_on)
+ spatial_filter_on = GaussianSpatialFilter(translate=on_translate,sigma=sigma_on, origin=on_origin) # all distances measured from BOTTOM LEFT
+ on_filter = SpatioTemporalFilter(spatial_filter_on, temporal_filter_on, amplitude=amplitude_on)
+
+ temporal_filter_off = TemporalFilterCosineBump(weights_off, kpeaks_off,delays_off)
+ spatial_filter_off = GaussianSpatialFilter(translate=off_translate,sigma=sigma_off, origin=off_origin) # all distances measured from BOTTOM LEFT
+ off_filter = SpatioTemporalFilter(spatial_filter_off, temporal_filter_off, amplitude=amplitude_off)
+
+# cell = LGNOnOffCell(on_filter, off_filter, transfer_function=MultiTransferFunction((symbolic_x, symbolic_y), transfer_function_str))
+ cell = TwoSubfieldLinearCell(on_filter,off_filter,subfield_separation=subfield_separation, onoff_axis_angle=ang, dominant_subfield_location=dominant_subfield_location)
+ cell_list.append(cell)
+
+ return cell_list
+
+# amplitude_list = amplitude_dist.rvs(size=5)
+# kpeak_list = kpeak_dist.rvs(size=5)
+# cell_config = {'number_of_cells':5,
+# 'lattice_unit_center':(40,30),
+# 'weights':(.4,-.2),
+# 'kpeaks':kpeak_list,
+# 'amplitude':amplitude_list,
+# 'sigma':(4,4),
+# 'width':5}
+# multi_cell_tensor_generator(make_single_unit_cell_list, **cell_config)
+
+
+# amplitude_dist = sps.rv_discrete(values=([20,25], [.5,.5]))
+# kpeak_dist = sps.multivariate_normal(mean=[40., 80.], cov=[[5.0, 0], [0, 5]])
+#
+# single_unit_cell_config = {'number_of_cells':10,
+# 'lattice_unit_center':(40,30),
+# 'weights':(.4,-.2),
+# 'kpeaks':kpeak_dist,
+# 'amplitude':amplitude_dist,
+# 'sigma':(4,4),
+# 'width':5}
+#
+#
+# amplitude_on_dist = sps.rv_discrete(values=([20,25], [.5,.5]))
+# amplitude_off_dist = sps.rv_discrete(values=([-10,-15], [.5,.5]))
+# kpeak_on_dist = sps.multivariate_normal(mean=[40., 80.], cov=[[5.0, 0], [0, 5]])
+# kpeak_off_dist = sps.multivariate_normal(mean=[100., 160.], cov=[[5.0, 0], [0, 5]])
+# #ang_dist = sps.rv_discrete(values=(np.arange(0,360,45), 1./8*np.ones((1,8))))
+# ang_dist = np.arange(0,360,45)
+#
+# two_unit_cell_config={'number_of_cells':8,
+# 'lattice_unit_center':(40,30),
+# 'weights_on':(.4,-.2),
+# 'weights_off':(.4,-.1),
+# 'kpeaks_on':kpeak_on_dist,
+# 'kpeaks_off':kpeak_off_dist,
+# 'amplitude_on':20.,
+# 'amplitude_off':-10.,
+# 'sigma_on':(4,4),
+# 'sigma_off':(4,4),
+# 'subfield_separation':2.,
+# 'ang':ang_dist,
+# 'dominant_subunit':'on',
+# 'width':5}
+
+
+def evaluate_cell_and_plot(input_cell, input_movie, ax, show=False):
+ t, y = input_cell.evaluate(input_movie,downsample = 10)
+ ax.plot(t, y)
+
+    if show:
+        plt.show()
+
+
+# if __name__ == "__main__":
+#
+# # Create stimulus 0:
+# frame_rate = 60
+# m1 = FullFieldFlashMovie(np.arange(60), np.arange(80), 1., 3., frame_rate=frame_rate).full(t_max=3)
+# m2 = FullFieldFlashMovie(np.arange(60), np.arange(80), 0, 2, frame_rate=frame_rate, max_intensity=-1).full(t_max=2)
+# m3 = FullFieldFlashMovie(np.arange(60), np.arange(80), 0, 2., frame_rate=frame_rate).full(t_max=2)
+# m4 = FullFieldFlashMovie(np.arange(60), np.arange(80), 0, 2, frame_rate=frame_rate, max_intensity=0).full(t_max=2)
+# m0 = m1+m2+m3+m4
+#
+# # Create stimulus 1:
+# movie_file = '/data/mat/RamIyer/for_Anton/grating_ori0_res2.mat'
+# m_file = sio.loadmat(movie_file)
+# m_data_raw = m_file['mov_fine'].T
+# m_data = np.reshape(m_data_raw,(3000,64,128))
+# m1 = Movie(m_data, frame_rate=1000.)
+#
+# #Create stimulus 2:
+# movie_file = '/data/mat/iSee_temp_shared/TouchOfEvil_norm.npy'
+# m_data = np.load(movie_file, 'r')
+# m = Movie(m_data[1000:], frame_rate=30.)
+#
+# movie_list = [m0, m1, m2]
+#
+# #====================================================
+#
+# #Create cell list
+#
+# cell_list = []
+#
+# #On cells
+# params_tON = (5, (40,30), (.4,-.2),(40,80),20.,(4,4))
+# tON_list = make_single_unit_cell_list(*params_tON)
+# cell_list.append(tON_list)
+#
+# params_sON = (5, (40,30), (.4,-.1),(100,160),20.,(4,4))
+# sON_list = make_single_unit_cell_list(*params_sON)
+# cell_list.append(sON_list)
+#
+# #Off cells
+# params_tOFF = (5, (40,30), (.4,-.2),(40,80),-20.,(4,4))
+# tOFF_list = make_single_unit_cell_list(*params_tOFF)
+# cell_list.append(tOFF_list)
+#
+# params_sOFF = (5, (40,30), (.4,-.1),(100,160),-20.,(4,4))
+# sOFF_list = make_single_unit_cell_list(*params_sOFF)
+# cell_list.append(sOFF_list)
+#
+# #ONOFF cells
+# params_onoff = (5, (40,30),(.4, -.2),(.4,-.2),(40, 80),(50,100),20.,-20.,(4,4),(4,4),2.,0,'on')
+# onoff_list = make_on_off_cell_list(*params_onoff)
+# cell_list.append(onoff_list)
+#
+# #Two subunit cells
+# params_twosub = (5, (40,30),(.4, -.2),(.4,-.1),(40, 80),(100,160),20.,-10.,(4,2),(3,4),10.,90,'on')
+# twosub_list = make_on_off_cell_list(*params_twosub)
+# cell_list.append(twosub_list)
+#
+# #=====================================================
+# #Evaluate and plot responses
+# nc = len(movie_list)
+# nr = len(cell_list)
+# fig, axes = plt.subplots(nr,nc+2)
+#
+# for curr_row, curr_cell in zip(axes, cell_list):
+# curr_cell.show_spatial_filter(np.arange(60),np.arange(80), ax=curr_row[0], show=False, colorbar=False)
+# curr_cell.show_temporal_filter(ax=curr_row[1], show=False)
+#
+# for curr_row, curr_cell in zip(axes, cell_list):
+# for curr_ax, curr_movie in zip(curr_row[2:], movie_list):
+# evaluate_cell_and_plot(curr_cell, curr_movie, curr_ax, show=False)
+#
+# plt.tight_layout()
+# plt.show()
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/movie.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/movie.py
new file mode 100755
index 0000000..a9d4e67
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/movie.py
@@ -0,0 +1,196 @@
+import matplotlib.pyplot as plt
+import numpy as np
+from .utilities import convert_tmin_tmax_framerate_to_trange
+
+
+class Movie(object):
+ def __init__(self, data, row_range=None, col_range=None, labels=('time', 'y', 'x'),
+ units=('second', 'pixel', 'pixel'), frame_rate=None, t_range=None):
+ self.data = data
+ self.labels = labels
+ self.units = units
+ assert units[0] == 'second'
+
+ if t_range is None:
+ self.frame_rate = float(frame_rate)
+ self.t_range = np.arange(data.shape[0])*(1./self.frame_rate)
+ else:
+ self.t_range = np.array(t_range)
+ self.frame_rate = 1./np.mean(np.diff(t_range))
+
+ if row_range is None:
+ self.row_range = np.arange(data.shape[1])
+ else:
+ self.row_range = np.array(row_range)
+ if col_range is None:
+ self.col_range = np.arange(data.shape[2])
+ else:
+ self.col_range = np.array(col_range)
+
+ def imshow_summary(self, ax=None, show=True, xlabel=None):
+ if ax is None:
+ _, ax = plt.subplots(1,1)
+
+ t_vals = self.t_range.copy()
+ y_vals = self.data.mean(axis=2).mean(axis=1)
+ ax.plot(t_vals, y_vals)
+ ax.set_ylim(y_vals.min()-np.abs(y_vals.min())*.05, y_vals.max()+np.abs(y_vals.max())*.05)
+
+        if xlabel is not None:
+            ax.set_xlabel(xlabel)
+
+ ax.set_ylabel('Average frame intensity')
+
+        if show:
+            plt.show()
+
+ return ax, (t_vals, y_vals)
+
+ def imshow(self, t, show=True, vmin=-1, vmax=1, cmap=plt.cm.gray):
+ ti = int(t*self.frame_rate)
+ data = self.data[ti,:,:]
+ plt.imshow(data, vmin=vmin, vmax=vmax, cmap=cmap)
+ plt.colorbar()
+ if show:
+ plt.show()
+
+ def __add__(self, other):
+
+ assert self.labels == other.labels
+ assert self.units == other.units
+ assert self.frame_rate == other.frame_rate
+ np.testing.assert_almost_equal(self.col_range, other.col_range)
+ np.testing.assert_almost_equal(self.row_range, other.row_range)
+
+
+ new_data = np.empty((len(self.t_range)+len(other.t_range)-1, len(self.row_range), len(self.col_range)))
+ new_data[:len(self.t_range), :,:] = self.data[:,:,:]
+ new_data[len(self.t_range):, :,:] = other.data[1:,:,:]
+
+ return Movie(new_data, row_range=self.row_range.copy(), col_range=self.col_range.copy(), labels=self.labels, units=self.units, frame_rate=self.frame_rate)
+
+ @property
+ def ranges(self):
+ return self.t_range, self.row_range, self.col_range
+
+    def get_nwb_GrayScaleMovie(self):
+        # NOTE: `nwb` is not imported in this module; the original code relied
+        # on an external NWB helper (e.g. `import isee_engine.nwb as nwb`).
+        t_scale = nwb.Scale(self.t_range, 'time', self.units[0])
+ row_scale = nwb.Scale(self.row_range, 'distance', self.units[1])
+ col_scale = nwb.Scale(self.col_range, 'distance', self.units[2])
+
+ return nwb.GrayScaleMovie(self.data, scale=(t_scale, row_scale, col_scale))
+
+ def __getitem__(self, *args):
+ return self.data.__getitem__(*args)
+
+
+class FullFieldMovie(Movie):
+ def __init__(self, f, row_range, col_range, frame_rate=24):
+ self.row_range = row_range
+ self.col_range = col_range
+ self.frame_size = (len(self.row_range), len(self.col_range))
+ self._frame_rate = frame_rate
+ self.f = f
+
+ @property
+ def frame_rate(self):
+ return self._frame_rate
+
+ @property
+ def data(self):
+ return self
+
+ def __getitem__(self, *args):
+
+ t_inds, x_inds, y_inds = args[0]
+
+ assert (len(x_inds) == len(y_inds)) and (len(y_inds) == len(t_inds))
+
+ # Convert frame indices to times:
+ t_vals = (1./self.frame_rate)*t_inds
+
+ # Evaluate and return:
+ return self.f(t_vals)
+
+ def full(self, t_min=0, t_max=None):
+ # Compute t_range
+ t_range = convert_tmin_tmax_framerate_to_trange(t_min, t_max, self.frame_rate)
+
+ nt = len(t_range)
+ nr = len(self.row_range)
+ nc = len(self.col_range)
+ a,b,c = np.meshgrid(range(nt),range(nr),range(nc))
+ af, bf, cf = map(lambda x: x.flatten(), [a,b,c])
+ data = np.empty((nt, nr, nc))
+ data[af, bf, cf] = self.f(t_range[af])
+
+ return Movie(data, row_range=self.row_range, col_range=self.col_range, labels=('time', 'y', 'x'), units=('second', 'pixel', 'pixel'), frame_rate=self.frame_rate)
+
+
+class FullFieldFlashMovie(FullFieldMovie):
+ def __init__(self, row_range, col_range, t_on, t_off, max_intensity=1, frame_rate=24):
+ assert t_on < t_off
+
+ def f(t):
+ return np.piecewise(t, *zip(*[(t < t_on, 0), (np.logical_and(t_on <= t, t < t_off), max_intensity),
+ (t_off <= t, 0)]))
+
+ super(FullFieldFlashMovie, self).__init__(f, row_range, col_range, frame_rate=frame_rate)
+
+
+class GratingMovie(Movie):
+ def __init__(self, row_size, col_size, frame_rate=1000.):
+ self.row_size = row_size #in degrees
+ self.col_size = col_size #in degrees
+ self.frame_rate = float(frame_rate) #in Hz
+
+ def create_movie(self, t_min = 0, t_max = 1, gray_screen_dur = 0, cpd = 0.05, temporal_f = 4, theta = 45, phase = 0., contrast = 1.0, row_size_new = None, col_size_new = None):
+ """Create the grating movie with the desired parameters
+ :param t_min: start time in seconds
+ :param t_max: end time in seconds
+ :param gray_screen_dur: Duration of gray screen before grating stimulus starts
+ :param cpd: cycles per degree
+ :param temporal_f: in Hz
+ :param theta: orientation angle
+ :return: Movie object of grating with desired parameters
+ """
+ assert contrast <= 1, "Contrast must be <= 1"
+ assert contrast > 0, "Contrast must be > 0"
+
+ physical_spacing = 1. / (float(cpd) * 10) #To make sure no aliasing occurs
+        self.row_range = np.linspace(0, self.row_size, int(round(self.row_size / physical_spacing)), endpoint=True)
+        self.col_range = np.linspace(0, self.col_size, int(round(self.col_size / physical_spacing)), endpoint=True)
+ numberFramesNeeded = int(round(self.frame_rate * (t_max - gray_screen_dur))) + 1
+ time_range = np.linspace(gray_screen_dur, t_max - gray_screen_dur, numberFramesNeeded, endpoint=True)
+
+ tt, yy, xx = np.meshgrid(time_range, self.row_range, self.col_range, indexing='ij')
+
+ thetaRad = -np.pi*(180-theta)/180.
+ phaseRad = np.pi*(180-phase)/180.
+ xy = xx * np.cos(thetaRad) + yy * np.sin(thetaRad)
+ data = contrast*np.sin(2*np.pi*(cpd * xy + temporal_f *tt) + phaseRad)
+
+        if row_size_new is not None:
+            self.row_range = np.linspace(0, row_size_new, data.shape[1], endpoint = True)
+        if col_size_new is not None:
+            self.col_range = np.linspace(0, col_size_new, data.shape[2], endpoint = True)
+
+ if gray_screen_dur > 0:
+ # just adding one or two seconds to gray screen so flash never "happens"
+ m_gray = FullFieldFlashMovie(self.row_range, self.col_range, gray_screen_dur + 1, gray_screen_dur + 2,
+ frame_rate=self.frame_rate).full(t_max=gray_screen_dur)
+ mov = m_gray + Movie(data, row_range=self.row_range, col_range=self.col_range, labels=('time', 'y', 'x'),
+ units=('second', 'pixel', 'pixel'), frame_rate=self.frame_rate)
+ else:
+ mov = Movie(data, row_range=self.row_range, col_range=self.col_range, labels=('time', 'y', 'x'),
+ units=('second', 'pixel', 'pixel'), frame_rate=self.frame_rate)
+
+ return mov
+
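+# Sketch: a two-second drifting grating on a 120x240 degree field (values are
+# illustrative; the spatial grid is resampled from cpd inside create_movie):
+#
+# gm = GratingMovie(120, 240, frame_rate=1000.)
+# grating = gm.create_movie(t_max=2., cpd=0.04, temporal_f=4., theta=45.)
+# grating.imshow_summary()
+#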
+
+if __name__ == "__main__":
+ m1 = FullFieldFlashMovie(range(60), range(80), 1, 2).full(t_max=2)
+ m2 = FullFieldFlashMovie(range(60), range(80), 1, 2).full(t_max=2)
+ m3 = m1+m2
+ m3.imshow_summary()
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/poissongeneration.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/poissongeneration.py
new file mode 100644
index 0000000..b2125b1
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/poissongeneration.py
@@ -0,0 +1,104 @@
+import numpy as np
+import scipy.interpolate as sinterp
+import scipy.integrate as spi
+import warnings
+import scipy.optimize as sopt
+import scipy.stats as sps
+
+def generate_renewal_process(t0, t1, renewal_distribution):
+ last_event_time = t0
+ curr_interevent_time = float(renewal_distribution())
+ event_time_list = []
+ while last_event_time+curr_interevent_time <= t1:
+ event_time_list.append(last_event_time+curr_interevent_time)
+ curr_interevent_time = float(renewal_distribution())
+ last_event_time = event_time_list[-1]
+
+ return event_time_list
+
+def generate_poisson_process(t0, t1, rate):
+
+ if rate is None: raise ValueError('Rate cannot be None')
+ if rate > 10000: warnings.warn('Very high rate encountered: %s' % rate)
+
+
+    if rate < 0:
+        raise ValueError('Negative rate (%s) not allowed' % rate)
+    if not rate < np.inf:
+        raise ValueError('Rate (%s) must be finite' % rate)
+
+ if rate == 0:
+ return []
+ else:
+ return generate_renewal_process(t0, t1, sps.expon(0,1./rate).rvs)
+
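+# Sketch: roughly 100 expected events on [0, 1); the exact count varies with
+# the numpy RNG state:
+#
+# np.random.seed(0)
+# spikes = generate_poisson_process(0., 1., 100.)
+#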
+def generate_inhomogenous_poisson(t_range, y_range, seed=None):
+    if seed is not None:
+        np.random.seed(seed)
+ spike_list = []
+ for tl, tr, y in zip(t_range[:-1], t_range[1:], y_range[:-1]):
+ spike_list += generate_poisson_process(tl, tr, y)
+ return spike_list
+
+
+
+
+def generate_poisson_rescaling(t, y, seed=None):
+ y = np.array(y)
+ t = np.array(t)
+ assert not np.any(y<0)
+ f = sinterp.interp1d(t, y, fill_value=0, bounds_error=False)
+ return generate_poisson_rescaling_function(lambda y, t: f(t), t[0], t[-1], seed=seed)
+
+
+
+def generate_poisson_rescaling_function(f, t_min, t_max, seed=None):
+
+
+
+ def integrator(t0, t1):
+ return spi.odeint(f, 0, [t0, t1])[1][0]
+
+    if seed is not None:
+        np.random.seed(seed)
+
+ spike_train = []
+ while t_min < t_max:
+ e0 = np.random.exponential()
+ def root_function(t):
+ return e0 - integrator(t_min, t)
+
+ try:
+ with warnings.catch_warnings(record=True) as w:
+ result = sopt.root(root_function, .1)
+ assert result.success
+ except AssertionError:
+            if not e0 < integrator(t_min, t_max):
+                raise RuntimeError('Root finding failed for the remaining cumulative intensity')
+            else:
+                break
+
+
+
+
+ t_min = result.x[0]
+ spike_train.append(t_min)
+
+ return np.array(spike_train)
+
+
+def test_generate_poisson_function():
+
+ f = lambda y, t:10
+
+    # generate_poisson_function is not defined in this module; the
+    # rescaling-based generator above appears to be the intended target.
+    assert len(generate_poisson_rescaling_function(f, 0, 1, seed=5)) == 12
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/singleunitcell.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/singleunitcell.py
new file mode 100644
index 0000000..d3e0b24
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/singleunitcell.py
@@ -0,0 +1,8 @@
+from .temporalfilter import TemporalFilterCosineBump
+from .transferfunction import ScalarTransferFunction
+from .linearfilter import SpatioTemporalFilter
+import numpy as np
+from .spatialfilter import GaussianSpatialFilter
+from .cellmodel import OnUnit, OffUnit
+
+
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/spatialfilter.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/spatialfilter.py
new file mode 100644
index 0000000..466db94
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/spatialfilter.py
@@ -0,0 +1,215 @@
+from scipy import ndimage
+import numpy as np
+import itertools
+import importlib
+import scipy.interpolate as spinterp
+from . import utilities as util
+import matplotlib.pyplot as plt
+import scipy.misc as spmisc
+import scipy.ndimage as spndimage
+from .kernel import Kernel2D, Kernel3D
+
+class ArrayFilter(object):
+
+ default_threshold = .01
+
+ def __init__(self, mask):
+
+ self.mask = mask
+
+ def imshow(self, row_range, col_range, threshold=0, **kwargs):
+
+ return self.get_kernel(row_range, col_range,threshold).imshow(**kwargs)
+
+ def get_kernel(self, row_range, col_range, threshold=0, amplitude=1.):
+
+# print np.where(self.mask>threshold)
+ row_vals, col_vals = np.where(self.mask>threshold)
+
+ kernel_vals = self.mask[row_vals, col_vals]
+ kernel_vals = amplitude*kernel_vals/kernel_vals.sum()
+
+ return Kernel2D(row_range, col_range, row_vals, col_vals, kernel_vals) # row_range, col_range, row_inds, col_inds, kernel):
+
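+# Sketch: wrap a precomputed 2-D mask as a normalized Kernel2D (mask values
+# are illustrative):
+#
+# rows, cols = np.arange(60), np.arange(80)
+# mask = np.exp(-((rows[:, None] - 30.)**2 + (cols[None, :] - 40.)**2) / 50.)
+# k = ArrayFilter(mask).get_kernel(rows, cols, threshold=.01)
+#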
+
+class GaussianSpatialFilter(object):
+
+ default_threshold = .01
+
+ def __init__(self, translate=(0, 0), sigma=(1.,1.), rotation=0, origin='center'):
+ '''When w=1 and rotation=0, half-height will be at y=1'''
+
+ self.translate = translate
+ self.rotation = rotation
+ self.sigma = sigma
+ self.origin = origin
+
+ def imshow(self, row_range, col_range, threshold=0, **kwargs):
+ return self.get_kernel(row_range, col_range,threshold).imshow(**kwargs)
+
+ def to_dict(self):
+
+ return {'class':(__name__, self.__class__.__name__),
+ 'translate':self.translate,
+ 'rotation':self.rotation,
+ 'sigma':self.sigma}
+
+ def get_kernel(self, row_range, col_range, threshold=0, amplitude=1.):
+
+ # Create symmetric initial point at center:
+ image_shape = len(col_range), len(row_range)
+ h, w = image_shape
+ on_filter_spatial = np.zeros(image_shape)
+ if h%2 == 0 and w%2 == 0:
+ for ii, jj in itertools.product(range(2), range(2)):
+ on_filter_spatial[int(h/2)+ii-1,int(w/2)+jj-1] = .25
+ elif h%2 == 0 and w%2 != 0:
+ for ii in range(2):
+ on_filter_spatial[int(h/2)+ii-1,int(w/2)] = .25
+ elif h%2 != 0 and w%2 == 0:
+ for jj in range(2):
+ on_filter_spatial[int(h/2),int(w/2)+jj-1] = .25
+ else:
+ on_filter_spatial[int(h/2),int(w/2)] = .25
+
+ # Apply gaussian filter to create correct sigma:
+ scaled_sigma_x =float(self.sigma[0])/(col_range[1]-col_range[0])
+ scaled_sigma_y = float(self.sigma[1])/(row_range[1]-row_range[0])
+ on_filter_spatial = ndimage.gaussian_filter(on_filter_spatial, (scaled_sigma_x, scaled_sigma_y), mode='nearest', cval=0)
+# on_filter_spatial = skf.gaussian_filter(on_filter_spatial, sigma=(scaled_sigma_x, scaled_sigma_y))
+
+ # Rotate and translate at center:
+ rotation_matrix = util.get_rotation_matrix(self.rotation, on_filter_spatial.shape)
+ translation_x = float(self.translate[1])/(row_range[1]-row_range[0])
+ translation_y = -float(self.translate[0])/(col_range[1]-col_range[0])
+ translation_matrix = util.get_translation_matrix((translation_x, translation_y))
+ if self.origin != 'center':
+ center_y = -(self.origin[0]-(col_range[-1]+col_range[0])/2)/(col_range[1]-col_range[0])
+ center_x = (self.origin[1]-(row_range[-1]+row_range[0])/2)/(row_range[1]-row_range[0])
+ translation_matrix += util.get_translation_matrix((center_x, center_y))
+ kernel_data = util.apply_transformation_matrix(on_filter_spatial, translation_matrix+rotation_matrix)
+
+ kernel = Kernel2D.from_dense(row_range, col_range, kernel_data, threshold=0)
+ kernel.apply_threshold(threshold)
+ kernel.normalize()
+
+ kernel.kernel *= amplitude
+
+
+ return kernel
+
+
+
+# spatial_model = GaussianSpatialFilterModel(height=21, aspect_ratio=1., rotation=0)
+# spatial_filter = spatial_model(center=(30,40))
+# k = spatial_filter.get_spatial_kernel(range(60), range(80))
+# k.imshow(frame_size=(60,80))
+
+
+
+
+
+
+
+
+
+
+
+
+
+# def evaluate_movie(self, movie, t, show=False):
+#
+# y = []
+# for ti in t:
+# kernel_result = movie.evaluate_Kernel3D(ti, self)
+# y.append(self.transfer_function(kernel_result))
+#
+# if show == True:
+# plt.plot(t, y)
+# plt.show()
+#
+# return t, y
+
+# print mesh_range[0]
+#
+# ii = mesh_range[0][inds]
+# jj = mesh_range[1][inds]
+# print ii, jj
+# print tmp[jj,ii]
+
+# plt.figure()
+# plt.pcolor(mesh_range[0], mesh_range[1], tmp)
+# plt.colorbar()
+# plt.axis('equal')
+# plt.show()
+
+# print self.xydata[0].shape
+#
+# t0 = spndimage.rotate(self.xydata[0],30,reshape=False, mode=mode)
+# t1 = spndimage.rotate(self.xydata[1],30, reshape=False, mode=mode)
+
+# print t0.shape
+# print t1.shape
+# print on_filter_spatial.shape
+
+# plt.pcolor(t0,t1, on_filter_spatial)
+
+
+# self.interpolation_function = spinterp.interp2d(self.w_values, self.h_values, on_filter_spatial, fill_value=0, bounds_error=False)
+#
+# print self.interpolation_function((t0,t1))
+
+# translation_matrix = util.get_translation_matrix(self.translation)
+# tmp = util.apply_transformation_matrix(on_filter_spatial, translation_matrix)
+#
+# plt.pcolor(self.xydata[0], self.xydata[1], tmp)
+# plt.show()
+
+# # print self.xydata_trans[0][0], self.xydata_trans[0],[-1]
+# # print self.xydata_trans[1][0], self.xydata_trans[1],[-1]
+# print self.xydata_trans
+# rotation_matrix = util.get_rotation_matrix(self.rotation, on_filter_spatial.shape)
+# translation_matrix = util.get_translation_matrix(self.translation)
+# on_filter_spatial = util.apply_transformation_matrix(on_filter_spatial, translation_matrix+rotation_matrix)
+
+# plt.imshow(on_filter_spatial, extent=(self.w_values[0], self.w_values[-1], self.h_values[0], self.h_values[-1]), aspect=1.)
+# plt.show()
+
+# def to_dict(self):
+#
+# return {'scale':self.scale,
+# 'translation':self.translation,
+# 'rotation':self.rotation,
+# 'weight':self.weight,
+# 'temporal_filter':self.temporal_filter.to_dict(),
+# 'class':(__name__, self.__class__.__name__)}
+
+# def get_kernel(self, xdata, ydata, threshold=default_threshold):
+#
+#
+# # Rotate and translate at center:
+# rotation_matrix = util.get_rotation_matrix(self.rotation, on_filter_spatial.shape)
+# translation_matrix = util.get_translation_matrix(self.translation)
+# on_filter_spatial = util.apply_transformation_matrix(on_filter_spatial, translation_matrix+rotation_matrix)
+#
+# # Now translate center of field in image:
+# # translation_matrix = util.get_translation_matrix(relative_spatial_location)
+# # on_filter_spatial = util.apply_transformation_matrix(on_filter_spatial, translation_matrix)
+#
+# # Create and return thresholded 2D mask:
+# row_ind_list, col_ind_list = np.where(on_filter_spatial != 0)
+# kernel = on_filter_spatial[row_ind_list, col_ind_list]
+#
+#
+#
+#
+# # filter_mask = Kernel2D(row_ind_list, col_ind_list, kernel, threshold=threshold)
+#
+# return filter_mask
+
+# translation_matrix = util.get_translation_matrix((1.*translation[0]/fudge_factor,-1.*translation[1]/fudge_factor))
+
+# plt.figure()
+# plt.pcolor(self.mesh_support[0], self.mesh_support[1], self.kernel_data)
+# plt.axis('equal')
+# plt.show()
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/temporalfilter.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/temporalfilter.py
new file mode 100644
index 0000000..1e604bf
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/temporalfilter.py
@@ -0,0 +1,114 @@
+import numpy as np
+from . import fitfuns
+import scipy.interpolate as spinterp
+import matplotlib.pyplot as plt
+from .kernel import Kernel1D
+
+class TemporalFilter(object):
+
+ def __init__(self, *args, **kwargs): pass
+
+ def imshow(self, t_range=None, threshold=0, reverse=False, rescale=False, **kwargs):
+ return self.get_kernel(t_range, threshold, reverse, rescale).imshow(**kwargs)
+
+
+ def to_dict(self):
+ return {'class':(__name__, self.__class__.__name__)}
+
+ def get_kernel(self, t_range=None, threshold=0, reverse=False, rescale=False):
+
+ if t_range is None:
+ t_range = self.get_default_t_grid()
+
+# print self.t_support
+# print self.kernel_data
+
+ if len(self.t_support) == 1:
+ k = Kernel1D(self.t_support, self.kernel_data, threshold=threshold, reverse=reverse)
+ else:
+ interpolation_function = spinterp.interp1d(self.t_support, self.kernel_data, fill_value=0, bounds_error=False, assume_sorted=True)
+ k = Kernel1D(t_range, interpolation_function(t_range), threshold=threshold, reverse=reverse)
+        if rescale:
+ k.rescale()
+
+ #assert np.abs(np.abs(k.kernel).sum() - 1) < 1e-14
+ assert np.abs(np.abs(k.kernel.sum()) - 1) < 1e-14
+
+ return k
+
+class ArrayTemporalFilter(TemporalFilter):
+
+ def __init__(self, mask,t_support):
+
+ self.mask = mask
+ self.t_support = t_support
+
+ assert len(self.mask) == len(self.t_support)
+
+ self.nkt = 600
+
+ super(self.__class__, self).__init__()
+
+ self.kernel_data = self.mask
+ #self.t_support = np.arange(0, len(self.kernel_data)*.001, .001)
+ #assert len(self.t_support) == len(self.kernel_data)
+
+ def get_default_t_grid(self):
+
+ return np.arange(self.nkt)*.001
+
+class TemporalFilterCosineBump(TemporalFilter):
+
+ def __init__(self, weights, kpeaks, delays):
+
+ assert len(kpeaks) == 2
+        assert kpeaks[0] < kpeaks[1]
+        assert weights[0] > 0
+ assert delays[0] <= delays[1]
+
+ self.ncos = len(weights)
+
+ # Not likely to change defaults:
+ self.neye = 0
+ self.b = .3
+ self.nkt = 600
+
+ super(self.__class__, self).__init__()
+
+ # Parameters
+ self.weights = np.array([weights]).T
+ self.kpeaks = kpeaks
+ self.delays = np.array([delays]).astype(int)
+
+ # Adapter code to get filters from Ram's code:
+ kbasprs = {}
+ kbasprs['neye'] = self.neye
+ kbasprs['ncos'] = self.ncos
+ kbasprs['kpeaks'] = self.kpeaks
+ kbasprs['b'] = self.b
+ kbasprs['delays'] = self.delays
+ nkt = self.nkt
+ #kbasprs['bases'] = fitfuns.makeBasis_StimKernel(kbasprs, nkt)
+ self.kernel_data = np.dot(fitfuns.makeBasis_StimKernel(kbasprs, nkt), self.weights)[::-1].T[0]
+# plt.figure()
+# plt.plot(self.kernel_data)
+# plt.show()
+# sys.exit()
+ self.t_support = np.arange(0, len(self.kernel_data)*.001, .001)
+ self.kbasprs = kbasprs
+ assert len(self.t_support) == len(self.kernel_data)
+
+    def __call__(self, t):
+        # Build the interpolant lazily so the filter can be evaluated as a
+        # function of time (it is not constructed in __init__).
+        if not hasattr(self, 'interpolation_function'):
+            self.interpolation_function = spinterp.interp1d(self.t_support, self.kernel_data,
+                                                            fill_value=0, bounds_error=False)
+        return self.interpolation_function(t)
+
+ def get_default_t_grid(self):
+ return np.arange(self.nkt)*.001
+
+ def to_dict(self):
+
+ param_dict = super(self.__class__, self).to_dict()
+
+ param_dict.update({'weights':self.weights.tolist(),
+ 'kpeaks':self.kpeaks})
+
+ return param_dict
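+
+
+# Sketch of building a cosine-bump filter and sampling its kernel on the
+# default 600-sample, 1 ms grid (parameter values are illustrative):
+#
+# tf = TemporalFilterCosineBump(weights=(.4, -.3), kpeaks=(20, 60), delays=(0, 0))
+# k = tf.get_kernel(rescale=True)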
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/transferfunction.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/transferfunction.py
new file mode 100644
index 0000000..03ff617
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/transferfunction.py
@@ -0,0 +1,58 @@
+from sympy.utilities.lambdify import lambdify
+import sympy.parsing.sympy_parser as symp
+import sympy.abc
+import numpy as np
+
+
+class ScalarTransferFunction(object):
+ def __init__(self, transfer_function_string, symbol=sympy.abc.s):
+ self.symbol = symbol
+ self.transfer_function_string = transfer_function_string
+ self.closure = lambdify(self.symbol, symp.parse_expr(self.transfer_function_string), modules=['sympy'])
+
+ def __call__(self, s):
+ return self.closure(s)
+
+ def to_dict(self):
+ return {'class': (__name__, self.__class__.__name__),
+ 'function': self.transfer_function_string}
+
+ def imshow(self, xlim, ax=None, show=True, save_file_name=None, ylim=None):
+        # TODO: This function should be removed (ask Ram to see if/where it's used) since it will fail (no t_vals)
+ import matplotlib.pyplot as plt
+ if ax is None:
+ _, ax = plt.subplots(1, 1)
+
+ plt.plot(self.t_vals, self.kernel)
+ ax.set_xlabel('Time (Seconds)')
+
+ if ylim is not None:
+ ax.set_ylim(ylim)
+
+ if xlim is not None:
+ ax.set_xlim((self.t_range[0], self.t_range[-1]))
+
+ if save_file_name is not None:
+ plt.savefig(save_file_name, transparent=True)
+
+ if show:
+ plt.show()
+
+ return ax
+
+
+class MultiTransferFunction(object):
+ def __init__(self, symbol_tuple, transfer_function_string):
+ self.symbol_tuple = symbol_tuple
+ self.transfer_function_string = transfer_function_string
+ self.closure = lambdify(self.symbol_tuple, symp.parse_expr(self.transfer_function_string), modules=['sympy'])
+
+ def __call__(self, *s):
+ if isinstance(s[0], (float,)):
+ return self.closure(*s)
+ else:
+ return np.array(list(map(lambda x: self.closure(*x), zip(*s))))
+
+ def to_dict(self):
+ return {'class': (__name__, self.__class__.__name__),
+ 'function': self.transfer_function_string}
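+
+
+# Sketch: the scalar variant lambdifies a one-symbol sympy expression; the
+# multi variant maps over zipped inputs (expressions are illustrative):
+#
+# f = ScalarTransferFunction('Heaviside(s)*s')
+# f(2.5)        # -> 2.5 (and f(-1.0) -> 0)
+# g = MultiTransferFunction((sympy.abc.x, sympy.abc.y),
+#                           'Heaviside(x)*x + Heaviside(y)*y')
+# g(1.0, -2.0)  # -> 1.0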
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/util_fns.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/util_fns.py
new file mode 100644
index 0000000..af297a0
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/util_fns.py
@@ -0,0 +1,190 @@
+import os
+import re
+import matplotlib.mlab as mlab
+import numpy as np
+import scipy.io as sio
+from scipy.fftpack import fft
+import pandas as pd
+from .movie import Movie, FullFieldFlashMovie
+
+
+pd.set_option('display.width', 1000)
+pd.set_option('display.max_columns', 100)
+
+
+#################################################
+def chunks(l, n):
+ """Yield successive n-sized chunks from l."""
+ for i in range(0, len(l), n):
+ yield l[i:i + n]
+
+
+##################################################
+def compute_FFT_OneCycle(FR, TF, downsample):
+    one_cyc = int((1000. / downsample) / TF)
+ FR_cyc = list(chunks(FR, one_cyc))
+ if (TF == 15. or TF == 8.):
+ FR_cyc = FR_cyc[:-1]
+
+ FR_cyc_avg = np.mean(FR_cyc, axis=0)
+ y = FR_cyc_avg
+ AMP = 2 * np.abs(fft(y) / len(y))
+ F0 = 0.5 * AMP[0]
+    assert abs(F0 - np.mean(y)) < 1.e-4
+ F1 = AMP[1]
+
+ return F0, F1
+
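+# Sketch: F0/F1 of a rate trace sampled every `downsample` ms, e.g. a 4 Hz
+# sinusoidal rate at 1 ms resolution (values illustrative):
+#
+# t = np.arange(0., 1., .001)
+# FR = 10. + 5.*np.sin(2.*np.pi*4.*t)
+# F0, F1 = compute_FFT_OneCycle(FR, TF=4., downsample=1)  # F0 ~ 10, F1 ~ 5
+#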
+
+##################################################
+def create_ff_mov(frame_rate, tst, tend, xrng, yrng):
+ ff_mov_on = FullFieldFlashMovie(np.arange(xrng), np.arange(yrng), tst, tend, frame_rate=frame_rate,
+ max_intensity=1).full(t_max=tend) # +0.5)
+ ff_mov_off = FullFieldFlashMovie(np.arange(xrng), np.arange(yrng), tst, tend, frame_rate=frame_rate,
+ max_intensity=-1).full(t_max=tend) # +0.5)
+
+ return ff_mov_on, ff_mov_off
+
+
+##################################################
+def create_grating_movie_list(gr_dir_name):
+ gr_fnames = os.listdir(gr_dir_name)
+    gr_fnames_ord = sorted(gr_fnames, key=lambda x: (int(re.sub(r'\D', '', x)), x))
+
+ gr_mov_list = []
+ for fname in gr_fnames_ord[:5]:
+ movie_file = os.path.join(gr_dir_name, fname)
+ m_file = sio.loadmat(movie_file)
+ m_data_raw = m_file['mov'].T
+ swid = np.shape(m_data_raw)[1]
+ res = int(np.sqrt(swid / (8 * 16)))
+ m_data = np.reshape(m_data_raw, (3000, 8 * res, 16 * res))
+ m1 = Movie(m_data[:500, :, :], row_range=np.linspace(0, 120, m_data.shape[1], endpoint=True), col_range=np.linspace(0, 120, m_data.shape[2], endpoint=True), frame_rate=1000.)
+ gr_mov_list.append(m1)
+
+ return gr_mov_list
+
+
+##################################################
+metrics_dir = os.path.join(os.path.dirname(__file__), 'cell_metrics')
+def get_data_metrics_for_each_subclass(ctype):
+ # Load csv file into dataframe
+ if ctype.find('_sus') >= 0:
+ prs_fn = os.path.join(metrics_dir, '{}_cells_v3.csv'.format(ctype))
+ else:
+ prs_fn = os.path.join(metrics_dir, '{}_cell_data.csv'.format(ctype))
+
+ prs_df = pd.read_csv(prs_fn)
+ N_class, nmet = np.shape(prs_df)
+
+ # Group data by subclasses based on max F0 vals
+ exp_df = prs_df.iloc[:, [13, 14, 17, 18, 28, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+ 54]].copy() # Bl_lat,Wh_lat,Bl_si, wh_si, spont, 5 F0s, 5 F1s
+ sub_df = exp_df.iloc[:, [5, 6, 7, 8, 9]]
+ exp_df['max_tf'] = sub_df.idxmax(axis=1).values # sub_df.idxmax(axis=1)
+
+ exp_means = exp_df.groupby(['max_tf']).mean()
+ exp_std = exp_df.groupby(['max_tf']).std()
+ exp_nsub = exp_df.groupby(['max_tf']).size()
+
+ max_ind_arr = np.where(exp_nsub == np.max(exp_nsub))
+ max_nsub_ind = max_ind_arr[0][0]
+
+ # Get means and std dev for subclasses
+ exp_prs_dict = {}
+ for scn in np.arange(len(exp_nsub)):
+ f0_exp = exp_means.iloc[scn, 5:10].values
+ f1_exp = exp_means.iloc[scn, 10:].values
+ spont_exp = exp_means.iloc[scn, 4:5].values
+ if ctype.find('OFF') >= 0:
+ si_exp = exp_means.iloc[scn, 2:3].values
+ ttp_exp = exp_means.iloc[scn, 0:1].values
+ elif ctype.find('ON') >= 0:
+ si_exp = exp_means.iloc[scn, 3:4].values
+ ttp_exp = exp_means.iloc[scn, 1:2].values
+ else:
+            si_exp = np.nan * np.ones((1, 5))
+            ttp_exp = np.nan * np.ones((1, 2))
+
+ nsub = exp_nsub.iloc[scn]
+ if nsub == 1:
+ f0_std = np.mean(exp_std.iloc[max_nsub_ind, 5:10].values) * np.ones((1, 5))
+ f1_std = np.mean(exp_std.iloc[max_nsub_ind, 10:].values) * np.ones((1, 5))
+ spont_std = np.mean(exp_std.iloc[max_nsub_ind, 4:5].values) * np.ones((1, 5))
+ if ctype.find('OFF') >= 0:
+ si_std = np.mean(exp_std.iloc[max_nsub_ind, 2:3].values) * np.ones((1, 5))
+ elif ctype.find('ON') >= 0:
+ si_std = np.mean(exp_std.iloc[max_nsub_ind, 3:4].values) * np.ones((1, 5))
+ else:
+                si_std = np.nan * np.ones((1, 5))
+
+ else:
+ f0_std = exp_std.iloc[scn, 5:10].values
+ f1_std = exp_std.iloc[scn, 10:].values
+ spont_std = exp_std.iloc[scn, 4:5].values
+ if ctype.find('OFF') >= 0:
+ si_std = exp_std.iloc[scn, 2:3].values
+ elif ctype.find('ON') >= 0:
+ si_std = exp_std.iloc[scn, 3:4].values
+ else:
+                si_std = np.nan * np.ones((1, 5))
+
+ if ctype.find('t') >= 0:
+ tcross = 40.
+ si_inf_exp = (si_exp - tcross / 200.) * (200. / (200. - tcross - 40.))
+ elif ctype.find('s') >= 0:
+ tcross = 60.
+ si_inf_exp = (si_exp - tcross / 200.) * (200. / (200. - tcross - 40.))
+
+ dict_key = exp_means.iloc[scn].name[3:]
+ exp_prs_dict[dict_key] = {}
+ exp_prs_dict[dict_key]['f0_exp'] = f0_exp
+ exp_prs_dict[dict_key]['f1_exp'] = f1_exp
+ exp_prs_dict[dict_key]['spont_exp'] = spont_exp
+ exp_prs_dict[dict_key]['si_exp'] = si_exp
+ exp_prs_dict[dict_key]['si_inf_exp'] = si_inf_exp
+ exp_prs_dict[dict_key]['ttp_exp'] = ttp_exp
+ exp_prs_dict[dict_key]['f0_std'] = f0_std
+ exp_prs_dict[dict_key]['f1_std'] = f1_std
+ exp_prs_dict[dict_key]['spont_std'] = spont_std
+ exp_prs_dict[dict_key]['si_std'] = si_std
+ exp_prs_dict[dict_key]['nsub'] = nsub
+ exp_prs_dict[dict_key]['N_class'] = N_class
+
+ return exp_prs_dict
+
+
+##################################################
+def check_optim_results_against_bounds(bounds, opt_wts, opt_kpeaks):
+ bds_wts0 = bounds[0]
+ bds_wts1 = bounds[1]
+ bds_kp0 = bounds[2]
+ bds_kp1 = bounds[3]
+
+ opt_wts0 = opt_wts[0]
+ opt_wts1 = opt_wts[1]
+ opt_kp0 = opt_kpeaks[0]
+ opt_kp1 = opt_kpeaks[1]
+
+ if (opt_wts0 == bds_wts0[0] or opt_wts0 == bds_wts0[1]):
+ prm_on_bds = 'w0'
+ elif (opt_wts1 == bds_wts1[0] or opt_wts1 == bds_wts1[1]):
+ prm_on_bds = 'w1'
+ elif (opt_kp0 == bds_kp0[0] or opt_kp0 == bds_kp0[1]):
+ prm_on_bds = 'kp0'
+ elif (opt_kp1 == bds_kp1[0] or opt_kp1 == bds_kp1[1]):
+ prm_on_bds = 'kp1'
+ else:
+ prm_on_bds = 'None'
+
+ return prm_on_bds
+
+
+#######################################################
+def get_tcross_from_temporal_kernel(temporal_kernel):
+ max_ind = np.argmax(temporal_kernel)
+ min_ind = np.argmin(temporal_kernel)
+
+ temp_tcross_ind = mlab.cross_from_above(temporal_kernel[max_ind:min_ind], 0.0)
+ tcross_ind = max_ind + temp_tcross_ind[0]
+ return tcross_ind
diff --git a/bmtk-vb/bmtk/simulator/filternet/lgnmodel/utilities.py b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/utilities.py
new file mode 100644
index 0000000..69e61d9
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/lgnmodel/utilities.py
@@ -0,0 +1,123 @@
+import array
+import matplotlib.pyplot as plt
+import skimage.transform as transform
+import numpy as np
+import scipy.integrate as spi
+import scipy.optimize as sopt
+import warnings
+import scipy.interpolate as sinterp
+
+def get_vanhateren(filename, src_dir):
+ with open(filename, 'rb') as handle:
+ s = handle.read()
+ arr = array.array('H', s)
+ arr.byteswap()
+ return np.array(arr, dtype='uint16').reshape(1024, 1536)
+
+def convert_tmin_tmax_framerate_to_trange(t_min, t_max, frame_rate):
+    duration = t_max - t_min
+    number_of_frames = duration * frame_rate  # assumes t_min/t_max in same time units as frame_rate
+    dt = 1. / frame_rate
+    return t_min + np.arange(number_of_frames + 1) * dt
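+    # e.g. convert_tmin_tmax_framerate_to_trange(0.0, 1.0, 10.0) -> [0.0, 0.1, ..., 1.0] (11 points)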
+
+def get_rotation_matrix(rotation, shape):
+ '''Angle in degrees'''
+
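+    # Rotate about the image center: shift the center to the origin, rotate, then shift back.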
+ shift_y, shift_x = np.array(shape) / 2.
+ tf_rotate = transform.SimilarityTransform(rotation=np.deg2rad(rotation))
+ tf_shift = transform.SimilarityTransform(translation=[-shift_x, -shift_y])
+ tf_shift_inv = transform.SimilarityTransform(translation=[shift_x, shift_y])
+ return (tf_shift + (tf_rotate + tf_shift_inv))
+
+def get_translation_matrix(translation):
+ shift_x, shift_y = translation
+ tf_shift = transform.SimilarityTransform(translation=[-shift_x, shift_y])
+ return tf_shift
+
+
+def get_scale_matrix(scale, shape):
+ shift_y, shift_x = np.array(shape) / 2.
+ tf_rotate = transform.SimilarityTransform(scale=(1./scale[0], 1./scale[1]))
+ tf_shift = transform.SimilarityTransform(translation=[-shift_x, -shift_y])
+ tf_shift_inv = transform.SimilarityTransform(translation=[shift_x, shift_y])
+ return tf_shift + (tf_rotate + tf_shift_inv)
+
+def apply_transformation_matrix(image, matrix):
+ return transform.warp(image, matrix)
+
+
+def get_convolution_ind(curr_fi, flipped_t_inds, kernel, data):
+
+ flipped_and_offset_t_inds = flipped_t_inds + curr_fi
+
+ if np.all( flipped_and_offset_t_inds >= 0):
+
+ # No negative entries; still might be over the end though:
+ try:
+ return np.dot(data[flipped_and_offset_t_inds], kernel)
+
+ except IndexError:
+
+ # Requested some indices out of range of data:
+ indices_within_range = np.where(flipped_and_offset_t_inds < len(data))
+ valid_t_inds = flipped_and_offset_t_inds[indices_within_range]
+ valid_kernel = kernel[indices_within_range]
+ return np.dot(data[valid_t_inds], valid_kernel)
+
+ else:
+
+# # Some negative entries:
+# if np.all( flipped_and_offset_t_inds < 0):
+#
+# # All are negative:
+# return 0
+#
+# else:
+
+ # Only some are negative, so restrict:
+ indices_within_range = np.where(flipped_and_offset_t_inds >= 0)
+ valid_t_inds = flipped_and_offset_t_inds[indices_within_range]
+ valid_kernel = kernel[indices_within_range]
+
+ return np.dot(data[valid_t_inds], valid_kernel)
+
+def get_convolution(t, frame_rate, flipped_t_inds, kernel, data):
+
+ # Get frame indices:
+ fi = frame_rate*float(t)
+ fim = int(np.floor(fi))
+ fiM = int(np.ceil(fi))
+
+ if fim != fiM:
+
+ # Linear interpolation:
+ sm = get_convolution_ind(fim, flipped_t_inds, kernel, data)
+ sM = get_convolution_ind(fiM, flipped_t_inds, kernel, data)
+ return sm*(1-(fi-fim)) + sM*(fi-fim)
+
+ else:
+
+ # Requested time is exactly one piece of data:
+ return get_convolution_ind(fim, flipped_t_inds, kernel, data)
+
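+# Example (hypothetical values): at t = 0.0105 s with frame_rate = 1000 Hz, fi = 10.5,
+# so the result is the mean of the convolutions at frames 10 and 11, each weighted 0.5.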
+if __name__ == "__main__":
+ pass
+# print generate_poisson([0,1,2,3],[.5,1,2,3])
+
+
+
+# test_generate_poisson_function()
+
+# image = np.zeros((101,151))
+# image[48:52+1]=1
+#
+# mr = get_rotation_matrix(30, image.shape)
+# mt = get_translation_matrix((20,0))
+# ms = get_scale_matrix((.5,1),image.shape)
+#
+# m = mr
+#
+# fig, ax = plt.subplots(2,1)
+# ax[0].imshow(image)
+# ax[1].imshow(apply_transformation_matrix(image, m))
+# plt.show()
diff --git a/bmtk-vb/bmtk/simulator/filternet/modules/__init__.py b/bmtk-vb/bmtk/simulator/filternet/modules/__init__.py
new file mode 100644
index 0000000..13185dd
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/modules/__init__.py
@@ -0,0 +1,2 @@
+from .record_rates import RecordRates
+from .create_spikes import SpikesGenerator
diff --git a/bmtk-vb/bmtk/simulator/filternet/modules/base.py b/bmtk-vb/bmtk/simulator/filternet/modules/base.py
new file mode 100644
index 0000000..1bf7865
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/modules/base.py
@@ -0,0 +1,9 @@
+class SimModule(object):
+ def initialize(self, sim):
+ pass
+
+ def save(self, sim, **kwargs):
+ pass
+
+ def finalize(self, sim):
+ pass
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/modules/create_spikes.py b/bmtk-vb/bmtk/simulator/filternet/modules/create_spikes.py
new file mode 100644
index 0000000..d2acf96
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/modules/create_spikes.py
@@ -0,0 +1,99 @@
+import os
+import numpy as np
+import random
+import six
+
+from .base import SimModule
+from bmtk.utils.io.spike_trains import SpikeTrainWriter
+from bmtk.simulator.filternet.lgnmodel import poissongeneration as pg
+
+
+class SpikesGenerator(SimModule):
+ def __init__(self, spikes_file_csv=None, spikes_file=None, spikes_file_nwb=None, tmp_dir='output'):
+ def _get_file_path(file_name):
+ if file_name is None or os.path.isabs(file_name):
+ return file_name
+
+ return os.path.join(tmp_dir, file_name)
+
+ self._csv_fname = _get_file_path(spikes_file_csv)
+ self._save_csv = spikes_file_csv is not None
+
+ self._h5_fname = _get_file_path(spikes_file)
+ self._save_h5 = spikes_file is not None
+
+ self._nwb_fname = _get_file_path(spikes_file_nwb)
+ self._save_nwb = spikes_file_nwb is not None
+
+ self._tmpdir = tmp_dir
+
+ self._spike_writer = SpikeTrainWriter(tmp_dir=tmp_dir)
+
+ def save(self, sim, gid, times, rates):
+ try:
+ spike_trains = np.array(f_rate_to_spike_train(times*1000.0, rates, np.random.randint(10000),
+ 1000.*min(times), 1000.*max(times), 0.1))
+        except Exception:
+ # convert to milliseconds and hence the multiplication by 1000
+ spike_trains = 1000.0*np.array(pg.generate_inhomogenous_poisson(times, rates,
+ seed=np.random.randint(10000)))
+
+ self._spike_writer.add_spikes(times=spike_trains, gid=gid)
+
+ def finalize(self, sim):
+ self._spike_writer.flush()
+
+ if self._save_csv:
+ self._spike_writer.to_csv(self._csv_fname)
+
+ if self._save_h5:
+ self._spike_writer.to_hdf5(self._h5_fname)
+
+ if self._save_nwb:
+ self._spike_writer.to_nwb(self._nwb_fname)
+
+ self._spike_writer.close()
+
+
+def f_rate_to_spike_train(t, f_rate, random_seed, t_window_start, t_window_end, p_spike_max):
+ # t and f_rate are lists containing time stamps and corresponding firing rate values;
+ # they are assumed to be of the same length and ordered with the time strictly increasing;
+ # p_spike_max is the maximal probability of spiking that we allow within the time bin; it is used to decide on the size of the time bin; should be less than 1!
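+    # Example (hypothetical numbers): with f_rate[k] = 50 Hz over a 10 ms interval,
+    # av_N_spikes = 50/1000 * 10 = 0.5; with p_spike_max = 0.1 that interval is split
+    # into ceil(0.5/0.1) = 5 bins, each carrying a spike probability of 0.1.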
+
+ if np.max(f_rate) * np.max(np.diff(t))/1000. > 0.1: #Divide by 1000 to convert to seconds
+        print('Firing rate too high for time interval; spikes will not be estimated correctly. '
+              'Spikes will be calculated with the slower inhomogeneous Poisson generating function.')
+ raise Exception()
+
+ spike_times = []
+
+ # Use seed(...) to instantiate the random number generator. Otherwise, current system time is used.
+ random.seed(random_seed)
+
+ # Assume here for each pair (t[k], f_rate[k]) that the f_rate[k] value applies to the time interval [t[k], t[k+1]).
+ for k in six.moves.range(0, len(f_rate)-1):
+ t_k = t[k]
+ t_k_1 = t[k+1]
+ if ((t_k >= t_window_start) and (t_k_1 <= t_window_end)):
+ delta_t = t_k_1 - t_k
+ # Average number of spikes expected in this interval (note that firing rate is in Hz and time is in ms).
+ av_N_spikes = f_rate[k] / 1000.0 * delta_t
+
+ if (av_N_spikes > 0):
+ if (av_N_spikes <= p_spike_max):
+ N_bins = 1
+ else:
+ N_bins = int(np.ceil(av_N_spikes / p_spike_max))
+
+ t_base = t[k]
+ t_bin = 1.0 * delta_t / N_bins
+ p_spike_bin = 1.0 * av_N_spikes / N_bins
+ for i_bin in six.moves.range(0, N_bins):
+                    rand_tmp = random.random()  # `random` is the imported module; call random.random()
+                    if rand_tmp < p_spike_bin:
+                        spike_t = t_base + random.random() * t_bin
+ spike_times.append(spike_t)
+
+ t_base += t_bin
+
+ return spike_times
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/modules/record_rates.py b/bmtk-vb/bmtk/simulator/filternet/modules/record_rates.py
new file mode 100644
index 0000000..b2978a3
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/modules/record_rates.py
@@ -0,0 +1,29 @@
+import os
+import csv
+
+from .base import SimModule
+
+
+class RecordRates(SimModule):
+ def __init__(self, csv_file=None, h5_file=None, tmp_dir='output'):
+ csv_file = csv_file if csv_file is None or os.path.isabs(csv_file) else os.path.join(tmp_dir, csv_file)
+ self._save_to_csv = csv_file is not None
+ self._tmp_csv_file = csv_file if self._save_to_csv else os.path.join(tmp_dir, '__tmp_rates.csv')
+
+ self._tmp_csv_fhandle = open(self._tmp_csv_file, 'w')
+ self._tmp_csv_writer = csv.writer(self._tmp_csv_fhandle, delimiter=' ')
+
+ self._save_to_h5 = h5_file is not None
+
+ def save(self, sim, gid, times, rates):
+ for t, r in zip(times, rates):
+ self._tmp_csv_writer.writerow([gid, t, r])
+ self._tmp_csv_fhandle.flush()
+
+ def finalize(self, sim):
+ if self._save_to_h5:
+ raise NotImplementedError
+
+ self._tmp_csv_fhandle.close()
+ if not self._save_to_csv:
+ os.remove(self._tmp_csv_file)
diff --git a/bmtk-vb/bmtk/simulator/filternet/pyfunction_cache.py b/bmtk-vb/bmtk/simulator/filternet/pyfunction_cache.py
new file mode 100644
index 0000000..9ac949a
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/pyfunction_cache.py
@@ -0,0 +1,98 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import types
+from functools import wraps
+
+
+class _PyFunctions(object):
+ """Structure for holding custom user-defined python functions.
+
+    Will store a set of functions created by the user. Do not access this directly; rather, use the
+    decorators or setter functions, and use the py_modules module variable to access individual
+    functions. This FilterNet version holds a single category:
+        cell_processor: functions for loading and/or post-processing a cell model.
+ """
+ def __init__(self):
+ self.__cell_processors = {}
+
+ def clear(self):
+ self.__cell_processors.clear()
+
+ @property
+ def cell_processors(self):
+ return self.__cell_processors.keys()
+
+ def cell_processor(self, name):
+ return self.__cell_processors[name]
+
+ def add_cell_processor(self, name, func, overwrite=True):
+ if overwrite or name not in self.__cell_processors:
+ self.__cell_processors[name] = func
+
+ def __repr__(self):
+        return repr(self.__cell_processors)
+
+
+py_modules = _PyFunctions()
+
+
+def cell_processor(*wargs, **wkwargs):
+ """A decorator for registering NEURON cell loader functions."""
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+ py_modules.add_cell_processor(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_cell_processor(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
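+# Example usage (illustrative only; the processor signature below is an assumption):
+#
+#   @cell_processor
+#   def default_processor(cell, template_name, dynamics_params):
+#       return cell
+#
+#   @cell_processor(name='my_processor')
+#   def processor_impl(cell, template_name, dynamics_params):
+#       return cell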
+
+def add_cell_processor(func, name=None, overwrite=True):
+ assert(callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_cell_processor(func_name, func, overwrite)
+
+
+def load_py_modules(cell_processors):
+ # py_modules.clear()
+ assert (isinstance(cell_processors, types.ModuleType))
+ for f in [cell_processors.__dict__.get(f) for f in dir(cell_processors)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_cell_processor(f.__name__, f)
diff --git a/bmtk-vb/bmtk/simulator/filternet/transfer_functions.py b/bmtk-vb/bmtk/simulator/filternet/transfer_functions.py
new file mode 100644
index 0000000..6517719
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/transfer_functions.py
@@ -0,0 +1 @@
+from bmtk.simulator.filternet.lgnmodel.transferfunction import *
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/filternet/utils.py b/bmtk-vb/bmtk/simulator/filternet/utils.py
new file mode 100644
index 0000000..c01045c
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/filternet/utils.py
@@ -0,0 +1 @@
+from bmtk.simulator.filternet.lgnmodel.util_fns import *
diff --git a/bmtk-vb/bmtk/simulator/mintnet/Image_Library.py b/bmtk-vb/bmtk/simulator/mintnet/Image_Library.py
new file mode 100644
index 0000000..506a040
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/Image_Library.py
@@ -0,0 +1,105 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import os
+from PIL import Image
+
+# Image_Batch
+# .data (image_data)
+# .image_dir, .new_size
+
+
+# add seed for random
+# call should return indices into im_list
+class Image_Experiment(object):
+
+    def __init__(self, stuff):
+        # Placeholder stub: the original bare attribute expressions would raise
+        # AttributeError, so the attributes are initialized to None until implemented.
+        self.image_dir = None
+        self.new_size = None
+        self.sample_indices = None
+        self.im_list = None
+        # TODO: creation of pandas table, template
+
+
+
+
+
+class Image_Library (object):
+ def __init__(self, image_dir,new_size=(128,192)): # NOTE: change this so that sequential is a class variable, not an argument to the call
+ self.image_dir = image_dir
+ self.new_size = new_size
+
+ im_list = os.listdir(image_dir)
+
+ remove_list = []
+ for im in im_list:
+ if im[-5:]!='.tiff' and im[-5:]!='.JPEG' and im[-4:]!='.jpg':
+ remove_list.append(im)
+
+ for im in remove_list:
+ im_list.remove(im)
+
+ self.im_list = im_list
+
+ self.current_location = 0 # used for sequential samples
+ self.lib_size = len(self.im_list)
+
+ def __call__(self,num_samples, sequential=False):
+
+ image_data = np.zeros([num_samples,self.new_size[0],self.new_size[1],1],dtype=np.float32)
+
+ if sequential:
+ if self.lib_size-self.current_location > num_samples:
+ sample_indices = np.arange(self.current_location,self.current_location + num_samples)
+ self.current_location += num_samples
+ else:
+ sample_indices = np.arange(self.current_location,self.lib_size)
+ self.current_location = 0
+ else:
+ sample_indices = np.random.randint(0,len(self.im_list),num_samples)
+
+ for i,s in enumerate(sample_indices):
+ im = Image.open(os.path.join(self.image_dir,self.im_list[s]))
+ im = im.convert('L')
+ im = im.resize((self.new_size[1],self.new_size[0]))
+ image_data[i,:,:,0] = np.array(im,dtype=np.float32)
+
+ return image_data
+
+    def create_experiment(self, num_samples=1):
+        # Stub: __call__ requires a sample count, and the sampled data is passed through.
+        data = self(num_samples)
+        return Image_Experiment(data)
+
+ def experiment_from_table(self,table):
+ pass
+
+ def to_h5(self,sample_indices=None):
+ pass
+
+ def template(self):
+ pass
+
+ def table(self,*params):
+ pass
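+
+
+# Example usage (hypothetical directory; a minimal sketch):
+#
+#   im_lib = Image_Library('/path/to/images', new_size=(128, 192))
+#   batch = im_lib(10)                      # -> (10, 128, 192, 1) float32, random sample
+#   ordered = im_lib(10, sequential=True)   # next 10 images in directory order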
diff --git a/bmtk-vb/bmtk/simulator/mintnet/Image_Library_Supervised.py b/bmtk-vb/bmtk/simulator/mintnet/Image_Library_Supervised.py
new file mode 100644
index 0000000..756b62b
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/Image_Library_Supervised.py
@@ -0,0 +1,93 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from PIL import Image
+import numpy as np
+import os
+
+class Image_Library_Supervised (object):
+
+ def __init__(self,image_dir,new_size=(256,256)):
+
+        self.categories = [c for c in os.listdir(image_dir) if not c.startswith('.')]  # skip hidden entries
+
+ self.num_categories = len(self.categories) #len(image_dir_list)
+ self.image_dir_list = [os.path.join(image_dir,x) for x in self.categories]
+ self.new_size = new_size
+
+
+ # self.categories = []
+ # for d in self.image_dir_list:
+ # self.categories += [os.path.basename(d)]
+
+ self.im_lists = {}
+ for i,cat in enumerate(self.categories):
+ d = self.image_dir_list[i]
+            if os.path.basename(d).startswith('.'): continue  # skip hidden directories
+ self.im_lists[cat] = os.listdir(d)
+
+ for cat in self.im_lists:
+ remove_list = []
+ for im in self.im_lists[cat]:
+ if im[-4:]!='.jpg':
+ remove_list.append(im)
+
+ for im in remove_list:
+ self.im_lists[cat].remove(im)
+
+
+ self.current_location = np.zeros(len(self.categories)) # used for sequential samples
+ self.lib_size = [len(self.im_lists[x]) for x in self.categories]
+ #self.lib_size = len(self.im_list)
+
+ def __call__(self,num_samples,sequential=False):
+
+ image_data = np.zeros([self.num_categories*num_samples,self.new_size[0],self.new_size[1],1],dtype=np.float32)
+
+ # y_vals = np.tile(np.arange(self.num_categories),(num_samples,1)).T.flatten()
+ # y_vals = y_vals.astype(np.float32)
+
+ y_vals = np.zeros([num_samples*self.num_categories,self.num_categories],np.float32)
+
+ for i,cat in enumerate(self.categories):
+
+ y_vals[num_samples*i:num_samples*i+num_samples].T[i] = 1
+
+ if sequential:
+ if self.lib_size[i]-self.current_location[i] > num_samples:
+ sample_indices = np.arange(self.current_location[i],self.current_location[i] + num_samples,dtype=np.int64)
+ self.current_location[i] += num_samples
+ else:
+ sample_indices = np.arange(self.current_location[i],self.lib_size[i],dtype=np.int64)
+ self.current_location[i] = 0
+ else:
+ sample_indices = np.random.randint(0,len(self.im_lists[cat]),num_samples)
+
+ for j,s in enumerate(sample_indices):
+ im = Image.open(os.path.join(self.image_dir_list[i],self.im_lists[cat][s]))
+ im = im.convert('L')
+ im = im.resize((self.new_size[1],self.new_size[0]))
+ index = j + num_samples*i
+ image_data[index,:,:,0] = np.array(im,dtype=np.float32)
+
+ return y_vals, image_data
+
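+# Example usage (hypothetical directory with one subdirectory per category):
+#
+#   im_lib = Image_Library_Supervised('/path/to/train', new_size=(256, 256))
+#   y_vals, image_data = im_lib(25)   # 25 samples per category, one-hot labels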
diff --git a/bmtk-vb/bmtk/simulator/mintnet/__init__.py b/bmtk-vb/bmtk/simulator/mintnet/__init__.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/bmtk/simulator/mintnet/analysis/LocallySparseNoise.py b/bmtk-vb/bmtk/simulator/mintnet/analysis/LocallySparseNoise.py
new file mode 100644
index 0000000..60b9228
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/analysis/LocallySparseNoise.py
@@ -0,0 +1,105 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import pandas as pd
+import h5py
+
+
+class LocallySparseNoise (object):
+
+ def __init__(self,data_file_name):
+
+ self.stim_table = pd.read_hdf(data_file_name,'stim_table')
+ self.node_table = pd.read_hdf(data_file_name,'node_table')
+
+
+ self.data_file_name = data_file_name
+
+ data = h5py.File(self.data_file_name,'r')
+
+        self.data_sets = list(data.keys())  # list() needed under Python 3, where keys() is a view
+ self.data_sets.remove('stim_table')
+ self.data_sets.remove('node_table')
+ self.data_sets.remove('stim_template')
+
+        self.stim_template = data['stim_template'][()]  # [()] works in h5py 2.x and 3.x; .value was removed in 3.0
+
+ data.close()
+
+ @staticmethod
+ def rf(response, stim_template, stim_shape):
+ T = stim_template.shape[0]
+ rf_shape = tuple(stim_template.shape[1:])
+
+ unit_shape = tuple(response.shape[1:])
+
+ response.resize([T,np.prod(unit_shape)])
+
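+        # Reverse correlation: project each unit's response time course onto the stimulus frames.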
+ rf = np.dot(response.T,stim_template)
+
+ rf_new_shape = tuple([rf.shape[0]] + list(rf_shape))
+ rf.resize(rf_new_shape)
+ rf_final_shape = tuple(list(unit_shape) + list(stim_shape))
+ rf.resize(rf_final_shape)
+
+ return rf
+
+ def compute_receptive_fields(self, dtype=np.float32):
+
+ output = h5py.File(self.data_file_name[:-3]+'_analysis.ic','a')
+ data = h5py.File(self.data_file_name,'r')
+
+ # convert to +/-1 or 0
+        stim_template = data['stim_template'][()].astype(dtype)
+ stim_template = stim_template-127
+ stim_template = np.sign(stim_template)
+ #print np.unique(stim_template)
+
+ stim_shape = tuple(stim_template.shape[1:])
+ T = stim_template.shape[0]
+
+ stim_template.resize([T,np.prod(stim_shape)])
+
+ stim_template_on = stim_template.copy()
+ stim_template_off = stim_template.copy()
+
+ stim_template_on[stim_template_on<0] = 0.0
+ stim_template_off[stim_template_off>0] = 0.0
+
+ for data_set in self.data_sets:
+
+            response = data[data_set][()]
+ response = response - np.mean(response,axis=0)
+
+ key_onoff = data_set+'/lsn/on_off'
+ key_on = data_set+'/lsn/on'
+ key_off = data_set+'/lsn/off'
+ for key in [key_onoff, key_on, key_off]:
+ if key in output:
+ del output[key]
+
+ output[key_onoff] = self.rf(response, stim_template, stim_shape)
+ output[key_on] = self.rf(response, stim_template_on, stim_shape)
+ output[key_off] = self.rf(response, stim_template_off, stim_shape)
+
+ data.close()
diff --git a/bmtk-vb/bmtk/simulator/mintnet/analysis/StaticGratings.py b/bmtk-vb/bmtk/simulator/mintnet/analysis/StaticGratings.py
new file mode 100644
index 0000000..10a019b
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/analysis/StaticGratings.py
@@ -0,0 +1,101 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import pandas as pd
+import h5py
+import sys
+import os
+
+class StaticGratings (object):
+
+ def __init__(self,data_file_name):
+
+ self.stim_table = pd.read_hdf(data_file_name,'stim_table')
+ self.node_table = pd.read_hdf(data_file_name,'node_table')
+ self.tunings_file = None
+
+ f = lambda label: self.stim_table.dropna().drop_duplicates([label])[label].sort_values(inplace=False).values
+
+ self.orientations = f('orientation')
+ self.spatial_frequencies = f('spatial_frequency')
+ self.phases = f('phase')
+
+ self.data_file_name = data_file_name
+
+ data = h5py.File(self.data_file_name,'r')
+
+        self.data_sets = list(data.keys())  # list() needed under Python 3
+ self.data_sets.remove('stim_table')
+ self.data_sets.remove('node_table')
+ self.data_sets.remove('stim_template')
+
+ data.close()
+
+ def tuning_matrix(self, response, dtype=np.float32):
+
+ tuning_shape = tuple([len(self.orientations), len(self.spatial_frequencies), len(self.phases)] + list(response.shape[1:]))
+
+ tuning_matrix = np.empty(tuning_shape, dtype=dtype)
+
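+        # Average the response over all presentations of each (orientation, SF, phase) condition.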
+ for i,ori in enumerate(self.orientations):
+ for j,sf in enumerate(self.spatial_frequencies):
+ for k,ph in enumerate(self.phases):
+
+ index = self.stim_table[(self.stim_table.spatial_frequency==sf) & (self.stim_table.orientation==ori) & (self.stim_table.phase==ph)].index
+
+ tuning_matrix[i,j,k] = np.mean(response[index],axis=0)
+
+ return tuning_matrix
+
+ def compute_all_tuning(self, dtype=np.float32, force=False):
+ self.tunings_file = self.data_file_name[:-3]+'_analysis.ic'
+ if os.path.exists(self.tunings_file) and not force:
+ print('Using existing tunings file {}.'.format(self.tunings_file))
+ return
+
+ output = h5py.File(self.tunings_file,'a')
+ data = h5py.File(self.data_file_name,'r')
+
+ for i, data_set in enumerate(self.data_sets):
+ sys.stdout.write( '\r{0:.02f}'.format(float(i)*100/len(self.data_sets))+'% done')
+ sys.stdout.flush()
+
+            response = data[data_set][()]
+
+ tuning = self.tuning_matrix(response, dtype=dtype)
+
+ key = data_set+'/sg/tuning'
+ if key in output:
+ del output[key]
+ output[key] = tuning
+
+ sys.stdout.write( '\r{0:.02f}'.format(float(100))+'% done')
+ sys.stdout.flush()
+
+ data.close()
+
+ def get_tunings_file(self):
+ if self.tunings_file is None:
+ self.compute_all_tuning()
+
+ return h5py.File(self.tunings_file, 'r')
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/mintnet/analysis/__init__.py b/bmtk-vb/bmtk/simulator/mintnet/analysis/__init__.py
new file mode 100644
index 0000000..2d56a26
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/analysis/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/mintnet/hmax/C_Layer.py b/bmtk-vb/bmtk/simulator/mintnet/hmax/C_Layer.py
new file mode 100644
index 0000000..1489c89
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/hmax/C_Layer.py
@@ -0,0 +1,260 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import tensorflow as tf
+import os
+import pandas as pd
+
+class C_Layer (object):
+ def __init__(self,node_name,S_Layer_input,bands):
+ '''
+        :type S_Layer_input: S_Layer object
+        :param S_Layer_input: instance of S_Layer object that serves as input for this C_Layer
+
+ :type bands: list
+ :param bands: bands[i] = [[list of frequency indices for S_layer over which to pool], grid_size, sample_step]
+ '''
+ self.node_name = node_name
+ self.input = S_Layer_input.input
+
+ self.tf_sess = S_Layer_input.tf_sess
+
+ s_output = S_Layer_input.output
+
+ self.K = S_Layer_input.K
+
+ num_bands = len(bands)
+
+ self.band_output = {}
+
+ self.band_shape = {}
+
+ with tf.name_scope(self.node_name):
+ for b in range(num_bands):
+ bands_to_pool, grid_size, sample_step = bands[b]
+
+ sub_band_shape = []
+ for sub_band in bands_to_pool:
+ sub_band_shape += [S_Layer_input.band_shape[sub_band]]
+
+                # Band shapes are tuples; copy into a list so the elementwise max can be assigned.
+                max_band_shape = list(sub_band_shape[0])
+                for shape in sub_band_shape[1:]:
+                    if shape[0] > max_band_shape[0]: max_band_shape[0] = shape[0]
+                    if shape[1] > max_band_shape[1]: max_band_shape[1] = shape[1]
+
+ # print "max_band_shape = ", max_band_shape
+ # for sub_band in bands_to_pool:
+ # print "\tsub_band_shape = ", S_Layer_input.band_shape[sub_band]
+ # print "\tinput band shape = ", s_output[sub_band].get_shape()
+
+ #resize all inputs to highest resolution so that we can maxpool over equivalent scales
+ resize_ops = []
+ for sub_band in bands_to_pool:
+ op = s_output[sub_band]
+ # resize_ops += [tf.image.resize_images(op,max_band_shape[0],max_band_shape[1],method=ResizeMethod.NEAREST_NEIGHBOR)]
+ resize_ops += [tf.image.resize_nearest_neighbor(op,max_band_shape)]
+ #print "\tresize op shape = ", resize_ops[-1].get_shape()
+
+ #take the maximum for each input channel, element-wise
+ max_channel_op = resize_ops[0]
+ for op in resize_ops[1:]:
+ max_channel_op = tf.maximum(op,max_channel_op)
+
+ #print "\tmax channel op shape = ", max_channel_op.get_shape()
+
+ # new shape for mode 'SAME'
+ # new_band_shape = (max_band_shape[0]/sample_step, max_band_shape[1]/sample_step)
+ new_band_shape = np.ceil(np.array(max_band_shape)/float(sample_step)).astype(np.int64)
+
+ # make sure the grid_size and sample_step aren't bigger than the image
+ if max_band_shape[0] < grid_size:
+ y_size = max_band_shape[0]
+ else:
+ y_size = grid_size
+
+ if max_band_shape[1] < grid_size:
+ x_size = max_band_shape[1]
+ else:
+ x_size = grid_size
+
+ if sample_step > max_band_shape[0]:
+ y_step = max_band_shape[0]
+ new_band_shape = (1,new_band_shape[1])
+ else:
+ y_step = sample_step
+ if sample_step > max_band_shape[1]:
+ x_step = max_band_shape[1]
+ new_band_shape = (new_band_shape[0],1)
+ else:
+ x_step = sample_step
+
+ # max pool
+ max_pool_op = tf.nn.max_pool(max_channel_op,[1,y_size,x_size,1],strides=[1,y_step,x_step,1],padding='SAME')
+
+ self.band_shape[b] = new_band_shape
+ #print "max_band_shape: ", max_band_shape
+
+ self.band_output[b]=max_pool_op
+
+ self.num_units = 0
+ for b in self.band_shape:
+ self.num_units += np.prod(self.band_shape[b])*self.K
+
+ self.output = self.band_output
+
+ def __repr__(self):
+ return "C_Layer"
+
+ def compute_output(self,X,band):
+ return self.tf_sess.run(self.output[band],feed_dict={self.input:X})
+
+ # def get_compute_ops(self):
+ #
+ # node_table = pd.DataFrame(columns=['node','band'])
+ # compute_list = []
+ #
+ # for band in self.band_output:
+ # node_table = node_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+ #
+ # compute_list.append(self.output[band])
+ #
+ # return node_table, compute_list
+
+ def get_compute_ops(self,unit_table=None):
+
+ compute_list = []
+
+ if unit_table is not None:
+
+ for i, row in unit_table.iterrows():
+
+ if 'y' in unit_table:
+ node, band, y, x = row['node'], int(row['band']), int(row['y']), int(row['x'])
+ compute_list.append(self.output[band][:,y,x,:])
+
+ elif 'band' in unit_table:
+ node, band = row['node'], int(row['band'])
+ compute_list.append(self.output[band])
+
+ else:
+ return self.get_all_compute_ops()
+
+ else:
+ return self.get_all_compute_ops()
+
+ return unit_table, compute_list
+
+ def get_all_compute_ops(self):
+
+ compute_list = []
+ unit_table = pd.DataFrame(columns=['node','band'])
+ for band in self.band_output:
+ unit_table = unit_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+
+ compute_list.append(self.output[band])
+
+ return unit_table, compute_list
+
+
+def test_C1_Layer():
+
+ from S1_Layer import S1_Layer
+ import matplotlib.pyplot as plt
+
+ fig_dir = 'Figures'
+ # First we need an S1 Layer
+ # these parameters are taken from Serre, et al PNAS for HMAX
+ freq_channel_params = [ [7,2.8,3.5],
+ [9,3.6,4.6],
+ [11,4.5,5.6],
+ [13,5.4,6.8],
+ [15,6.3,7.9],
+ [17,7.3,9.1],
+ [19,8.2,10.3],
+ [21,9.2,11.5],
+ [23,10.2,12.7],
+ [25,11.3,14.1],
+ [27,12.3,15.4],
+ [29,13.4,16.8],
+ [31,14.6,18.2],
+ [33,15.8,19.7],
+ [35,17.0,21.2],
+ [37,18.2,22.8],
+ [39,19.5,24.4]]
+
+ orientations = np.arange(4)*np.pi/4
+
+ input_shape = (128,192)
+    # S1_Layer expects a node name and [pixels, sigma, lambda, stride] per channel;
+    # a stride of 1 per band is assumed here for illustration.
+    freq_channel_params = [params + [1] for params in freq_channel_params]
+    s1 = S1_Layer('s1', input_shape, freq_channel_params, orientations)
+
+ # Now we need to define a C1 Layer
+ bands = [ [[0,1], 8, 3],
+ [[2,3], 10, 5],
+ [[4,5], 12, 7],
+ [[6,7], 14, 8],
+ [[8,9], 16, 10],
+ [[10,11], 18, 12],
+ [[12,13], 20, 13],
+ [[14,15,16], 22, 15]]
+
+    c1 = C_Layer('c1', s1, bands)  # C_Layer also takes a node name
+
+ # Test c1 on an image
+    from bmtk.simulator.mintnet.Image_Library import Image_Library
+
+ image_dir = '/Users/michaelbu/Code/HCOMP/SampleImages'
+
+ im_lib = Image_Library(image_dir)
+
+ image_data = im_lib(1)
+
+ fig, ax = plt.subplots(1)
+ ax.imshow(image_data[0,:,:,0],cmap='gray')
+
+ print(image_data.shape)
+
+ fig, ax = plt.subplots(len(bands),len(orientations)*2)
+ result = {}
+ for b in range(len(bands)):
+ result[b] = c1.compute_output(image_data,b)
+ print(result[b].shape)
+ n, y,x,K = result[b].shape
+
+ for k in range(K):
+ #print result[b][i].shape
+ # y = i/8
+ # x = i%8
+ # ax[y,x].imshow(result[b][0,i],interpolation='nearest',cmap='gray')
+ # ax[y,x].axis('off')
+
+ ax[b,k].imshow(result[b][0,:,:,k],interpolation='nearest',cmap='gray')
+ ax[b,k].axis('off')
+
+ fig.savefig(os.path.join(fig_dir,'c1_layer.tiff'))
+ plt.show()
+
+if __name__=='__main__':
+
+ test_C1_Layer()
diff --git a/bmtk-vb/bmtk/simulator/mintnet/hmax/Readout_Layer.py b/bmtk-vb/bmtk/simulator/mintnet/hmax/Readout_Layer.py
new file mode 100644
index 0000000..9126ea1
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/hmax/Readout_Layer.py
@@ -0,0 +1,243 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import tensorflow as tf
+from bmtk.simulator.mintnet.Image_Library_Supervised import Image_Library_Supervised
+import h5py
+
+class Readout_Layer (object):
+
+ def __init__(self,node_name,input_layer,K,lam,alt_image_dir='',file_name=None):
+
+ self.node_name = node_name
+ self.K = K
+ self.input_layer = input_layer
+ self.weight_file = file_name
+ self.lam = lam
+
+ self.alt_image_dir = alt_image_dir
+
+        if file_name is None:
+            new_weights = True
+            self.train_state = False
+            file_open = False  # no weight file was opened
+        else:
+            weight_h5 = h5py.File(self.weight_file, 'a')
+            file_open = True
+
+ if self.node_name in weight_h5.keys():
+
+ new_weights = False
+                weight_data = weight_h5[self.node_name]['weights'][()]
+                self.train_state = weight_h5[self.node_name]['train_state'][()]
+
+ else:
+
+ new_weights = True
+ self.train_state =False
+ weight_h5.create_group(self.node_name)
+ weight_h5[self.node_name]['train_state']=self.train_state
+
+ self.input = self.input_layer.input
+ #self.tf_sess = self.input_layer.tf_sess
+ self.tf_sess = tf.Session()
+
+ self.w_shape = (self.input_layer.K,self.K)
+
+ if new_weights:
+ #weights=1.0*np.ones(self.w_shape).astype(np.float32)
+ weights=100000*np.random.normal(size=self.w_shape).astype(np.float32)
+            if file_name is not None:
+ weight_h5[self.node_name].create_dataset('weights',shape=weights.shape,dtype=np.float32,compression='gzip',compression_opts=9)
+ weight_h5[self.node_name]['weights'][...]=weights
+ else:
+ weights=weight_data
+
+ self.weights = tf.Variable(weights.astype(np.float32),trainable=True,name='weights')
+ self.weights.initializer.run(session=self.tf_sess)
+ self.bias = tf.Variable(np.zeros(self.K,dtype=np.float32),trainable=True,name='bias')
+ self.bias.initializer.run(session=self.tf_sess)
+
+ # sigmoid doesn't seem to work well, and is slow
+ #self.output = tf.sigmoid(tf.matmul(self.input_layer.output,W)+self.bias)
+
+ self.input_placeholder = tf.placeholder(tf.float32,shape=(None,self.input_layer.K))
+ #self.output = tf.nn.softmax(tf.matmul(self.input_placeholder,self.weights) + self.bias)
+ self.linear = tf.matmul(self.input_placeholder,self.weights) #+ self.bias
+
+ self.output = tf.sign(self.linear)
+ #self.output = tf.nn.softmax(self.linear)
+ #self.output = tf.nn.softmax(tf.matmul(self.input_layer.output,self.weights) + self.bias)
+
+ self.y = tf.placeholder(tf.float32,shape=(None,self.K))
+
+
+ #self.cost = -tf.reduce_mean(self.y*tf.log(self.output))
+ self.cost = tf.reduce_mean((self.y - self.output)**2) + self.lam*(tf.reduce_sum(self.weights))**2
+
+ # not gonna do much with current cost function :)
+ self.train_step = tf.train.GradientDescentOptimizer(0.1).minimize(self.cost)
+
+ self.num_units = self.K
+
+ if file_open:
+ weight_h5.close()
+
+ def compute_output(self,X):
+
+ #return self.tf_sess.run(self.output,feed_dict={self.input:X})
+
+ rep = self.input_layer.tf_sess.run(self.input_layer.output,feed_dict={self.input:X})
+
+ return self.tf_sess.run(self.output,feed_dict={self.input_placeholder:rep})
+
+ def predict(self,X):
+
+ y_vals = self.compute_output(X)
+
+ return np.argmax(y_vals,axis=1)
+
+ def train(self,image_dir,batch_size=10,image_shape=(256,256),max_iter=200):
+
+ print("Training")
+
+ im_lib = Image_Library_Supervised(image_dir,new_size=image_shape)
+
+ # let's use the linear regression version for now
+ training_lib_size = 225
+ y_vals, image_data = im_lib(training_lib_size,sequential=True)
+
+ y_vals = y_vals.T[0].T
+ y_vals = 2*y_vals - 1.0
+
+ print(y_vals)
+ # print y_vals
+ # print image_data.shape
+
+ # import matplotlib.pyplot as plt
+ # plt.imshow(image_data[0,:,:,0])
+ # plt.figure()
+ # plt.imshow(image_data[1,:,:,0])
+ # plt.figure()
+ # plt.imshow(image_data[9,:,:,0])
+
+ # plt.show()
+
+ num_batches = int(np.ceil(2*training_lib_size/float(batch_size)))
+ rep_list = []
+ for i in range(num_batches):
+ print(i)
+ # if i==num_batches-1:
+ # rep = self.input_layer.tf_sess.run(self.input_layer.output,feed_dict={self.input:image_data[i*batch_size:i*batch_size + training_lib_size%batch_size]})
+ # else:
+ rep = self.input_layer.tf_sess.run(self.input_layer.output,feed_dict={self.input:image_data[i*batch_size:(i+1)*batch_size]})
+ rep_list += [rep]
+
+ rep = np.vstack(rep_list)
+
+
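+        # Closed-form ridge regression: W = (rep^T rep + lam*I)^-1 rep^T y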
+ C = np.dot(rep.T,rep) + self.lam*np.eye(self.input_layer.K)
+ W = np.dot(np.linalg.inv(C),np.dot(rep.T,y_vals)).astype(np.float32)
+
+ self.tf_sess.run(self.weights.assign(tf.expand_dims(W,1)))
+
+ train_result = self.tf_sess.run(self.output,feed_dict={self.input_placeholder:rep})
+
+ print(W)
+ print(train_result.flatten())
+ print(y_vals.flatten())
+ #print (train_result.flatten() - y_vals.flatten())
+ print("train error = ", np.mean((train_result.flatten() != y_vals.flatten())))
+
+ from scipy.stats import norm
+ target_mask = y_vals==1
+ dist_mask = np.logical_not(target_mask)
+ hit_rate = np.mean(train_result.flatten()[target_mask] == y_vals.flatten()[target_mask])
+ false_alarm = np.mean(train_result.flatten()[dist_mask] != y_vals.flatten()[dist_mask])
+ dprime = norm.ppf(hit_rate) - norm.ppf(false_alarm)
+ print("dprime = ", dprime)
+
+ # Test error
+ im_lib = Image_Library_Supervised('/Users/michaelbu/Data/SerreOlivaPoggioPNAS07/Train_Test_Set/Test',new_size=image_shape)
+
+ testing_lib_size = 300
+ y_vals_test, image_data_test = im_lib(testing_lib_size,sequential=True)
+
+ y_vals_test = y_vals_test.T[0].T
+ y_vals_test = 2*y_vals_test - 1.0
+
+ num_batches = int(np.ceil(2*testing_lib_size/float(batch_size)))
+ rep_list = []
+ for i in range(num_batches):
+ print(i)
+ # if i==num_batches-1:
+ # rep = self.input_layer.tf_sess.run(self.input_layer.output,feed_dict={self.input:image_data[i*batch_size:i*batch_size + training_lib_size%batch_size]})
+ # else:
+ rep = self.input_layer.tf_sess.run(self.input_layer.output,feed_dict={self.input:image_data_test[i*batch_size:(i+1)*batch_size]})
+ rep_list += [rep]
+
+ rep_test = np.vstack(rep_list)
+
+ test_result = self.tf_sess.run(self.output,feed_dict={self.input_placeholder:rep_test})
+
+ #print test_result
+ print("test error = ", np.mean((test_result.flatten() != y_vals_test.flatten())))
+ target_mask = y_vals_test==1
+ dist_mask = np.logical_not(target_mask)
+ hit_rate = np.mean(test_result.flatten()[target_mask] == y_vals_test.flatten()[target_mask])
+ false_alarm = np.mean(test_result.flatten()[dist_mask] != y_vals_test.flatten()[dist_mask])
+ dprime = norm.ppf(hit_rate) - norm.ppf(false_alarm)
+ print("dprime = ", dprime)
+
+ print(rep_test.shape)
+
+
+ # logistic regression unit
+ # import time
+ # for n in range(max_iter):
+ # start = time.time()
+ # print "\tIteration ", n
+
+ # y_vals, image_data = im_lib(batch_size,sequential=True)
+
+ # print "\tComputing representation"
+ # rep = self.input_layer.tf_sess.run(self.input_layer.output,feed_dict={self.input:image_data})
+
+ # print "\tGradient descent step"
+ # #print "rep shape = ", rep.shape
+ # self.tf_sess.run(self.train_step,feed_dict={self.input_placeholder:rep,self.y:y_vals})
+
+
+ # #self.tf_sess.run(self.train_step,feed_dict={self.input:image_data,self.y:y_vals})
+
+ # #print "\t\ttraining batch cost = ", self.tf_sess.run(self.cost,feed_dict={self.input:image_data,self.y:y_vals})
+
+ # print "\t\tTraining error = ", np.mean(np.abs(np.argmax(y_vals,axis=1) - self.predict(image_data)))
+ # print y_vals
+ # print
+ # print self.predict(image_data)
+ # print "\t\ttraining batch cost = ", self.tf_sess.run(self.cost,feed_dict={self.input_placeholder:rep,self.y:y_vals})
+ # print "\t\ttraining linear model = ", self.tf_sess.run(self.linear,feed_dict={self.input_placeholder:rep,self.y:y_vals})
+
+ # print "\t\ttotal time = ", time.time() - start
+
diff --git a/bmtk-vb/bmtk/simulator/mintnet/hmax/S1_Layer.py b/bmtk-vb/bmtk/simulator/mintnet/hmax/S1_Layer.py
new file mode 100644
index 0000000..44bed67
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/hmax/S1_Layer.py
@@ -0,0 +1,273 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import tensorflow as tf
+import os
+import pandas as pd
+
+def gabor(X,Y,lamb,sigma,theta,gamma,phase):
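+    '''Gabor filter: a cosine carrier of wavelength lamb and the given phase, oriented at
+    theta (radians), under a Gaussian envelope of width sigma and aspect ratio gamma.'''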
+
+ X_hat = X*np.cos(theta) + Y*np.sin(theta)
+ Y_hat = -X*np.sin(theta) + Y*np.cos(theta)
+
+ arg1 = (0.5/sigma**2)*(X_hat**2 + (gamma**2)*Y_hat**2)
+ arg2 = (2.0*np.pi/lamb)*X_hat
+
+ return np.exp(-arg1)*np.cos(arg2 + phase)
+
+class S1_Layer (object):
+ def __init__(self,node_name,input_shape,freq_channel_params,orientations): #,num_cores=8):
+ '''
+        freq_channel_params is a list of filter parameters, one entry per S1 band:
+        len(freq_channel_params) == num_bands and freq_channel_params[i] = [pixels, sigma, lambda, stride]
+ orientations is a list of angles in radians for each filter
+ '''
+ #self.tf_sess = tf.Session()
+
+ self.node_name = node_name
+# NUM_CORES = num_cores # Choose how many cores to use.
+# NUM_CORES = 1
+# self.tf_sess = tf.Session(config=tf.ConfigProto(inter_op_parallelism_threads=NUM_CORES,
+# intra_op_parallelism_threads=NUM_CORES))
+ self.tf_sess = tf.Session()
+# print "Warning: Using hard-coded number of CPU Cores. This should be changed to auto-configure when TensorFlow has been updated."
+
+ self.input_shape = (None,input_shape[0],input_shape[1],1)
+ self.input = tf.placeholder(tf.float32,shape=self.input_shape,name="input")
+
+ #phases = np.array([0, np.pi/2])
+ phases = np.array([0.0]) # HMAX uses dense tiling in lieu of phases (make this explicit later)
+
+ num_bands = len(freq_channel_params)
+ num_orientations = len(orientations)
+ num_phases = len(phases)
+ self.K = num_orientations*num_phases #number of features per band
+
+ #n_output = num_frequency_channels*num_orientations*num_phases
+
+ n_input = 1
+
+ self.band_filters = {}
+ self.filter_params = {}
+ self.band_output = {}
+ self.output = self.band_output
+ self.band_shape = {}
+
+ with tf.name_scope(self.node_name):
+ for band in range(num_bands):
+ pixels, sigma, lamb, stride = freq_channel_params[band]
+ self.band_shape[band] = input_shape
+
+ w_shape = np.array([pixels,pixels,n_input,self.K])
+
+ W = np.zeros(w_shape,dtype=np.float32)
+
+ #compute w values from parameters
+ gamma = 0.3 # value taken from Serre et al giant HMAX manuscript from 2005
+ X,Y = np.meshgrid(np.arange(pixels),np.arange(pixels))
+                X = X - pixels//2   # integer division keeps the original (Python 2) centering
+                Y = Y - pixels//2
+
+ #self.filter_params[band] = freq_channel_params[band]
+ self.filter_params[band] = {'pixels':pixels,'sigma':sigma,'lambda':lamb, 'stride':stride} #should I add orientations and phases to this?
+
+ for i in range(self.K):
+
+ ori_i = i%num_orientations
+                    phase_i = i//num_orientations  # integer division: phase index
+
+ theta = orientations[ori_i]
+ phase = phases[phase_i]
+
+                    # zero out the filter outside its circular aperture
+                    zero_mask = (X*X + Y*Y > pixels*pixels/4)
+
+ W[:,:,0,i] = gabor(X,Y,lamb,sigma,theta,gamma,phase)
+ W[:,:,0,i][zero_mask] = 0.0
+ W[:,:,0,i] = W[:,:,0,i]/np.sqrt(np.sum(W[:,:,0,i]**2))
+
+ W = tf.Variable(W,trainable=False,name='W_'+str(band))
+ W.initializer.run(session=self.tf_sess)
+
+ self.band_filters[band] = W
+
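+                # L2-normalize each input image so S1 responses measure pattern
+                # match rather than overall image contrast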
+ input_norm = tf.reshape(tf.reduce_sum(self.input*self.input,[1,2,3]),[-1,1,1,1])
+ normalized_input = tf.div(self.input,tf.sqrt(input_norm))
+ self.band_output[band] = tf.nn.conv2d(normalized_input,W,strides=[1,stride,stride,1],padding='SAME')
+ self.band_shape[band] = tuple([int(x) for x in self.band_output[band].get_shape()[1:3]])
+
+
+ self.num_units = 0
+        for b in self.band_shape:
+            self.num_units += np.prod(self.band_shape[b])*self.K
+
+ def __del__(self):
+ self.tf_sess.close()
+
+ def __repr__(self):
+ return "S1_Layer"
+
+ def compute_output(self,X,band):
+
+ return self.tf_sess.run(self.output[band],feed_dict={self.input:X})
+
+ def get_compute_ops(self,unit_table=None):
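+        '''Return (unit_table, compute_list): the tf ops to evaluate for the
+        requested units. A row with columns ('node','band','y','x') selects one
+        spatial position; a row with only ('node','band') selects a whole band.
+        With no unit_table, ops for every band are returned.'''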
+
+ compute_list = []
+
+ if unit_table is not None:
+
+ for i, row in unit_table.iterrows():
+
+ if 'y' in unit_table:
+ node, band, y, x = row['node'], int(row['band']), int(row['y']), int(row['x'])
+ compute_list.append(self.output[band][:,y,x,:])
+
+ elif 'band' in unit_table:
+ node, band = row['node'], int(row['band'])
+ compute_list.append(self.output[band])
+
+ else:
+ return self.get_all_compute_ops()
+
+ else:
+ return self.get_all_compute_ops()
+
+ return unit_table, compute_list
+
+ def get_all_compute_ops(self):
+
+ compute_list = []
+ unit_table = pd.DataFrame(columns=['node','band'])
+ for band in self.band_output:
+ unit_table = unit_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+
+ compute_list.append(self.output[band])
+
+ return unit_table, compute_list
+
+def S1_Layer_test():
+
+ import matplotlib.pyplot as plt
+
+ fig_dir = 'Figures'
+
+ # these parameters are taken from Serre, et al PNAS for HMAX
+    # [pixels, sigma, lambda]; S1_Layer also expects a stride per band, so a
+    # stride of 1 (an assumption) is appended to each published triple
+    freq_channel_params = [ [7,2.8,3.5,1],
+                            [9,3.6,4.6,1],
+                            [11,4.5,5.6,1],
+                            [13,5.4,6.8,1],
+                            [15,6.3,7.9,1],
+                            [17,7.3,9.1,1],
+                            [19,8.2,10.3,1],
+                            [21,9.2,11.5,1],
+                            [23,10.2,12.7,1],
+                            [25,11.3,14.1,1],
+                            [27,12.3,15.4,1],
+                            [29,13.4,16.8,1],
+                            [31,14.6,18.2,1],
+                            [33,15.8,19.7,1],
+                            [35,17.0,21.2,1],
+                            [37,18.2,22.8,1],
+                            [39,19.5,24.4,1]]
+
+ orientations = np.arange(4)*np.pi/4
+
+ input_shape = (128,192)
+    s1 = S1_Layer('s1',input_shape,freq_channel_params,orientations)
+
+ #plot filters, make sure they are correct
+ fig, ax = plt.subplots(len(orientations),len(freq_channel_params))
+ fig2,ax2 = plt.subplots(len(orientations),len(freq_channel_params))
+ for i,theta in enumerate(orientations):
+ for j,params in enumerate(freq_channel_params):
+
+ #index = j*len(orientations)*2 + i*2
+
+ fil = s1.tf_sess.run(s1.band_filters[j])[:,:,0,i]
+
+ ax[i,j].imshow(fil,interpolation='nearest',cmap='gray')
+ ax[i,j].axis('off')
+
+ fil = s1.tf_sess.run(s1.band_filters[j])[:,:,0,i+4]
+
+ ax2[i,j].imshow(fil,interpolation='nearest',cmap='gray')
+ ax2[i,j].axis('off')
+
+
+ from Image_Library import Image_Library
+
+ image_dir = '/Users/michaelbu/Code/HCOMP/SampleImages'
+
+ im_lib = Image_Library(image_dir)
+
+ image_data = im_lib(1)
+
+ fig, ax = plt.subplots(1)
+ ax.imshow(image_data[0,:,:,0],cmap='gray')
+
+ import timeit
+ #print timeit.timeit('result = s1.compute_output(image_data)','from __main__ import s1',number=10)
+
+ def f():
+ for band in range(len(freq_channel_params)):
+ s1.compute_output(image_data,band)
+
+ number = 10
+ runs = timeit.Timer(f).repeat(repeat=10,number=number)
+ print("Average time (s) for output evaluation for ", number, " runs: ", np.mean(runs)/number, '+/-', np.std(runs)/np.sqrt(number))
+
+
+
+ print("Image shape = ", image_data.shape)
+
+
+ fig_r, ax_r = plt.subplots(len(orientations),len(freq_channel_params))
+ fig_r2,ax_r2 = plt.subplots(len(orientations),len(freq_channel_params))
+
+ for j,params in enumerate(freq_channel_params):
+
+ result = s1.compute_output(image_data,j)
+ print("result shape = ", result.shape)
+
+ for i,theta in enumerate(orientations):
+
+ #fil = np.zeros([39,39])
+ #index = j*len(orientations)*2 + i*2
+ #print s1.params[0]
+
+ ax_r[i,j].imshow(result[0,:,:,i],interpolation='nearest',cmap='gray')
+ ax_r[i,j].axis('off')
+
+ ax_r2[i,j].imshow(result[0,:,:,i+4],interpolation='nearest',cmap='gray')
+ ax_r2[i,j].axis('off')
+
+ fig_r.savefig(os.path.join(fig_dir,'s1_layer_0.tiff'))
+ fig_r2.savefig(os.path.join(fig_dir,'s1_layer_1.tiff'))
+ plt.show()
+
+ #sess.close()
+
+if __name__=='__main__':
+
+ S1_Layer_test()
diff --git a/bmtk-vb/bmtk/simulator/mintnet/hmax/S_Layer.py b/bmtk-vb/bmtk/simulator/mintnet/hmax/S_Layer.py
new file mode 100644
index 0000000..df10f08
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/hmax/S_Layer.py
@@ -0,0 +1,404 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import tensorflow as tf
+from bmtk.simulator.mintnet.Image_Library import Image_Library
+import os
+import h5py
+import pandas as pd
+
+class S_Layer (object):
+ def __init__(self, node_name, C_Layer_input, grid_size, pool_size, K, file_name=None, randomize=False):
+ self.node_name = node_name
+
+ self.input = C_Layer_input.input
+
+ self.tf_sess = C_Layer_input.tf_sess
+ #self.input_layer = C_Layer_input
+ # c_output should be a dictionary indexed over bands
+
+ c_output = C_Layer_input.output
+ self.C_Layer_input = C_Layer_input
+
+ self.K = K
+ self.input_K = C_Layer_input.K
+ self.grid_size = grid_size
+ self.pool_size = pool_size
+
+ self.band_output = {}
+ #self.band_filters = {}
+ self.band_shape = C_Layer_input.band_shape
+ #print self.band_shape
+
+ file_open = False
+        if file_name is None:
+ self.train_state=False
+ new_weights = True
+ else:
+
+ self.weight_file = file_name
+
+ weight_h5 = h5py.File(self.weight_file, 'a')
+ file_open = True
+
+ if self.node_name in weight_h5.keys():
+
+ new_weights=False
+ weight_data = weight_h5[self.node_name]['weights']
+ self.train_state = weight_h5[self.node_name]['train_state'].value
+
+ else:
+
+ new_weights=True
+ self.train_state = False
+ weight_h5.create_group(self.node_name)
+ #weight_h5[self.node_name].create_group('weights')
+ weight_h5[self.node_name]['train_state']=self.train_state
+
+
+
+ # perform checks to make sure weight_file is consistent with the Layer parameters
+ # check input bands
+ # check grid_size, pool_size, K
+
+ with tf.name_scope(self.node_name):
+ #for band in c_output.keys():
+
+ if new_weights:
+
+ # if self.grid_size >= self.band_shape[band][0]:
+ # size_y = self.band_shape[band][0]
+ # else:
+ # size_y = grid_size
+ # if self.grid_size >= self.band_shape[band][1]:
+ # size_x = self.band_shape[band][1]
+ # else:
+ # size_x = grid_size
+
+ w_shape = np.array([self.grid_size,self.grid_size,self.input_K,self.K])
+
+ self.w_shape = w_shape
+
+ w_bound = np.sqrt(np.prod(w_shape[1:]))
+ if randomize:
+ W = np.random.uniform(low= -1.0/w_bound, high=1.0/w_bound, size=w_shape).astype(np.float32)
+ else:
+ W = np.zeros(w_shape).astype(np.float32)
+
+                if file_name is not None:
+ weight_h5[self.node_name].create_dataset('weights',shape=w_shape,dtype=np.float32)
+
+ else:
+ # Need to check that c_output.keys() has the same set of keys that weight_dict is expecting
+ W = weight_data.value
+ self.w_shape = W.shape
+
+
+
+
+ W = tf.Variable(W,trainable=False,name='W')
+ W.initializer.run(session=self.tf_sess)
+
+ #self.band_filters[band]= W
+ self.weights = W
+
+ for band in c_output.keys():
+ W_slice = W[:self.band_shape[band][0],:self.band_shape[band][1]]
+
+ input_norm = tf.expand_dims(tf.reduce_sum(c_output[band]*c_output[band],[1,2]),1) #,[-1,1,1,self.input_K])
+ input_norm = tf.expand_dims(input_norm,1)
+ normalized_input = tf.div(c_output[band],tf.maximum(tf.sqrt(input_norm),1e-12))
+ self.band_output[band] = tf.nn.conv2d(normalized_input,W_slice,strides=[1,1,1,1],padding='SAME')
+
+ self.output = self.band_output
+
+ self.num_units = 0
+ for b in self.band_shape:
+ self.num_units += np.prod(self.band_shape[b])*self.K
+
+ if file_open:
+ weight_h5.close()
+
+ def __repr__(self):
+ return "S_Layer"
+
+ def compute_output(self,X,band):
+
+ return self.tf_sess.run(self.output[band],feed_dict={self.input:X})
+
+ def find_band_and_coords_for_imprinting_unit(self, imprinting_unit_index):
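+        '''Map a flat unit index (taken over all bands of the input C layer, in
+        band order, row-major within each band) to its (band, y, x) coordinates.'''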
+
+ cumulative_units = 0
+ for band in self.C_Layer_input.output:
+
+ units_in_next_band = int(np.prod(self.C_Layer_input.output[band].get_shape()[1:3]))
+
+ if imprinting_unit_index < cumulative_units + units_in_next_band:
+ # found the right band!
+ yb, xb = self.C_Layer_input.band_shape[band]
+
+ band_index = imprinting_unit_index - cumulative_units
+
+                y = band_index//xb
+                x = band_index%xb
+ break
+ else:
+ cumulative_units += units_in_next_band
+
+ return band, y, x
+
+
+
+ def get_total_pixels_in_C_Layer_input(self):
+
+ total = 0
+
+ band_shape = self.C_Layer_input.band_shape
+        band_ids = sorted(band_shape.keys())
+
+ for band in band_ids:
+ total += np.prod(band_shape[band])
+
+ return total
+
+
+ def get_patch_bounding_box_and_shift(self,band,y,x):
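+        '''Clip a grid_size x grid_size patch centred on (y,x) to the band's
+        bounds; return the clipped bounds plus the offsets ("shifts") at which
+        the clipped patch sits inside the full-size weight array.
+
+        E.g. with grid_size=3 and y=0, y_lower starts at -1, is clipped to 0,
+        and y_shift_lower becomes 1: the surviving 2 rows land at rows 1:3.'''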
+        y_lower = y - self.grid_size//2
+        y_upper = y_lower + self.grid_size
+
+        x_lower = x - self.grid_size//2
+        x_upper = x_lower + self.grid_size
+
+ yb, xb = self.C_Layer_input.band_shape[band]
+
+ # compute shifts in lower bound to deal with overlap with the edges
+ y_shift_lower = np.max([-y_lower,0])
+ x_shift_lower = np.max([-x_lower,0])
+
+
+ y_lower = np.max([y_lower,0])
+ y_upper = np.min([y_upper,yb])
+
+ x_lower = np.max([x_lower,0])
+ x_upper = np.min([x_upper,xb])
+
+ y_shift_upper = y_shift_lower + y_upper - y_lower
+ x_shift_upper = x_shift_lower + x_upper - x_lower
+
+ return y_lower, y_upper, x_lower, x_upper, y_shift_lower, y_shift_upper, x_shift_lower, x_shift_upper
+
+ def train(self,image_dir,batch_size=100,image_shape=(256,256)): #,save_file='weights.pkl'):
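+        '''Imprint the K prototype features. For each feature: pick a random
+        unit of the input C layer, run one random image through that layer,
+        copy the grid_size x grid_size patch of activations around the unit,
+        and keep only pool_size randomly chosen entries (the rest are zeroed).'''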
+
+ print("Training")
+
+ im_lib = Image_Library(image_dir,new_size=image_shape)
+
+ new_weights = np.zeros(self.w_shape).astype(np.float32)
+
+
+ for k in range(self.K):
+
+ if k%10==0:
+ print("Imprinting feature ", k)
+ # how to handle the randomly picked neuron; rejection sampling?
+ imprinting_unit_index = np.random.randint(self.get_total_pixels_in_C_Layer_input())
+
+ #print "Imprinting unit index ", imprinting_unit_index
+ band, y, x = self.find_band_and_coords_for_imprinting_unit(imprinting_unit_index)
+ #print "Imprinting unit in band ", band, " at ", (y, x)
+
+ im_data = im_lib(1)
+
+ output = self.C_Layer_input.compute_output(im_data,band)
+
+ # grab weights from chosen unit, save them to new_weights
+ y_lower, y_upper, x_lower, x_upper, y_shift_lower, y_shift_upper, x_shift_lower, x_shift_upper = self.get_patch_bounding_box_and_shift(band,y,x)
+
+ w_patch = output[0,y_lower:y_upper,x_lower:x_upper,:].copy()
+
+ #print "(y_lower, y_upper), (x_lower, x_upper) = ", (y_lower, y_upper), (x_lower, x_upper)
+ #print "Patch shape = ", w_patch.shape
+
+ patch_size = np.prod(w_patch.shape)
+ # print "self.w_shape = ", self.w_shape, " patch_size = ", patch_size, " pool_size = ", self.pool_size
+ # print "band, y, x = ", band,y,x
+
+ pool_size = np.min([self.pool_size,patch_size])
+ pool_mask_indices = np.random.choice(np.arange(patch_size), size=pool_size, replace=False)
+ pool_mask = np.zeros(patch_size,dtype=np.bool)
+ pool_mask[pool_mask_indices] = True
+ pool_mask.resize(w_patch.shape)
+ pool_mask = np.logical_not(pool_mask) # we want a mask for the indices to zero out
+
+ w_patch[pool_mask] = 0.0
+
+ # will need to enlarge w_patch if the edges got truncated
+
+ new_weights[y_shift_lower:y_shift_upper,x_shift_lower:x_shift_upper,:,k] = w_patch
+
+
+ # old code starts here
+ # num_batches = self.K/batch_size
+ # if self.K%batch_size!=0:
+ # num_batches = num_batches+1
+
+ self.tf_sess.run(self.weights.assign(new_weights))
+        print()
+        print("Saving weights to file ", self.weight_file)
+
+ weight_h5 = h5py.File(self.weight_file,'a')
+ #for band in new_weights:
+ weight_h5[self.node_name]['weights'][...] = new_weights
+ weight_h5[self.node_name]['train_state'][...]=True
+
+ weight_h5.close()
+
+ # def get_compute_ops(self):
+ #
+ # node_table = pd.DataFrame(columns=['node','band'])
+ # compute_list = []
+ #
+ # for band in self.band_output:
+ # node_table = node_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+ #
+ # compute_list.append(self.output[band])
+ #
+ # return node_table, compute_list
+
+ def get_compute_ops(self,unit_table=None):
+
+ compute_list = []
+
+ if unit_table is not None:
+
+ for i, row in unit_table.iterrows():
+
+ if 'y' in unit_table:
+ node, band, y, x = row['node'], int(row['band']), int(row['y']), int(row['x'])
+ compute_list.append(self.output[band][:,y,x,:])
+
+ elif 'band' in unit_table:
+ node, band = row['node'], int(row['band'])
+ compute_list.append(self.output[band])
+
+ else:
+ return self.get_all_compute_ops()
+
+ else:
+ return self.get_all_compute_ops()
+
+ return unit_table, compute_list
+
+ def get_all_compute_ops(self):
+
+ compute_list = []
+ unit_table = pd.DataFrame(columns=['node','band'])
+ for band in self.band_output:
+ unit_table = unit_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+
+ compute_list.append(self.output[band])
+
+ return unit_table, compute_list
+
+
+def test_S_Layer_output():
+
+ from S1_Layer import S1_Layer
+ import matplotlib.pyplot as plt
+ from C_Layer import C_Layer
+
+ fig_dir = 'Figures'
+ # First we need an S1 Layer
+ # these parameters are taken from Serre, et al PNAS for HMAX
+    # [pixels, sigma, lambda]; S1_Layer also expects a stride per band, so a
+    # stride of 1 (an assumption) is appended to each published triple
+    freq_channel_params = [ [7,2.8,3.5,1],
+                            [9,3.6,4.6,1],
+                            [11,4.5,5.6,1],
+                            [13,5.4,6.8,1],
+                            [15,6.3,7.9,1],
+                            [17,7.3,9.1,1],
+                            [19,8.2,10.3,1],
+                            [21,9.2,11.5,1],
+                            [23,10.2,12.7,1],
+                            [25,11.3,14.1,1],
+                            [27,12.3,15.4,1],
+                            [29,13.4,16.8,1],
+                            [31,14.6,18.2,1],
+                            [33,15.8,19.7,1],
+                            [35,17.0,21.2,1],
+                            [37,18.2,22.8,1],
+                            [39,19.5,24.4,1]]
+
+ orientations = np.arange(4)*np.pi/4
+
+ input_shape = (128,192)
+    s1 = S1_Layer('s1',input_shape,freq_channel_params,orientations)
+
+ # Now we need to define a C1 Layer
+ bands = [ [[0,1], 8, 3],
+ [[2,3], 10, 5],
+ [[4,5], 12, 7],
+ [[6,7], 14, 8],
+ [[8,9], 16, 10],
+ [[10,11], 18, 12],
+ [[12,13], 20, 13],
+ [[14,15,16], 22, 15]]
+
+    c1 = C_Layer('c1',s1,bands)
+
+ grid_size = 3
+ pool_size = 10
+ K = 10
+
+ s2 = S_Layer('s2',c1,grid_size,pool_size,K,file_name='S_test_file.h5',randomize=False)
+
+ # Test s2 on an image
+ image_dir = '/Users/michaelbu/Code/HCOMP/SampleImages'
+
+ im_lib = Image_Library(image_dir,new_size=input_shape)
+
+ image_data = im_lib(1)
+
+ fig, ax = plt.subplots(1)
+ ax.imshow(image_data[0,:,:,0],cmap='gray')
+
+ fig,ax = plt.subplots(8,10)
+
+ result = {}
+ for b in range(len(bands)):
+ result[b] = s2.compute_output(image_data,b)
+
+ for k in range(K):
+ ax[b,k].imshow(result[b][0,:,:,k],interpolation='nearest',cmap='gray')
+ ax[b,k].axis('off')
+
+ fig.savefig(os.path.join(fig_dir,'s2_layer.tiff'))
+ plt.show()
+
+ s2.train(image_dir,batch_size=10,image_shape=input_shape) #,save_file='test_weights.pkl')
+
+
+
+
+if __name__=='__main__':
+    test_S_Layer_output()
diff --git a/bmtk-vb/bmtk/simulator/mintnet/hmax/Sb_Layer.py b/bmtk-vb/bmtk/simulator/mintnet/hmax/Sb_Layer.py
new file mode 100644
index 0000000..4731323
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/hmax/Sb_Layer.py
@@ -0,0 +1,242 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import tensorflow as tf
+from .S_Layer import S_Layer
+import pandas as pd
+
+class Sb_Layer (object):
+ def __init__(self,node_name,C_Layer_input,grid_size,pool_size,K_per_subband,file_name=None):
+        '''grid_size is a list (one entry per sublayer), unlike the scalar grid_size of the standard S_Layer'''
+
+ self.node_name = node_name
+ self.tf_sess = C_Layer_input.tf_sess
+
+ self.input = C_Layer_input.input
+
+ self.num_sublayers = len(grid_size)
+ self.K = K_per_subband*self.num_sublayers #number of features will be number of sub bands times the K per subband
+ self.pool_size = pool_size
+ self.grid_size = grid_size
+
+ c_output = C_Layer_input.output
+
+ self.sublayers = {}
+ with tf.name_scope(self.node_name):
+ for i in range(self.num_sublayers):
+ subnode_name = node_name+'_'+str(i)
+ self.sublayers[i] = S_Layer(subnode_name,C_Layer_input,grid_size[i],pool_size,K_per_subband,file_name)
+
+ self.band_output = {}
+ self.band_shape = C_Layer_input.band_shape
+
+ for band in c_output.keys():
+
+ sub_band_list = []
+ for i in range(self.num_sublayers):
+ sub_band_list += [self.sublayers[i].band_output[band]]
+
+
+
+ #gather sub_layer outputs and stack them for each band
+ self.band_output[band] = tf.concat(sub_band_list, 3)
+
+ self.output = self.band_output
+
+ self.num_units = 0
+ for b in self.band_shape:
+ self.num_units += np.prod(self.band_shape[b])*self.K
+
+ def __repr__(self):
+ return "Sb_Layer"
+
+ def compute_output(self,X,band):
+
+ return self.tf_sess.run(self.output[band],feed_dict={self.input:X})
+
+ def train(self,image_dir,batch_size=100,image_shape=(256,256)): #,save_file_prefix='weights'):
+
+ for i in range(self.num_sublayers):
+ #save_file = save_file_prefix + '_'+str(i)+'.pkl'
+
+ #try:
+ self.sublayers[i].train(image_dir,batch_size,image_shape) #,save_file)
+ #except Exception as e:
+ # print i
+ # raise e
+
+ # def get_compute_ops(self):
+ #
+ # node_table = pd.DataFrame(columns=['node','band'])
+ # compute_list = []
+ #
+ # for band in self.band_output:
+ # node_table = node_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+ #
+ # compute_list.append(self.output[band])
+ #
+ # return node_table, compute_list
+
+ def get_compute_ops(self,unit_table=None):
+
+ compute_list = []
+
+ if unit_table is not None:
+
+ for i, row in unit_table.iterrows():
+
+ if 'y' in unit_table:
+ node, band, y, x = row['node'], int(row['band']), int(row['y']), int(row['x'])
+ compute_list.append(self.output[band][:,y,x,:])
+
+ elif 'band' in unit_table:
+ node, band = row['node'], int(row['band'])
+ compute_list.append(self.output[band])
+
+ else:
+ return self.get_all_compute_ops()
+
+ else:
+ return self.get_all_compute_ops()
+
+ return unit_table, compute_list
+
+ def get_all_compute_ops(self):
+
+ compute_list = []
+ unit_table = pd.DataFrame(columns=['node','band'])
+ for band in self.band_output:
+ unit_table = unit_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+
+ compute_list.append(self.output[band])
+
+ return unit_table, compute_list
+
+
+def test_S2b_Layer():
+
+ from S1_Layer import S1_Layer
+ import matplotlib.pyplot as plt
+    from C_Layer import C_Layer
+    import os   # needed below for os.path.join
+
+ fig_dir = 'Figures'
+ # First we need an S1 Layer
+ # these parameters are taken from Serre, et al PNAS for HMAX
+    # [pixels, sigma, lambda]; S1_Layer also expects a stride per band, so a
+    # stride of 1 (an assumption) is appended to each published triple
+    freq_channel_params = [ [7,2.8,3.5,1],
+                            [9,3.6,4.6,1],
+                            [11,4.5,5.6,1],
+                            [13,5.4,6.8,1],
+                            [15,6.3,7.9,1],
+                            [17,7.3,9.1,1],
+                            [19,8.2,10.3,1],
+                            [21,9.2,11.5,1],
+                            [23,10.2,12.7,1],
+                            [25,11.3,14.1,1],
+                            [27,12.3,15.4,1],
+                            [29,13.4,16.8,1],
+                            [31,14.6,18.2,1],
+                            [33,15.8,19.7,1],
+                            [35,17.0,21.2,1],
+                            [37,18.2,22.8,1],
+                            [39,19.5,24.4,1]]
+
+ orientations = np.arange(4)*np.pi/4
+
+ input_shape = (128,192)
+    s1 = S1_Layer('s1',input_shape,freq_channel_params,orientations)
+
+ # Now we need to define a C1 Layer
+ bands = [ [[0,1], 8, 3],
+ [[2,3], 10, 5],
+ [[4,5], 12, 7],
+ [[6,7], 14, 8],
+ [[8,9], 16, 10],
+ [[10,11], 18, 12],
+ [[12,13], 20, 13],
+ [[14,15,16], 22, 15]]
+
+    c1 = C_Layer('c1',s1,bands)
+
+ print("s1 shape: ", s1.band_shape)
+ print("c1 shape: ", c1.band_shape)
+
+ grid_size = [6,9,12,15]
+ pool_size = 10
+ K = 10
+
+    s2b = Sb_Layer('s2b',c1,grid_size,pool_size,K)
+
+ print("s2b shape: ", s2b.band_shape)
+
+ c2b_bands = [ [[0,1,2,3,4,5,6,7],40,40]]
+
+    c2b = C_Layer('c2b',s2b,c2b_bands)
+
+
+ print("c2b shape: ", c2b.band_shape)
+ #print c2b.band_output.keys()
+ # Test s2 on an image
+ from Image_Library import Image_Library
+
+ image_dir = '/Users/michaelbu/Code/HCOMP/SampleImages'
+
+ im_lib = Image_Library(image_dir,new_size=input_shape)
+
+ image_data = im_lib(1)
+
+ fig, ax = plt.subplots(1)
+ ax.imshow(image_data[0,:,:,0],cmap='gray')
+
+ fig,ax = plt.subplots(8,10)
+
+ result = {}
+ for b in range(len(bands)):
+ result[b] = s2b.compute_output(image_data,b)
+
+ for k in range(K):
+ ax[b,k].imshow(result[b][0,:,:,k],interpolation='nearest',cmap='gray')
+ ax[b,k].axis('off')
+
+ fig.savefig(os.path.join(fig_dir,'s2b_layer.tiff'))
+
+ fig,ax = plt.subplots(8,10)
+
+ result = {}
+
+ #only one band for c2b
+ result[0] = c2b.compute_output(image_data,0)
+
+    for k in range(K):
+        ax[0,k].imshow(result[0][0,:,:,k],interpolation='nearest',cmap='gray')  # single band -> first row
+        ax[0,k].axis('off')
+
+ fig.savefig(os.path.join(fig_dir,'c2b_layer.tiff'))
+
+
+ #plt.show()
+
+    s2b.train(image_dir,batch_size=10,image_shape=input_shape)  # train() takes no save_file_prefix
+
+if __name__=='__main__':
+
+ test_S2b_Layer()
diff --git a/bmtk-vb/bmtk/simulator/mintnet/hmax/ViewTunedLayer.py b/bmtk-vb/bmtk/simulator/mintnet/hmax/ViewTunedLayer.py
new file mode 100644
index 0000000..1ae95e1
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/hmax/ViewTunedLayer.py
@@ -0,0 +1,219 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import tensorflow as tf
+from bmtk.simulator.mintnet.Image_Library import Image_Library
+#from bmtk.mintnet.Stimulus.NaturalScenes import NaturalScenes
+import h5py
+import pandas as pd
+
+class ViewTunedLayer (object):
+ def __init__(self,node_name,K,alt_image_dir='',*inputs,**keyword_args):
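+        '''Flatten and concatenate the outputs of the *inputs layers (which must
+        share one input placeholder and tf session) into a single feature vector,
+        then compare it against K stored templates via a dot product of the
+        L2-normalized response.'''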
+
+ self.node_name=node_name
+
+ file_name = keyword_args.get('file_name',None)
+
+ self.alt_image_dir = alt_image_dir
+
+        file_open = False
+        if file_name is None:
+            print("No filename given. Generating new (zero) weights for layer ", node_name)
+            self.train_state = False
+            new_weights=True
+ else:
+
+ self.weight_file = file_name
+ weight_h5 = h5py.File(self.weight_file,'a')
+ file_open=True
+
+ if self.node_name in weight_h5.keys():
+
+ #print "Loading weights for layer ", node_name, " from ", self.weight_file
+ new_weights = False
+ weight_data = weight_h5[self.node_name]['weights'].value
+                self.train_state = weight_h5[self.node_name]['train_state'].value  # read the stored flag, not the dataset object
+
+ else:
+
+ new_weights=True
+ self.train_state=False
+ weight_h5.create_group(self.node_name)
+ weight_h5[self.node_name]['train_state']=self.train_state
+
+ self.input = inputs[0].input
+ self.tf_sess = inputs[0].tf_sess
+ #should add a check that all inputs have the same value of inputs[i].input
+
+ self.K = K
+
+ concat_list = []
+ total_K = 0
+
+ with tf.name_scope(self.node_name):
+ for i, node in enumerate(inputs):
+
+ output_i = node.output
+
+ for b in output_i:
+ shape = node.band_shape[b]
+
+ num_K = np.prod(shape)*node.K
+ total_K = total_K + num_K
+ #print "shape = ", shape, " total_K = ", num_K
+ reshape_op = tf.reshape(output_i[b],[-1,num_K])
+ concat_list += [reshape_op]
+
+ self.input_unit_vector = tf.concat(concat_list, 1) #shape [batch_size, total_K]
+
+ self.w_shape = (total_K,K)
+ #weight = np.random.normal(size=self.w_shape).astype(np.float32)
+ if new_weights:
+ weight = np.zeros(self.w_shape).astype(np.float32)
+ weight_h5[self.node_name].create_dataset('weights',shape=weight.shape,dtype=np.float32,compression='gzip',compression_opts=9)
+ else:
+ weight = weight_data #ict['ViewTunedWeight']
+ assert weight.shape[0]==total_K, "weights from file are not equal to total input size for layer "+self.node_name
+
+
+ self.weights = tf.Variable(weight,trainable=False,name='weights')
+ self.weights.initializer.run(session=self.tf_sess)
+
+ #print self.input_unit_vector.get_shape(), total_K
+ #should this be a dictionary for consistency?
+ #print "input unit vector shape = ", self.input_unit_vector.get_shape()
+ #print "total_K = ", total_K
+
+ input_norm = tf.expand_dims(tf.reduce_sum(self.input_unit_vector*self.input_unit_vector,[1]),1) #,[-1,total_K])
+ normalized_input = tf.div(self.input_unit_vector,tf.sqrt(input_norm))
+ self.output = tf.matmul(normalized_input,self.weights) #/0.01
+
+ # try gaussian tuning curve centered on preferred feature
+ # self.output = tf.exp(-0.5*tf.reduce_sum(self.weights - self.input_unit_vector))
+
+ self.num_units = K
+
+ if file_open:
+ weight_h5.close()
+
+ def __repr__(self):
+ return "ViewTunedLayer"
+
+ def compute_output(self,X):
+
+ return self.tf_sess.run(self.output,feed_dict={self.input:X})
+
+ def train(self,image_dir,batch_size=10,image_shape=(256,256)): #,save_file=None):
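+        '''Imprint the K templates: each column of the weight matrix is set to
+        the network's response vector to one training image, then all columns
+        are L2-normalized.'''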
+
+ print("Training")
+
+ im_lib = Image_Library(image_dir,new_size=image_shape)
+
+ #ns_lib = NaturalScenes.with_new_stimulus_from_folder(image_dir, new_size=image_shape, add_channels=True)
+
+ new_weights = np.zeros(self.w_shape,dtype=np.float32)
+
+        num_batches = self.K//batch_size
+
+ for n in range(num_batches):
+ #for k in range(self.K):
+ print("\t\tbatch: ", n, " Total features: ", n*batch_size)
+ print("\t\t\tImporting images for batch")
+ image_data = im_lib(batch_size,sequential=True)
+ print("\t\t\tDone")
+
+ print("\t\t\tComputing responses for batch")
+ batch_output = self.tf_sess.run(self.input_unit_vector,feed_dict={self.input:image_data})
+ new_weights[:,n*batch_size:(n+1)*batch_size] = batch_output.T
+
+ print("\t\t\tDone")
+
+ if self.K%batch_size!=0:
+ last_batch_size = self.K%batch_size
+ print("\t\tbatch: ", n+1, " Total features: ", (n+1)*batch_size)
+ print("\t\t\tImporting images for batch")
+ image_data = im_lib(last_batch_size,sequential=True)
+ print("\t\t\tDone")
+
+ print("\t\t\tComputing responses for batch")
+ batch_output = self.tf_sess.run(self.input_unit_vector,feed_dict={self.input:image_data})
+ new_weights[:,-last_batch_size:] = batch_output.T
+
+ new_weights = new_weights/np.sqrt(np.maximum(np.sum(new_weights**2,axis=0),1e-12))
+
+ self.tf_sess.run(self.weights.assign(new_weights))
+
+ print("")
+ print("Saving weights to file ", self.weight_file)
+ weight_h5 = h5py.File(self.weight_file,'a')
+ weight_h5[self.node_name]['weights'][...] = new_weights
+ weight_h5[self.node_name]['train_state'][...] = True
+ weight_h5.close()
+
+ def get_compute_ops(self,unit_table=None):
+
+ compute_list = []
+
+ if unit_table is not None:
+            # a ViewTunedLayer has a single output op, whatever units are requested
+            compute_list = [self.output]
+
+ else:
+ unit_table = pd.DataFrame([[self.node_name]], columns=['node'])
+ compute_list = [self.output]
+
+ return unit_table, compute_list
+
+
+
+def test_ViewTunedLayer():
+
+ from hmouse_test import hmouse
+
+ image_dir = '/Users/michaelbu/Code/H-MOUSE/ILSVRC2015/Data/DET/test'
+ image_shape = (256,256)
+ weight_file_prefix = 'S2b_weights_500'
+
+ print("Configuring HMAX network")
+ hm = hmouse('config/nodes.csv','config/node_types.csv')
+
+ for node in hm.nodes:
+ print(node, " num_units = ", hm.nodes[node].num_units)
+
+    s4 = ViewTunedLayer('s4',10,'',hm.nodes['c1'],hm.nodes['c2'],hm.nodes['c2b']) #,hm.nodes['c3'])
+
+ im_lib = Image_Library(image_dir,new_size=image_shape)
+ image_data = im_lib(1)
+
+ print(s4.tf_sess.run(tf.shape(s4.input_unit_vector),feed_dict={s4.input:image_data}))
+ print(s4.tf_sess.run(tf.shape(s4.weights)))
+
+ print(s4.compute_output(image_data).shape)
+
+ #s4.train(image_dir,batch_size=10,image_shape=image_shape,save_file='s4_test_weights.pkl')
+
+
+
+
+if __name__=='__main__':
+
+ test_ViewTunedLayer()
diff --git a/bmtk-vb/bmtk/simulator/mintnet/hmax/__init__.py b/bmtk-vb/bmtk/simulator/mintnet/hmax/__init__.py
new file mode 100644
index 0000000..44200f2
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/hmax/__init__.py
@@ -0,0 +1,28 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from . import S_Layer
+from . import S1_Layer
+from . import Sb_Layer
+from . import C_Layer
+from . import ViewTunedLayer
+from . import hmax
diff --git a/bmtk-vb/bmtk/simulator/mintnet/hmax/hmax.py b/bmtk-vb/bmtk/simulator/mintnet/hmax/hmax.py
new file mode 100644
index 0000000..c770ec6
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/mintnet/hmax/hmax.py
@@ -0,0 +1,432 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import sys
+import json
+from .S1_Layer import S1_Layer
+from .C_Layer import C_Layer
+from .S_Layer import S_Layer
+from .Sb_Layer import Sb_Layer
+from .ViewTunedLayer import ViewTunedLayer
+from .Readout_Layer import Readout_Layer
+import tensorflow as tf
+import os
+import h5py
+import pandas as pd
+
+from bmtk.simulator.mintnet.Image_Library import Image_Library
+import matplotlib.pyplot as plt
+
+class hmax (object):
+
+ def __init__(self, configuration, name=None): #,num_cores=8):
+ self.name = name
+
+ if os.path.isdir(configuration):
+ # If configuration is a directory look for a config-file inside it.
+            self.config_file = os.path.join(configuration, 'config_' + os.path.basename(configuration) + '.json')
+ if self.name is None:
+ self.name = os.path.basename(configuration)
+
+ elif os.path.isfile(configuration):
+ # If configuration is a json file
+ if self.name is None:
+ raise Exception("A name is required for configuration parameters")
+ self.config_file = configuration
+
+ with open(self.config_file,'r') as f:
+ self.config_data = json.loads(f.read())
+
+ self.config_dir = os.path.dirname(os.path.abspath(configuration))
+ self.train_state_file = self.__get_config_file(self.config_data['train_state_file'])
+ self.image_dir = self.__get_config_file(self.config_data['image_dir'])
+
+ # Find, and create if necessary, the output directory
+ if 'output_dir' in self.config_data:
+ self.output_dir = self.__get_config_file(self.config_data['output_dir'])
+ else:
+ self.output_dir = os.path.join(self.config_dir, 'output')
+
+ if not os.path.exists(self.output_dir):
+ os.makedirs(self.output_dir)
+
+ with open(self.train_state_file, 'r') as f:
+ self.train_state = json.loads(f.read())
+
+ # get the nodes
+ models_file = self.__get_config_file(self.config_data['network']['node_types'])
+ nodes_file = self.__get_config_file(self.config_data['network']['nodes'])
+ self.__nodes_table = self.__build_nodes_table(nodes_file, models_file, self.config_data)
+
+ # Read the connections
+ self.nodes = {}
+ self.train_order = []
+
+ edges_file = self.__get_config_file(self.config_data['network']['edges'])
+ for (node_name, input_node, node_dict) in self.__get_edges(edges_file, self.config_data):
+ model_class = self.__nodes_table[node_name]['model_id']
+
+ print("Constructing node: ", node_name)
+ if model_class=='S1_Layer':
+ node_type = S1_Layer
+ freq_channel_params = node_dict['freq_channel_params']
+ input_shape = node_dict['input_shape']
+ self.input_shape = input_shape
+ orientations = node_dict['orientations']
+
+ self.nodes[node_name] = node_type(node_name,input_shape,freq_channel_params,orientations) #,num_cores=num_cores)
+ #writer = tf.train.SummaryWriter('tmp/hmax', self.nodes['s1'].tf_sess.graph_def)
+ #merged = tf.merge_all_summaries()
+
+ #writer.add_summary(self.nodes[node_name].tf_sess.run(merged),0)
+
+ elif model_class=='C_Layer':
+ node_type = C_Layer
+ bands = node_dict['bands']
+
+
+ self.nodes[node_name] = node_type(node_name,self.nodes[input_node],bands)
+ #writer = tf.train.SummaryWriter('tmp/hmax', self.nodes['s1'].tf_sess.graph_def)
+
+ elif model_class=='S_Layer':
+ node_type = S_Layer
+ K = node_dict['K']
+ weight_file = self.__get_config_file(node_dict['weight_file']) if 'weight_file' in node_dict else None
+ pool_size = node_dict['pool_size']
+ grid_size = node_dict['grid_size']
+ self.train_order += [node_name]
+
+ self.nodes[node_name] = node_type(node_name, self.nodes[input_node], grid_size, pool_size,K,
+ file_name=weight_file)
+
+ elif model_class=='Sb_Layer':
+ node_type = Sb_Layer
+ K = node_dict['K']
+ weight_file = self.__get_config_file(node_dict['weight_file']) if 'weight_file' in node_dict else None
+ pool_size = node_dict['pool_size']
+ grid_size = node_dict['grid_size']
+
+ self.train_order += [node_name]
+
+ self.nodes[node_name] = node_type(node_name,self.nodes[input_node],grid_size,pool_size,K,file_name=weight_file)
+
+ elif model_class=='ViewTunedLayer':
+ node_type = ViewTunedLayer
+ K = node_dict['K']
+ input_nodes = node_dict['inputs']
+ input_nodes = [self.nodes[node] for node in input_nodes]
+ weight_file = self.__get_config_file(node_dict['weight_file']) if 'weight_file' in node_dict else None
+ alt_image_dir = node_dict['alt_image_dir']
+
+ self.train_order += [node_name]
+
+ #print "alt_image_dir=",alt_image_dir
+ self.nodes[node_name] = node_type(node_name,K,alt_image_dir,*input_nodes,file_name=weight_file)
+
+ elif model_class=='Readout_Layer':
+ node_type = Readout_Layer
+ K = node_dict['K']
+ input_nodes = self.nodes[input_node]
+                weight_file = self.__get_config_file(node_dict['weight_file']) if 'weight_file' in node_dict else None
+                if weight_file=='': weight_file=None
+ alt_image_dir = node_dict['alt_image_dir']
+ lam = node_dict['lam']
+
+ self.train_order += [node_name]
+
+ self.nodes[node_name] = node_type(node_name,self.nodes[input_node],K,lam,alt_image_dir,file_name=weight_file)
+
+ else:
+ raise Exception("Unknown model class {}".format(model_class))
+
+ # print "Done"
+ # print
+
+ #nfhandle.close()
+
+
+
+        self.node_names = list(self.nodes.keys())
+
+ self.input_shape = (self.nodes['s1'].input_shape[1], self.nodes['s1'].input_shape[2])
+
+ print("Done")
+ #writer = tf.train.SummaryWriter('tmp/hmax', self.nodes['s1'].tf_sess.graph_def)
+
+
+ def __build_nodes_table(self, nodes_csv, models_csv, config):
+ models_df = pd.read_csv(models_csv, sep=' ')
+ nodes_df = pd.read_csv(nodes_csv, sep=' ')
+ nodes_full = pd.merge(left=nodes_df, right=models_df, on='model_id')
+ nodes_table = {r['id']: {'model_id': r['model_id'], 'python_object': r['python_object']}
+ for _, r in nodes_full.iterrows() }
+
+ return nodes_table
+
+ def __get_edges(self, edges_csv, config):
+ def parse_query(query_str):
+ if query_str == '*' or query_str == 'None':
+ return None
+ elif query_str.startswith('id=='):
+ return query_str[5:-1]
+ else:
+ raise Exception('Unknown query string {}'.format(query_str))
+
+ # location where config files are located
+ params_dir = self.__get_config_file(config.get('node_config_dir', ''))
+
+ edges_df = pd.read_csv(edges_csv, sep=' ')
+ edges = []
+ for _, row in edges_df.iterrows():
+ # find source and target
+ source = parse_query(row['source_query'])
+ target = parse_query(row['target_query'])
+
+ # load the parameters from the file
+ params_file = os.path.join(params_dir, row['params_file'])
+ params = json.load(open(params_file, 'r'))
+
+ # Add to list
+ edges.append((target, source, params))
+
+ # TODO: check list and reorder to make sure the layers are in a valid order
+
+ # return the edges. Should we use a generator?
+ return edges
+
+ def __get_config_file(self, fpath):
+ if os.path.isabs(fpath):
+ return fpath
+ else:
+ return os.path.join(self.config_dir, fpath)
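+
+    # Illustrative config skeleton (keys as consumed above; values are placeholders):
+    # {
+    #   "train_state_file": "train_state.json",
+    #   "image_dir": "images",
+    #   "output_dir": "output",
+    #   "batch_size": 10,
+    #   "node_config_dir": "config",
+    #   "network": {"node_types": "node_types.csv",
+    #               "nodes": "nodes.csv",
+    #               "edges": "edges.csv"}
+    # }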
+
+
+
+ @classmethod
+ def load(cls, config_dir, name=None):
+ return cls(config_dir, name)
+
+ def train(self): #,alt_image_dict=None):
+
+ for node in self.train_order:
+ if not self.train_state.get(node, False):
+ print("Training Node: ", node)
+
+ if hasattr(self.nodes[node],'alt_image_dir') and self.nodes[node].alt_image_dir!='':
+ print("\tUsing alternate image directory: ", self.nodes[node].alt_image_dir) # alt_image_dict[node]
+ self.nodes[node].train(self.nodes[node].alt_image_dir,batch_size=self.config_data['batch_size'],image_shape=self.input_shape)
+ self.train_state[node]=True
+ else:
+ print("\tUsing default image directory: ", self.image_dir)
+ self.nodes[node].train(self.image_dir,batch_size=self.config_data['batch_size'],image_shape=self.input_shape)
+ self.train_state[node]=True
+
+
+ # if node not in alt_image_dict:
+ # print "\tUsing default image directory: ", image_dir
+ # self.nodes[node].train(image_dir,batch_size=self.config_data['batch_size'],image_shape=self.input_shape)
+ # self.train_state[node]=True
+ # else:
+ # print "\tUsing alternate image directory: ", alt_image_dict[node]
+ # self.nodes[node].train(alt_image_dict[node],batch_size=self.config_data['batch_size'],image_shape=self.input_shape)
+ # self.train_state[node]=True
+
+ print("Done")
+
+        with open(self.train_state_file, 'w') as f:  # write back to the resolved path
+ f.write(json.dumps(self.train_state))
+
+
+ def run_stimulus(self,stimulus, node_table=None, output_file='output'):
+ '''stimulus is an instance of one of the mintnet.Stimulus objects, i.e. LocallySparseNoise'''
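+        # Output layout (as written below): one float32 dataset per node_table
+        # row, named by the row index, plus 'stim_template', 'node_table',
+        # 'stim_table' and, when the stimulus provides them, 'labels'.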
+
+ if output_file[-3:]!=".ic":
+ output_file = output_file+".ic" # add *.ic suffix if not already there
+
+ stim_template = stimulus.get_image_input(new_size=self.input_shape, add_channels=True)
+
+ print("Creating new output file: ", output_file, " (and removing any previous one)")
+ if os.path.exists(output_file):
+ os.remove(output_file)
+ output_h5 = h5py.File(output_file,'w')
+
+ T, y, x, K = stim_template.shape
+        all_nodes = list(self.nodes.keys())  # list, so it can be indexed below
+
+ if node_table is None: # just compute everything and return it all; good luck!
+
+ new_node_table = pd.DataFrame(columns=['node','band'])
+
+ compute_list = []
+ for node in all_nodes:
+
+ add_to_node_table, new_compute_list = self.nodes[node].get_compute_ops()
+ new_node_table = new_node_table.append(add_to_node_table,ignore_index=True)
+ compute_list += new_compute_list
+ else:
+ compute_list = []
+
+            new_node_table = node_table.sort_values('node').reset_index(drop=True)
+
+            for node in all_nodes:
+                unit_table = new_node_table[new_node_table['node']==node]
+ if (new_node_table['node']==node).any():
+ _, new_compute_list = self.nodes[node].get_compute_ops(unit_table=unit_table)
+
+ compute_list += new_compute_list
+
+
+ # create datasets in hdf5 file from node_table, with data indexed by table index
+ for i, row in new_node_table.iterrows():
+
+ output_shape = tuple([T] + [ int(x) for x in compute_list[i].get_shape()[1:]])
+ output_h5.create_dataset(str(i), output_shape, dtype=np.float32)
+
+
+
+ batch_size = self.config_data['batch_size']
+        num_batches = T//batch_size
+        if T%batch_size!=0:
+            num_batches += 1
+
+ for i in range(num_batches):
+ sys.stdout.write( '\r{0:.02f}'.format(float(i)*100/num_batches)+'% done')
+ sys.stdout.flush()
+ output_list = self.nodes[all_nodes[0]].tf_sess.run(compute_list,feed_dict={self.nodes[all_nodes[0]].input: stim_template[i*batch_size:(i+1)*batch_size]})
+
+ for io, output in enumerate(output_list):
+ # dataset_string = node_table['node'].loc[io] + "/" + str(int(node_table['band'].loc[io]))
+ # output_h5[dataset_string][i*batch_size:(i+1)*batch_size] = output
+
+ output_h5[str(io)][i*batch_size:(i+1)*batch_size] = output
+ sys.stdout.write( '\r{0:.02f}'.format(float(100))+'% done')
+ sys.stdout.flush()
+
+ output_h5['stim_template'] = stimulus.stim_template
+ output_h5.close()
+ new_node_table.to_hdf(output_file,'node_table')
+ if hasattr(stimulus,'label_dataframe') and stimulus.label_dataframe is not None:
+ stimulus.label_dataframe.to_hdf(output_file,'labels')
+ stimulus.stim_table.to_hdf(output_file,'stim_table')
+
+
+ def get_exemplar_node_table(self):
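+        '''Build a unit_table selecting the spatially central unit (y/2, x/2) of
+        every band of every layer; layers without bands get a node-only row.'''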
+
+ node_table = pd.DataFrame(columns=['node','band','y','x'])
+ for node in self.nodes:
+ node_output = self.nodes[node].output
+ if hasattr(self.nodes[node],'band_shape'):
+ for band in node_output:
+                    y,x = [int(v) for v in node_output[band].get_shape()[1:3]]
+                    y //= 2
+                    x //= 2
+ new_row = pd.DataFrame([[self.nodes[node].node_name, band, y, x]], columns=['node','band','y','x'])
+ node_table = node_table.append(new_row, ignore_index=True)
+ else:
+ new_row = pd.DataFrame([[self.nodes[node].node_name]], columns=['node'])
+ node_table = node_table.append(new_row, ignore_index=True)
+
+ return node_table
+
+
+ def generate_output(self):
+ try:
+ im_lib = Image_Library(self.image_dir,new_size=self.input_shape)
+ except OSError as e:
+ print('''A repository of images (such as a collection from ImageNet - http://www.image-net.org) is required for input.
+ An example would be too large to include in the isee_engine itself.
+ Set the path for this image repository in hmax/config_hmax.json''')
+ raise e
+
+ image_data = im_lib(1)
+
+ fig, ax = plt.subplots(1)
+ ax.imshow(image_data[0,:,:,0],cmap='gray')
+
+ fig.savefig(os.path.join(self.output_dir,'input_image'))
+ plt.close(fig)
+
+ nodes = self.nodes
+
+ for node_to_plot in nodes:
+ print("Generating output for node ", node_to_plot)
+ node_output_dir = os.path.join(self.output_dir,node_to_plot)
+
+ if not os.path.exists(node_output_dir):
+ os.makedirs(node_output_dir)
+
+ if type(self.nodes[node_to_plot])==ViewTunedLayer:
+ print("ViewTunedLayer")
+ self.nodes[node_to_plot].compute_output(image_data)
+ continue
+
+ if type(self.nodes[node_to_plot])==Readout_Layer:
+ print("Readout_Layer")
+ self.nodes[node_to_plot].compute_output(image_data)
+ continue
+
+ num_bands = len(nodes[node_to_plot].output)
+
+ if type(self.nodes[node_to_plot])==S1_Layer or node_to_plot=='c1':
+ #print "Yes, this is an S1_Layer"
+ num_filters_to_plot = 4
+ fig, ax = plt.subplots(num_filters_to_plot,num_bands,figsize=(20,8))
+ #fig2,ax2 = plt.subplots(num_filters_to_plot,num_bands,figsize=(20,8))
+ else:
+ num_filters_to_plot = 8
+ fig, ax = plt.subplots(num_filters_to_plot,num_bands,figsize=(20,8))
+
+ for band in range(num_bands):
+ result = nodes[node_to_plot].compute_output(image_data,band)
+ #print result[band].shape
+ n, y,x,K = result.shape
+
+ for k in range(num_filters_to_plot):
+
+ if num_bands!=1:
+ ax[k,band].imshow(result[0,:,:,k],interpolation='nearest',cmap='gray')
+ ax[k,band].axis('off')
+ else:
+ ax[k].imshow(result[0,:,:,k],interpolation='nearest',cmap='gray')
+ ax[k].axis('off')
+
+ # if type(self.nodes[node_to_plot])==S1_Layer:
+ # for k in range(num_filters_to_plot):
+
+ # ki = 4+k
+ # ax2[k,band].imshow(result[0,:,:,ki],interpolation='nearest',cmap='gray')
+ # ax2[k,band].axis('off')
+
+ if type(self.nodes[node_to_plot])==S1_Layer:
+ fig.savefig(os.path.join(node_output_dir,'output_phase0.pdf'))
+ #fig2.savefig(os.path.join(node_output_dir,'output_phase1.pdf'))
+ #plt.close(fig2)
+ else:
+ fig.savefig(os.path.join(node_output_dir,'output.pdf'))
+
+ plt.close(fig)
diff --git a/bmtk-vb/bmtk/simulator/pointnet/__init__.py b/bmtk-vb/bmtk/simulator/pointnet/__init__.py
new file mode 100644
index 0000000..2ad957d
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/__init__.py
@@ -0,0 +1,26 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from . import default_setters
+from .config import Config
+from .pointnetwork import PointNetwork
+from .pointsimulator import PointSimulator
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/pointnet/config.py b/bmtk-vb/bmtk/simulator/pointnet/config.py
new file mode 100644
index 0000000..a6644d5
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/config.py
@@ -0,0 +1,48 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+
+from bmtk.simulator.core.config import ConfigDict
+from bmtk.simulator.pointnet.io_tools import io
+
+
+# TODO: Implement pointnet validator and create json schema for pointnet
+def from_json(config_file, validate=False):
+ conf_dict = ConfigDict.from_json(config_file)
+ conf_dict.io = io
+ return conf_dict
+
+def from_dict(config_dict, validate=False):
+    conf_dict = ConfigDict.from_dict(config_dict)
+ conf_dict.io = io
+ return conf_dict
+
+class Config(ConfigDict):
+ def __init__(self, dict_obj):
+ super(Config, self).__init__(dict_obj)
+ self._io = io
+
+    @property
+    def io(self):
+        return self._io
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/pointnet/default_setters/__init__.py b/bmtk-vb/bmtk/simulator/pointnet/default_setters/__init__.py
new file mode 100644
index 0000000..b07cc2d
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/default_setters/__init__.py
@@ -0,0 +1,2 @@
+from . import synaptic_weights
+from . import synapse_models
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/pointnet/default_setters/synapse_models.py b/bmtk-vb/bmtk/simulator/pointnet/default_setters/synapse_models.py
new file mode 100644
index 0000000..8e94328
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/default_setters/synapse_models.py
@@ -0,0 +1,16 @@
+from bmtk.simulator.pointnet.pyfunction_cache import add_synapse_model
+
+
+def static_synapse(edge):
+ model_params = {
+ 'model': 'static_synapse',
+ 'delay': edge.delay,
+ 'weight': edge.syn_weight(None, None)
+ }
+
+ model_params.update(edge.dynamics_params)
+ return model_params
+
+
+add_synapse_model(static_synapse, 'default', overwrite=False)
+add_synapse_model(static_synapse, overwrite=False)
\ No newline at end of file
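+
+# A custom model setter can be registered the same way. An illustrative
+# (hypothetical) sketch, assuming 'stdp_synapse' is available in the local
+# NEST install:
+#
+#   def stdp_synapse(edge):
+#       model_params = {'model': 'stdp_synapse', 'delay': edge.delay,
+#                       'weight': edge.syn_weight(None, None)}
+#       model_params.update(edge.dynamics_params)
+#       return model_params
+#
+#   add_synapse_model(stdp_synapse, 'stdp', overwrite=True)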
diff --git a/bmtk-vb/bmtk/simulator/pointnet/default_setters/synaptic_weights.py b/bmtk-vb/bmtk/simulator/pointnet/default_setters/synaptic_weights.py
new file mode 100644
index 0000000..4c66ae1
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/default_setters/synaptic_weights.py
@@ -0,0 +1,8 @@
+from bmtk.simulator.pointnet.pyfunction_cache import add_weight_function
+
+
+def default_weight_fnc(edge_props, source_node, target_node):
+ return edge_props['syn_weight']*edge_props.nsyns
+
+
+add_weight_function(default_weight_fnc, 'default_weight_fnc', overwrite=False)
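+
+# Other weight functions follow the same signature. An illustrative
+# (hypothetical) sketch that halves every weight:
+#
+#   def half_weight_fnc(edge_props, source_node, target_node):
+#       return 0.5*edge_props['syn_weight']*edge_props.nsyns
+#
+#   add_weight_function(half_weight_fnc, 'half_weight_fnc', overwrite=True)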
diff --git a/bmtk-vb/bmtk/simulator/pointnet/io_tools.py b/bmtk-vb/bmtk/simulator/pointnet/io_tools.py
new file mode 100644
index 0000000..b5ea9ea
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/io_tools.py
@@ -0,0 +1,122 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+"""
+Functions for logging, writing and reading from file.
+
+"""
+import nest
+
+from bmtk.simulator.core.io_tools import IOUtils
+
+# Users should be able to run NEST whether or not it was compiled with MPI support, which means checking if the
+# method nest.SyncProcesses (aka MPI Barrier) exists. If it doesn't, try getting a barrier from mpi4py.
+rank = nest.Rank()
+n_nodes = nest.NumProcesses()
+try:
+ barrier = nest.SyncProcesses
+except AttributeError as exc:
+ try:
+ from mpi4py import MPI
+ barrier = MPI.COMM_WORLD.Barrier
+    except ImportError:
+ # Barrier is just an empty function, no problem if running on one core.
+ barrier = lambda: None
+
+
+class NestIOUtils(IOUtils):
+ def __init__(self):
+ super(NestIOUtils, self).__init__()
+ self.mpi_rank = rank
+ self.mpi_size = n_nodes
+
+ def barrier(self):
+ barrier()
+
+ def quiet_simulator(self):
+ nest.set_verbosity('M_QUIET')
+
+ def setup_output_dir(self, config_dir, log_file, overwrite=True):
+        super(NestIOUtils, self).setup_output_dir(config_dir, log_file, overwrite=overwrite)
+ if n_nodes > 1 and rank == 0:
+ io.log_info('Running NEST with MPI ({} cores)'.format(n_nodes))
+
+
+io = NestIOUtils()
+
+
+'''
+log_format = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
+pointnet_logger = logging.getLogger()
+pointnet_logger.setLevel(logging.DEBUG)
+
+console_handler = logging.StreamHandler(sys.stdout)
+console_handler.setFormatter(log_format)
+pointnet_logger.addHandler(console_handler)
+
+
+def collect_gdf_files(gdf_dir, output_file, nest_id_map, overwrite=False):
+
+ if n_nodes > 0:
+ # Wait until all nodes are finished
+ barrier()
+
+ if rank != 0:
+ return
+
+ log("Saving spikes to file...")
+ spikes_out = output_file
+ if os.path.exists(spikes_out) and not overwrite:
+ return
+
+ gdf_files_globs = '{}/*.gdf'.format(gdf_dir)
+ gdf_files = glob.glob(gdf_files_globs)
+ with open(spikes_out, 'w') as spikes_file:
+ csv_writer = csv.writer(spikes_file, delimiter=' ')
+ for gdffile in gdf_files:
+ spikes_df = pd.read_csv(gdffile, names=['gid', 'time', 'nan'], sep='\t')
+ for _, row in spikes_df.iterrows():
+ csv_writer.writerow([row['time'], nest_id_map[int(row['gid'])]])
+ os.remove(gdffile)
+ log("done.")
+
+
+def setup_output_dir(config):
+ if rank == 0:
+ try:
+ output_dir = config['output']['output_dir']
+ if os.path.exists(output_dir):
+ shutil.rmtree(output_dir)
+ os.makedirs(output_dir)
+
+ if 'log_file' in config['output']:
+ file_logger = logging.FileHandler(config['output']['log_file'])
+ file_logger.setFormatter(log_format)
+ pointnet_logger.addHandler(file_logger)
+ log('Created a log file')
+
+ except Exception as exc:
+ print(exc)
+
+ barrier()
+'''
+
diff --git a/bmtk-vb/bmtk/simulator/pointnet/modules/__init__.py b/bmtk-vb/bmtk/simulator/pointnet/modules/__init__.py
new file mode 100644
index 0000000..962ea78
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/modules/__init__.py
@@ -0,0 +1,2 @@
+from .record_spikes import SpikesMod
+from .multimeter_reporter import MultimeterMod
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/pointnet/modules/multimeter_reporter.py b/bmtk-vb/bmtk/simulator/pointnet/modules/multimeter_reporter.py
new file mode 100644
index 0000000..12d86ac
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/modules/multimeter_reporter.py
@@ -0,0 +1,110 @@
+import os
+import glob
+import pandas as pd
+from bmtk.utils.io.cell_vars import CellVarRecorder
+from bmtk.simulator.pointnet.io_tools import io
+
+import nest
+
+
+try:
+ MPI_RANK = nest.Rank()
+ N_HOSTS = nest.NumProcesses()
+
+except Exception as e:
+ MPI_RANK = 0
+ N_HOSTS = 1
+
+
+class MultimeterMod(object):
+ def __init__(self, tmp_dir, file_name, variable_name, cells, tstart=None, tstop=None, interval=None, to_h5=True,
+ delete_dat=True, **opt_params):
+ """For recording neuron properties using a NEST multimeter object
+
+        :param tmp_dir: output directory
+ :param file_name: Name of (SONATA hdf5) file that will be saved to
+ :param variable_name: A list of the variable(s) being recorded. Must be valid according to the cells
+ :param cells: A node-set or list of gids to record from
+ :param tstart: Start time of the recording (if None will default to sim.tstart)
+ :param tstop: Stop time of recording (if None will default to sim.tstop)
+ :param interval: Recording time step (if None will default to sim.dt)
+ :param to_h5: True to save to sonata .h5 format (default: True)
+ :param delete_dat: True to delete the .dat files created by NEST (default True)
+ :param opt_params:
+ """
+
+ self._output_dir = tmp_dir
+ self._file_name = file_name if os.path.isabs(file_name) else os.path.join(self._output_dir, file_name)
+ self._variable_name = variable_name
+ self._node_set = cells
+ self._tstart = tstart
+ self._tstop = tstop
+ self._interval = interval
+ self._to_h5 = to_h5
+ self._delete_dat = delete_dat
+
+ self._gids = None
+ self._nest_ids = None
+ self._multimeter = None
+
+ self._min_delay = 1.0 # Required for calculating steps recorded
+
+ self.__output_label = os.path.join(self._output_dir, '__bmtk_nest_{}'.format(os.path.basename(self._file_name)))
+ self._var_recorder = CellVarRecorder(self._file_name, self._output_dir, self._variable_name, buffer_data=False)
+
+ def initialize(self, sim):
+ self._gids = list(sim.net.get_node_set(self._node_set).gids())
+ self._nest_ids = [sim.net._gid2nestid[gid] for gid in self._gids]
+
+ self._tstart = self._tstart or sim.tstart
+ self._tstop = self._tstop or sim.tstop
+ self._interval = self._interval or sim.dt
+ self._multimeter = nest.Create('multimeter',
+ params={'interval': self._interval, 'start': self._tstart, 'stop': self._tstop,
+ 'to_file': True, 'to_memory': False,
+ 'withtime': True,
+ 'record_from': self._variable_name,
+ 'label': self.__output_label})
+
+ nest.Connect(self._multimeter, self._nest_ids)
+
+ def finalize(self, sim):
+ io.barrier() # Makes sure all nodes finish, but not sure if actually required by nest
+
+        # min_delay needs to be fetched after the simulation, otherwise the value will be off. There also seems to
+        # be an MPI barrier inside GetKernelStatus.
+ self._min_delay = nest.GetKernelStatus('min_delay')
+ # print self._min_delay
+ if self._to_h5 and MPI_RANK == 0:
+ for gid in self._gids:
+ self._var_recorder.add_cell(gid, sec_list=[0], seg_list=[0.0])
+
+ # Initialize hdf5 file including preallocated data block of recorded variables
+            # Unfortunately with NEST the final recorded time-step can't be calculated in advance, and even with the
+            # same min/max_delay it can differ. We need to read the output file to get n_steps.
+ def get_var_recorder(node_recording_df):
+ if not self._var_recorder.is_initialized:
+ self._var_recorder.tstart = node_recording_df['time'].min()
+ self._var_recorder.tstop = node_recording_df['time'].max()
+ self._var_recorder.dt = self._interval
+ self._var_recorder.initialize(len(node_recording_df))
+
+ return self._var_recorder
+
+ gid_map = sim.net._nestid2gid
+ for nest_file in glob.glob('{}*'.format(self.__output_label)):
+ report_df = pd.read_csv(nest_file, index_col=False, names=['nest_id', 'time']+self._variable_name,
+ sep='\t')
+ for grp_id, grp_df in report_df.groupby(by='nest_id'):
+ gid = gid_map[grp_id]
+ vr = get_var_recorder(grp_df)
+ for var_name in self._variable_name:
+ vr.record_cell_block(gid, var_name, grp_df[var_name])
+
+ if self._delete_dat:
+ # remove csv file created by nest
+ os.remove(nest_file)
+
+ self._var_recorder.close()
+
+ io.barrier()
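+
+# Example (a minimal sketch; the directory, file name and node-set are
+# assumptions): MultimeterMod is normally built from a SONATA "reports"
+# config entry, but it can also be constructed directly and registered with
+# sim.add_mod(...). 'V_m' is NEST's membrane-potential variable.
+if __name__ == '__main__':
+    mod = MultimeterMod(tmp_dir='output', file_name='membrane_potential.h5',
+                        variable_name=['V_m'], cells={'population': 'v1'})
+    # sim.add_mod(mod)  # initialize()/finalize() are then driven by sim.run()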
diff --git a/bmtk-vb/bmtk/simulator/pointnet/modules/record_spikes.py b/bmtk-vb/bmtk/simulator/pointnet/modules/record_spikes.py
new file mode 100644
index 0000000..9791fdc
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/modules/record_spikes.py
@@ -0,0 +1,90 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import glob
+from bmtk.utils.io.spike_trains import SpikeTrainWriter
+from bmtk.simulator.pointnet.io_tools import io
+
+import nest
+
+
+MPI_RANK = nest.Rank()
+N_HOSTS = nest.NumProcesses()
+
+
+class SpikesMod(object):
+ """Module use for saving spikes
+
+ """
+
+ def __init__(self, tmp_dir, spikes_file_csv=None, spikes_file=None, spikes_file_nwb=None, spikes_sort_order=None):
+ def _get_path(file_name):
+            # Unless file_name is an absolute path, place it in the output (tmp) directory
+ if file_name is None:
+ return None
+ return file_name if os.path.isabs(file_name) else os.path.join(tmp_dir, file_name)
+
+ self._csv_fname = _get_path(spikes_file_csv)
+ self._h5_fname = _get_path(spikes_file)
+ self._nwb_fname = _get_path(spikes_file_nwb)
+
+ self._tmp_dir = tmp_dir
+ self._tmp_file_base = 'tmp_spike_times'
+ self._spike_labels = os.path.join(self._tmp_dir, self._tmp_file_base)
+
+ self._spike_writer = SpikeTrainWriter(tmp_dir=tmp_dir, mpi_rank=MPI_RANK, mpi_size=N_HOSTS)
+ self._spike_writer.delimiter = '\t'
+ self._spike_writer.gid_col = 0
+ self._spike_writer.time_col = 1
+ self._sort_order = spikes_sort_order
+
+ self._spike_detector = None
+
+ def initialize(self, sim):
+ self._spike_detector = nest.Create("spike_detector", 1, {'label': self._spike_labels, 'withtime': True,
+ 'withgid': True, 'to_file': True})
+
+ for pop_name, pop in sim._graph._nestid2nodeid_map.items():
+ nest.Connect(list(pop.keys()), self._spike_detector)
+
+ def finalize(self, sim):
+ if MPI_RANK == 0:
+ for gdf_file in glob.glob(self._spike_labels + '*.gdf'):
+ self._spike_writer.add_spikes_file(gdf_file)
+ io.barrier()
+
+ gid_map = sim._graph._nestid2gid
+
+ if self._csv_fname is not None:
+ self._spike_writer.to_csv(self._csv_fname, sort_order=self._sort_order, gid_map=gid_map)
+ io.barrier()
+
+ if self._h5_fname is not None:
+ self._spike_writer.to_hdf5(self._h5_fname, sort_order=self._sort_order, gid_map=gid_map)
+ io.barrier()
+
+ if self._nwb_fname is not None:
+ self._spike_writer.to_nwb(self._nwb_fname, sort_order=self._sort_order, gid_map=gid_map)
+ io.barrier()
+
+ self._spike_writer.close()
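+
+# Example (a minimal sketch with hypothetical file names): SpikesMod is
+# normally created from the "output" section of the config; built directly it
+# writes spikes as CSV and HDF5 into tmp_dir unless absolute paths are given.
+if __name__ == '__main__':
+    mod = SpikesMod(tmp_dir='output', spikes_file_csv='spikes.csv',
+                    spikes_file='spikes.h5', spikes_sort_order='time')
+    # sim.add_mod(mod)  # initialize()/finalize() are driven by the simulator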
diff --git a/bmtk-vb/bmtk/simulator/pointnet/pointnetwork.py b/bmtk-vb/bmtk/simulator/pointnet/pointnetwork.py
new file mode 100644
index 0000000..0cc781f
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/pointnetwork.py
@@ -0,0 +1,176 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+import functools
+import nest
+
+from bmtk.simulator.core.simulator_network import SimNetwork
+from bmtk.simulator.pointnet.sonata_adaptors import PointNodeAdaptor, PointEdgeAdaptor
+from bmtk.simulator.pointnet import pyfunction_cache
+from bmtk.simulator.pointnet.io_tools import io
+
+
+class PointNetwork(SimNetwork):
+ def __init__(self, **properties):
+ super(PointNetwork, self).__init__(**properties)
+ self._io = io
+
+ self.__weight_functions = {}
+ self._params_cache = {}
+
+ self._virtual_ids_map = {}
+
+ self._batch_nodes = True
+
+ self._nest_id_map = {}
+ self._nestid2nodeid_map = {}
+
+ self._nestid2gid = {}
+
+ self._nodes_table = {}
+ self._gid2nestid = {}
+
+ @property
+ def py_function_caches(self):
+ return pyfunction_cache
+
+ def __get_params(self, node_params):
+ if node_params.with_dynamics_params:
+ # TODO: use property, not name
+ return node_params['dynamics_params']
+
+ params_file = node_params[self._params_column]
+ # params_file = self._MT.params_column(node_params) #node_params['dynamics_params']
+ if params_file in self._params_cache:
+ return self._params_cache[params_file]
+ else:
+ params_dir = self.get_component('models_dir')
+ params_path = os.path.join(params_dir, params_file)
+            with open(params_path, 'r') as params_fh:
+                params_dict = json.load(params_fh)
+ self._params_cache[params_file] = params_dict
+ return params_dict
+
+ def _register_adaptors(self):
+ super(PointNetwork, self)._register_adaptors()
+ self._node_adaptors['sonata'] = PointNodeAdaptor
+ self._edge_adaptors['sonata'] = PointEdgeAdaptor
+
+ # TODO: reimplement with py_modules like in bionet
+    def add_weight_function(self, function, name=None, overwrite=True):
+        fnc_name = name if name is not None else function.__name__
+        if overwrite or fnc_name not in self.__weight_functions:
+            self.__weight_functions[fnc_name] = functools.partial(function)
+
+ def set_default_weight_function(self, function):
+ self.add_weight_function(function, 'default_weight_fnc', overwrite=True)
+
+ def get_weight_function(self, name):
+ return self.__weight_functions[name]
+
+ def build_nodes(self):
+ for node_pop in self.node_populations:
+ nid2nest_map = {}
+ nest2nid_map = {}
+ if node_pop.internal_nodes_only:
+ for node in node_pop.get_nodes():
+ node.build()
+ for nid, gid, nest_id in zip(node.node_ids, node.gids, node.nest_ids):
+ self._nestid2gid[nest_id] = gid
+ self._gid2nestid[gid] = nest_id
+ nid2nest_map[nid] = nest_id
+ nest2nid_map[nest_id] = nid
+
+ elif node_pop.mixed_nodes:
+ for node in node_pop.get_nodes():
+ if node.model_type != 'virtual':
+ node.build()
+ for nid, gid, nest_id in zip(node.node_ids, node.gids, node.nest_ids):
+ self._nestid2gid[nest_id] = gid
+ self._gid2nestid[gid] = nest_id
+ nid2nest_map[nid] = nest_id
+ nest2nid_map[nest_id] = nid
+
+ self._nest_id_map[node_pop.name] = nid2nest_map
+ self._nestid2nodeid_map[node_pop.name] = nest2nid_map
+
+ def build_recurrent_edges(self):
+ recurrent_edge_pops = [ep for ep in self._edge_populations if not ep.virtual_connections]
+ if not recurrent_edge_pops:
+ return
+
+ for edge_pop in recurrent_edge_pops:
+ src_nest_ids = self._nest_id_map[edge_pop.source_nodes]
+ trg_nest_ids = self._nest_id_map[edge_pop.target_nodes]
+ for edge in edge_pop.get_edges():
+ nest_srcs = [src_nest_ids[nid] for nid in edge.source_node_ids]
+ nest_trgs = [trg_nest_ids[nid] for nid in edge.target_node_ids]
+ nest.Connect(nest_srcs, nest_trgs, conn_spec='one_to_one', syn_spec=edge.nest_params)
+
+ def find_edges(self, source_nodes=None, target_nodes=None):
+ # TODO: Move to parent
+ selected_edges = self._edge_populations[:]
+
+ if source_nodes is not None:
+ selected_edges = [edge_pop for edge_pop in selected_edges if edge_pop.source_nodes == source_nodes]
+
+ if target_nodes is not None:
+ selected_edges = [edge_pop for edge_pop in selected_edges if edge_pop.target_nodes == target_nodes]
+
+ return selected_edges
+
+ def add_spike_trains(self, spike_trains, node_set):
+ # Build the virtual nodes
+ src_nodes = [node_pop for node_pop in self.node_populations if node_pop.name in node_set.population_names()]
+ for node_pop in src_nodes:
+ if node_pop.name in self._virtual_ids_map:
+ continue
+
+ virt_node_map = {}
+ if node_pop.virtual_nodes_only:
+ for node in node_pop.get_nodes():
+ nest_ids = nest.Create('spike_generator', node.n_nodes, {})
+ for node_id, nest_id in zip(node.node_ids, nest_ids):
+ virt_node_map[node_id] = nest_id
+ nest.SetStatus([nest_id], {'spike_times': spike_trains.get_spikes(node_id)})
+
+ elif node_pop.mixed_nodes:
+ for node in node_pop.get_nodes():
+ if node.model_type != 'virtual':
+ continue
+
+ nest_ids = nest.Create('spike_generator', node.n_nodes, {})
+ for node_id, nest_id in zip(node.node_ids, nest_ids):
+ virt_node_map[node_id] = nest_id
+ nest.SetStatus([nest_id], {'spike_times': spike_trains.get_spikes(node_id)})
+
+ self._virtual_ids_map[node_pop.name] = virt_node_map
+
+ # Create virtual synaptic connections
+ for source_reader in src_nodes:
+ for edge_pop in self.find_edges(source_nodes=source_reader.name):
+ src_nest_ids = self._virtual_ids_map[edge_pop.source_nodes]
+ trg_nest_ids = self._nest_id_map[edge_pop.target_nodes]
+ for edge in edge_pop.get_edges():
+ nest_srcs = [src_nest_ids[nid] for nid in edge.source_node_ids]
+ nest_trgs = [trg_nest_ids[nid] for nid in edge.target_node_ids]
+ nest.Connect(nest_srcs, nest_trgs, conn_spec='one_to_one', syn_spec=edge.nest_params)
diff --git a/bmtk-vb/bmtk/simulator/pointnet/pointsimulator.py b/bmtk-vb/bmtk/simulator/pointnet/pointsimulator.py
new file mode 100644
index 0000000..a434da6
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/pointsimulator.py
@@ -0,0 +1,266 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import glob
+import nest
+from six import string_types
+from six import moves
+
+from bmtk.simulator.core.simulator import Simulator
+from bmtk.simulator.pointnet.config import Config
+#import bmtk.simulator.pointnet.config as cfg
+from bmtk.simulator.pointnet.io_tools import io
+import bmtk.simulator.utils.simulation_reports as reports
+import bmtk.simulator.utils.simulation_inputs as inputs
+from bmtk.utils.io import spike_trains
+from . import modules as mods
+from bmtk.simulator.core.node_sets import NodeSet
+
+
+class PointSimulator(Simulator):
+ def __init__(self, graph, dt=0.001, overwrite=True, print_time=False):
+ self._tstop = 0.0 # simulation time
+ self._dt = dt # time step
+ self._output_dir = './output/' # directory where log and temporary output will be stored
+ self._overwrite = overwrite
+ self._block_run = False
+ self._block_size = -1
+
+ self._cells_built = False
+ self._internal_connections_built = False
+
+ self._graph = graph
+ self._external_cells = {} # dict-of-dict of external pointnet cells with keys [network_name][cell_id]
+ self._internal_cells = {} # dictionary of internal pointnet cells with cell_id as key
+ self._nest_id_map = {} # a map between NEST IDs and Node-IDs
+
+ self._spikedetector = None
+ self._spikes_file = None # File where all output spikes will be collected and saved
+ self._tmp_spikes_file = None # temporary gdf files of spike-trains
+ self._spike_trains_ds = {} # used to temporary store NWB datasets containing spike trains
+
+ self._spike_detector = None
+
+ self._mods = []
+
+ self._inputs = {} # Used to hold references to nest input objects (current_generators, etc)
+
+        # Reset the NEST kernel for a new simulation
+        # TODO: move this into its own function and make sure it is called before the network is built
+ nest.ResetKernel()
+ nest.SetKernelStatus({"resolution": self._dt, "overwrite_files": self._overwrite, "print_time": print_time})
+
+ @property
+ def tstart(self):
+ return 0.0
+
+ @property
+ def dt(self):
+ return self._dt
+
+ @property
+ def tstop(self):
+ return self._tstop
+
+ @tstop.setter
+ def tstop(self, val):
+ self._tstop = val
+
+ @property
+ def n_steps(self):
+        return int((self.tstop-self.tstart)/self.dt)
+
+ @property
+ def net(self):
+ return self._graph
+
+ @property
+ def gid_map(self):
+ return self._graph._nestid2gid
+
+ def _get_block_trial(self, duration):
+ """
+ Compute necessary number of block trials, the length of block simulation and the simulation length of the last
+ block run, if necessary.
+ """
+ if self._block_run:
+ data_res = self._block_size * self._dt
+ fn = duration / data_res
+ n = int(fn)
+ res = fn - n
+ else:
+ n = -1
+ res = -1
+ data_res = -1
+ return n, res, data_res
+
+ '''
+ def set_spikes_recordings(self):
+ # TODO: Pass in output-dir and file name to save to
+ # TODO: Allow for sorting - overwrite bionet module
+ self._spike_detector = nest.Create("spike_detector", 1, {'label': os.path.join(self.output_dir, 'tmp_spike_times'),
+ 'withtime': True, 'withgid': True, 'to_file': True})
+ # print self._spike_detector
+
+ for pop_name, pop in self._graph._nestid2nodeid_map.items():
+ # print pop.keys()
+
+ nest.Connect(pop.keys(), self._spike_detector)
+ # exit()
+ '''
+
+ def add_step_currents(self, amp_times, amp_values, node_set, input_name):
+ scg = nest.Create("step_current_generator",
+ params={'amplitude_times': amp_times, 'amplitude_values': amp_values})
+
+ if not isinstance(node_set, NodeSet):
+ node_set = self.net.get_node_set(node_set)
+
+ # Convert node set into list of gids and then look-up the nest-ids
+ nest_ids = [self.net._gid2nestid[gid] for gid in node_set.gids()]
+
+ # Attach current clamp to nodes
+ nest.Connect(scg, nest_ids, syn_spec={'delay': self.dt})
+
+ self._inputs[input_name] = nest_ids
+
+ def run(self, tstop=None):
+ if tstop is None:
+ tstop = self._tstop
+
+ for mod in self._mods:
+ mod.initialize(self)
+
+ io.barrier()
+
+ io.log_info('Starting Simulation')
+ n, res, data_res = self._get_block_trial(tstop)
+ if n > 0:
+ for r in moves.range(n):
+ nest.Simulate(data_res)
+ if res > 0:
+ nest.Simulate(res * self.dt)
+ if n < 0:
+ nest.Simulate(tstop)
+
+ io.barrier()
+ io.log_info('Simulation finished, finalizing results.')
+ for mod in self._mods:
+ mod.finalize(self)
+ io.barrier()
+ io.log_info('Done.')
+
+ def add_mod(self, mod):
+ self._mods.append(mod)
+
+ @classmethod
+ def from_config(cls, configure, graph):
+ # load the json file or object
+ if isinstance(configure, string_types):
+ config = Config.from_json(configure, validate=True)
+ elif isinstance(configure, dict):
+ config = configure
+ else:
+ raise Exception('Could not convert {} (type "{}") to json.'.format(configure, type(configure)))
+
+ if 'run' not in config:
+            raise Exception('Json file is missing "run" entry. Unable to build the pointnet simulation.')
+ run_dict = config['run']
+
+ # Get network parameters
+ # step time (dt) is set in the kernel and should be passed
+ overwrite = run_dict['overwrite_output_dir'] if 'overwrite_output_dir' in run_dict else True
+ print_time = run_dict['print_time'] if 'print_time' in run_dict else False
+ dt = run_dict['dt'] # TODO: make sure dt exists
+        network = cls(graph, dt=dt, overwrite=overwrite, print_time=print_time)
+
+ if 'output_dir' in config['output']:
+ network.output_dir = config['output']['output_dir']
+
+ if 'block_run' in run_dict and run_dict['block_run']:
+ if 'block_size' not in run_dict:
+ raise Exception('"block_run" is set to True but "block_size" not found.')
+ network._block_size = run_dict['block_size']
+
+ if 'duration' in run_dict:
+ network.tstop = run_dict['duration']
+ elif 'tstop' in run_dict:
+ network.tstop = run_dict['tstop']
+
+ # Create the output-directory, or delete existing files if it already exists
+ graph.io.log_info('Setting up output directory')
+ if not os.path.exists(config['output']['output_dir']):
+ os.mkdir(config['output']['output_dir'])
+ elif overwrite:
+ for gfile in glob.glob(os.path.join(config['output']['output_dir'], '*.gdf')):
+ os.remove(gfile)
+
+ graph.io.log_info('Building cells.')
+ graph.build_nodes()
+
+ graph.io.log_info('Building recurrent connections')
+ graph.build_recurrent_edges()
+
+ for sim_input in inputs.from_config(config):
+ node_set = graph.get_node_set(sim_input.node_set)
+ if sim_input.input_type == 'spikes':
+ spikes = spike_trains.SpikesInput.load(name=sim_input.name, module=sim_input.module,
+ input_type=sim_input.input_type, params=sim_input.params)
+ io.log_info('Build virtual cell stimulations for {}'.format(sim_input.name))
+ graph.add_spike_trains(spikes, node_set)
+
+ elif sim_input.input_type == 'current_clamp':
+ # TODO: Need to make this more robust
+ amp_times = sim_input.params.get('amplitude_times', [])
+ amp_values = sim_input.params.get('amplitude_values', [])
+
+ if 'delay' in sim_input.params:
+ amp_times.append(sim_input.params['delay'])
+ amp_values.append(sim_input.params['amp'])
+
+ if 'duration' in sim_input.params:
+ amp_times.append(sim_input.params['delay'] + sim_input.params['duration'])
+ amp_values.append(0.0)
+
+ network.add_step_currents(amp_times, amp_values, node_set, sim_input.name)
+
+ else:
+ graph.io.log_warning('Unknown input type {}'.format(sim_input.input_type))
+
+ sim_reports = reports.from_config(config)
+ for report in sim_reports:
+ if report.module == 'spikes_report':
+ mod = mods.SpikesMod(**report.params)
+
+ elif isinstance(report, reports.MembraneReport):
+                # For convenience and for compliance with the SONATA format: "membrane_report" and
+                # "multimeter_report" are the same in pointnet.
+ mod = mods.MultimeterMod(**report.params)
+
+ else:
+ graph.io.log_exception('Unknown report type {}'.format(report.module))
+
+ network.add_mod(mod)
+
+ io.log_info('Network created.')
+ return network
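+
+# Example (a typical driver script, sketched; 'config.json' is hypothetical
+# and from_config on the network side is assumed to be inherited from the
+# SimNetwork base class):
+if __name__ == '__main__':
+    from bmtk.simulator.pointnet.pointnetwork import PointNetwork
+    config = Config.from_json('config.json')
+    graph = PointNetwork.from_config(config)
+    sim = PointSimulator.from_config(config, graph)
+    sim.run()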
diff --git a/bmtk-vb/bmtk/simulator/pointnet/property_map.py b/bmtk-vb/bmtk/simulator/pointnet/property_map.py
new file mode 100644
index 0000000..dd1ecc4
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/property_map.py
@@ -0,0 +1,213 @@
+import types
+import numpy as np
+
+import nest
+
+from bmtk.simulator.pointnet.pyfunction_cache import py_modules
+from bmtk.simulator.pointnet.io_tools import io
+
+class NodePropertyMap(object):
+ def __init__(self, graph):
+ self._graph = graph
+ # TODO: Move template_cache to parent graph so it can be shared across diff populations.
+ self._template_cache = {}
+ self.node_types_table = None
+
+ self.batch = True
+
+
+ def _parse_model_template(self, model_template):
+ if model_template in self._template_cache:
+ return self._template_cache[model_template]
+ else:
+ template_parts = model_template.split(':')
+ assert(len(template_parts) == 2)
+ directive, template = template_parts[0], template_parts[1]
+ self._template_cache[model_template] = (directive, template)
+ return directive, template
+
+ def load_cell(self, node):
+ model_type = self._parse_model_template(node['model_template'])[1]
+ dynamics_params = self.dynamics_params(node)
+ fnc_name = node['model_processing']
+ if fnc_name is None:
+ return nest.Create(model_type, 1, dynamics_params)
+ else:
+ cell_fnc = py_modules.cell_processor(fnc_name)
+ return cell_fnc(model_type, node, dynamics_params)
+
+ @classmethod
+ def build_map(cls, node_group, graph):
+ prop_map = cls(graph)
+
+ node_types_table = node_group.parent.node_types_table
+ prop_map.node_types_table = node_types_table
+
+ if 'model_processing' in node_group.columns:
+ prop_map.batch = False
+ elif 'model_processing' in node_group.all_columns:
+ model_fncs = [node_types_table[ntid]['model_processing'] for ntid in np.unique(node_group.node_type_ids)
+ if node_types_table[ntid]['model_processing'] is not None]
+
+ if model_fncs:
+ prop_map.batch = False
+
+ if node_group.has_dynamics_params:
+ prop_map.batch = False
+ prop_map.dynamics_params = types.MethodType(group_dynamics_params, prop_map)
+ else: # 'dynamics_params' in node_group.all_columns:
+ prop_map.dynamics_params = types.MethodType(types_dynamics_params, prop_map)
+
+ if prop_map.batch:
+ prop_map.model_type = types.MethodType(model_type_batched, prop_map)
+ prop_map.model_params = types.MethodType(model_params_batched, prop_map)
+ else:
+ prop_map.model_type = types.MethodType(model_type, prop_map)
+ prop_map.model_params = types.MethodType(model_params, prop_map)
+
+ if node_group.has_gids:
+ prop_map.gid = types.MethodType(gid, prop_map)
+ else:
+ prop_map.gid = types.MethodType(node_id, prop_map)
+
+ return prop_map
+
+
+def gid(self, node):
+ return node['gid']
+
+
+def node_id(self, node):
+ return node.node_id
+
+
+def model_type(self, node):
+ return self._parse_model_template(node['model_template'])
+
+
+def model_type_batched(self, node_type_id):
+ return self._parse_model_template(self.node_types_table[node_type_id]['model_template'])
+
+
+def model_params(self, node):
+ return {}
+
+
+def model_params_batched(self, node_type_id):
+ return self.node_types_table[node_type_id]['dynamics_params']
+
+
+def types_dynamics_params(self, node):
+ return node['dynamics_params']
+
+
+def group_dynamics_params(self, node):
+ return node.dynamics_params
+
+
+class EdgePropertyMap(object):
+ def __init__(self, graph, source_population, target_population):
+ self._graph = graph
+ self._source_population = source_population
+ self._target_population = target_population
+
+ self.batch = True
+        self.synaptic_models = []
+
+
+ def synaptic_model(self, edge):
+ return edge['model_template']
+
+
+    def synaptic_params(self, edge):
+ params_dict = {'weight': self.syn_weight(edge), 'delay': edge['delay']}
+ params_dict.update(edge['dynamics_params'])
+ return params_dict
+
+ @classmethod
+ def build_map(cls, edge_group, biograph):
+        prop_map = cls(biograph, edge_group.parent.source_population, edge_group.parent.target_population)
+ if 'model_template' in edge_group.columns:
+ prop_map.batch = False
+ elif 'model_template' in edge_group.all_columns:
+ edge_types_table = edge_group.parent.edge_types_table
+ syn_models = set(edge_types_table[etid]['model_template']
+ for etid in np.unique(edge_types_table.edge_type_ids))
+            prop_map.synaptic_models = list(syn_models)
+ else:
+            prop_map.synaptic_models = ['static_synapse']
+ #s = [edge_types_table[ntid]['model_template'] for ntid in np.unique(edge_types_table.node_type_ids)
+ # if edge_types_table[ntid]['model_template'] is not None]
+
+
+ # For fetching/calculating synaptic weights
+ edge_types_weight_fncs = set()
+ edge_types_table = edge_group.parent.edge_types_table
+ for etid in edge_types_table.edge_type_ids:
+ weight_fnc = edge_types_table[etid].get('weight_function', None)
+ if weight_fnc is not None:
+ edge_types_weight_fncs.add(weight_fnc)
+
+ if 'weight_function' in edge_group.group_columns or edge_types_weight_fncs:
+            # Customized function for the user to calculate the synaptic weight
+ prop_map.syn_weight = types.MethodType(weight_function, prop_map)
+
+ elif 'syn_weight' in edge_group.all_columns:
+ # Just return the synaptic weight
+ prop_map.syn_weight = types.MethodType(syn_weight, prop_map)
+ else:
+ io.log_exception('Could not find syn_weight or weight_function properties. Cannot create connections.')
+
+ # For determining the synapse placement
+ if 'nsyns' in edge_group.all_columns:
+ prop_map.nsyns = types.MethodType(nsyns, prop_map)
+ else:
+ # It will get here for connections onto point neurons
+ prop_map.nsyns = types.MethodType(no_syns, prop_map)
+
+ # For target sections
+ '''
+ if 'syn_weight' not in edge_group.all_columns:
+ io.log_exception('Edges {} missing syn_weight property for connections.'.format(edge_group.parent.name))
+ else:
+ prop_map.syn_weight = types.MethodType(syn_weight, prop_map)
+
+
+
+ if 'syn_weight' in edge_group.columns:
+ prop_map.weight = types.MethodType(syn_weight, prop_map)
+ prop_map.preselected_targets = True
+ prop_map.nsyns = types.MethodType(no_nsyns, prop_map)
+ else:
+ prop_map.preselected_targets = False
+ '''
+ return prop_map
+
+
+def syn_weight(self, edge):
+ return edge['syn_weight']*self.nsyns(edge)
+
+
+def weight_function(self, edge):
+ weight_fnc_name = edge['weight_function']
+ src_node = self._graph.get_node(self._source_population, edge.source_node_id)
+ trg_node = self._graph.get_node(self._target_population, edge.target_node_id)
+
+ if weight_fnc_name is None:
+ weight_fnc = py_modules.synaptic_weight('default_weight_fnc')
+ return weight_fnc(edge, src_node, trg_node)# *self.nsyns(edge)
+
+ elif py_modules.has_synaptic_weight(weight_fnc_name):
+ weight_fnc = py_modules.synaptic_weight(weight_fnc_name)
+ return weight_fnc(edge, src_node, trg_node)
+
+ else:
+ io.log_exception('weight_function {} is not defined.'.format(weight_fnc_name))
+
+
+def nsyns(self, edge):
+ return edge['nsyns']
+
+
+def no_syns(self, edge):
+ return 1
\ No newline at end of file
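+
+# Example (a self-contained sketch of the binding pattern used in build_map):
+# module-level accessor functions are attached to one instance at a time with
+# types.MethodType, so each property map only carries the variants it needs.
+if __name__ == '__main__':
+    class _DemoMap(object):
+        pass
+
+    def _constant_nsyns(self, edge):
+        return 1
+
+    demo = _DemoMap()
+    demo.nsyns = types.MethodType(_constant_nsyns, demo)
+    assert demo.nsyns('any-edge') == 1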
diff --git a/bmtk-vb/bmtk/simulator/pointnet/pyfunction_cache.py b/bmtk-vb/bmtk/simulator/pointnet/pyfunction_cache.py
new file mode 100644
index 0000000..9e50616
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/pyfunction_cache.py
@@ -0,0 +1,246 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import types
+from functools import wraps
+
+
+class _PyFunctions(object):
+ """Structure for holding custom user-defined python functions.
+
+    Will store a set of functions created by the user. This should not be accessed directly; rather, use the
+    decorators or setter functions, and use the py_modules class variable to access individual functions. It is
+    divided into
+        synaptic_weight: functions for calculating synaptic weight.
+        cell_model: should return a NEST cell object.
+        synapse_model: should return a NEST synapse model (a parameter dict).
+    """
+ def __init__(self):
+ self.__syn_weights = {}
+ self.__cell_models = {}
+ self.__synapse_models = {}
+ self.__cell_processors = {}
+
+ def clear(self):
+ self.__syn_weights.clear()
+ self.__cell_models.clear()
+ self.__synapse_models.clear()
+ self.__cell_processors.clear()
+
+ def add_synaptic_weight(self, name, func, overwrite=True):
+ """stores synpatic fuction for given name"""
+ if overwrite or name not in self.__syn_weights:
+ self.__syn_weights[name] = func
+
+    @property
+    def synaptic_weights(self):
+        """return a list of the names of all available synaptic weight functions"""
+        return list(self.__syn_weights.keys())
+
+ def synaptic_weight(self, name):
+ """return the synpatic weight function"""
+ return self.__syn_weights[name]
+
+ def has_synaptic_weight(self, name):
+ return name in self.__syn_weights
+
+ def __cell_model_key(self, directive, model_type):
+ return (directive, model_type)
+
+ def add_cell_model(self, directive, model_type, func, overwrite=True):
+ key = self.__cell_model_key(directive, model_type)
+ if overwrite or key not in self.__cell_models:
+ self.__cell_models[key] = func
+
+ @property
+ def cell_models(self):
+ return self.__cell_models.keys()
+
+ def cell_model(self, directive, model_type):
+ return self.__cell_models[self.__cell_model_key(directive, model_type)]
+
+ def has_cell_model(self, directive, model_type):
+ return self.__cell_model_key(directive, model_type) in self.__cell_models
+
+ def add_synapse_model(self, name, func, overwrite=True):
+ if overwrite or name not in self.__synapse_models:
+ self.__synapse_models[name] = func
+
+ @property
+ def synapse_models(self):
+ return self.__synapse_models.keys()
+
+ def synapse_model(self, name):
+ return self.__synapse_models[name]
+
+
+ @property
+ def cell_processors(self):
+ return self.__cell_processors.keys()
+
+ def cell_processor(self, name):
+ return self.__cell_processors[name]
+
+ def add_cell_processor(self, name, func, overwrite=True):
+        if overwrite or name not in self.__cell_processors:
+ self.__cell_processors[name] = func
+
+ def __repr__(self):
+ rstr = '{}: {}\n'.format('cell_models', self.cell_models)
+ rstr += '{}: {}\n'.format('synapse_models', self.synapse_models)
+ rstr += '{}: {}'.format('synaptic_weights', self.synaptic_weights)
+ return rstr
+
+py_modules = _PyFunctions()
+
+
+def synaptic_weight(*wargs, **wkwargs):
+ """A decorator for registering a function as a synaptic weight function.
+ To use either
+ @synaptic_weight
+ def weight_function(): ...
+
+ or
+ @synaptic_weight(name='name_in_edge_types')
+ def weight_function(): ...
+
+ Once the decorator has been attached and imported the functions will automatically be added to py_modules.
+ """
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+ py_modules.add_synaptic_weight(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_synaptic_weight(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
+
+def cell_model(*wargs, **wkwargs):
+ """A decorator for registering NEURON cell loader functions."""
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+ py_modules.add_cell_model(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_cell_model(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
+
+def synapse_model(*wargs, **wkwargs):
+ """A decorator for registering NEURON synapse loader functions."""
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+ py_modules.add_synapse_model(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_synapse_model(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
+
+def add_weight_function(func, name=None, overwrite=True):
+ assert(callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_synaptic_weight(func_name, func, overwrite)
+
+
+def add_cell_model(func, directive, model_type, overwrite=True):
+ assert(callable(func))
+ # func_name = name if name is not None else func.__name__
+ py_modules.add_cell_model(directive, model_type, func, overwrite)
+
+
+def add_cell_processor(func, name=None, overwrite=True):
+ assert(callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_cell_processor(func_name, func, overwrite)
+
+
+def add_synapse_model(func, name=None, overwrite=True):
+ assert (callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_synapse_model(func_name, func, overwrite)
+
+
+def load_py_modules(cell_models=None, syn_models=None, syn_weights=None):
+ # py_modules.clear()
+
+ if cell_models is not None:
+ assert(isinstance(cell_models, types.ModuleType))
+ for f in [cell_models.__dict__.get(f) for f in dir(cell_models)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_cell_model(f.__name__, f)
+
+ if syn_models is not None:
+ assert(isinstance(syn_models, types.ModuleType))
+ for f in [syn_models.__dict__.get(f) for f in dir(syn_models)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_synapse_model(f.__name__, f)
+
+ if syn_weights is not None:
+ assert(isinstance(syn_weights, types.ModuleType))
+ for f in [syn_weights.__dict__.get(f) for f in dir(syn_weights)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_synaptic_weight(f.__name__, f)
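+
+# Example (a minimal sketch; the name 'flat_weight' is illustrative): register
+# a weight function through the decorator, then fetch it back from the cache.
+if __name__ == '__main__':
+    @synaptic_weight(name='flat_weight')
+    def flat_weight(edge_props, source_node, target_node):
+        return 1.0
+
+    fnc = py_modules.synaptic_weight('flat_weight')
+    assert fnc(None, None, None) == 1.0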
diff --git a/bmtk-vb/bmtk/simulator/pointnet/sonata_adaptors.py b/bmtk-vb/bmtk/simulator/pointnet/sonata_adaptors.py
new file mode 100644
index 0000000..b528dba
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/sonata_adaptors.py
@@ -0,0 +1,295 @@
+import numpy as np
+from collections import Counter
+import numbers
+import nest
+import types
+import pandas as pd
+
+from bmtk.simulator.core.sonata_reader import NodeAdaptor, SonataBaseNode, EdgeAdaptor, SonataBaseEdge
+from bmtk.simulator.pointnet.io_tools import io
+from bmtk.simulator.pointnet.pyfunction_cache import py_modules
+
+
+def all_null(node_group, column_name):
+ """Helper function to determine if a column has any non-NULL values"""
+ types_table = node_group.parent.types_table
+ non_null_vals = [types_table[ntid][column_name] for ntid in np.unique(node_group.node_type_ids)
+ if types_table[ntid][column_name] is not None]
+ return len(non_null_vals) == 0
+
+
+class PointNodeBatched(object):
+ def __init__(self, node_ids, gids, node_types_table, node_type_id):
+ self._n_nodes = len(node_ids)
+ self._node_ids = node_ids
+ self._gids = gids
+ self._nt_table = node_types_table
+ self._nt_id = node_type_id
+ self._nest_ids = []
+
+ @property
+ def n_nodes(self):
+ return self._n_nodes
+
+ @property
+ def node_ids(self):
+ return self._node_ids
+
+ @property
+ def gids(self):
+ return self._gids
+
+ @property
+ def nest_ids(self):
+ return self._nest_ids
+
+ @property
+ def nest_model(self):
+ return self._nt_table[self._nt_id]['model_template'].split(':')[1]
+
+ @property
+ def nest_params(self):
+ return self._nt_table[self._nt_id]['dynamics_params']
+
+ @property
+ def model_type(self):
+ return self._nt_table[self._nt_id]['model_type']
+
+ def build(self):
+ self._nest_ids = nest.Create(self.nest_model, self.n_nodes, self.nest_params)
+
+
+class PointNode(SonataBaseNode):
+ def __init__(self, node, prop_adaptor):
+ super(PointNode, self).__init__(node, prop_adaptor)
+ self._nest_ids = []
+
+ @property
+ def n_nodes(self):
+ return 1
+
+ @property
+ def node_ids(self):
+ return [self._prop_adaptor.node_id(self._node)]
+
+ @property
+ def gids(self):
+ return [self._prop_adaptor.gid(self._node)]
+
+ @property
+ def nest_ids(self):
+ return self._nest_ids
+
+ @property
+ def nest_model(self):
+ return self._prop_adaptor.model_template(self._node)[1]
+
+ @property
+ def nest_params(self):
+ return self.dynamics_params
+
+ def build(self):
+ nest_model = self.nest_model
+ dynamics_params = self.dynamics_params
+ fnc_name = self._node['model_processing']
+ if fnc_name is None:
+ self._nest_ids = nest.Create(nest_model, 1, dynamics_params)
+ else:
+ cell_fnc = py_modules.cell_processor(fnc_name)
+ self._nest_ids = cell_fnc(nest_model, self._node, dynamics_params)
+
+
+class PointNodeAdaptor(NodeAdaptor):
+ def __init__(self, network):
+ super(PointNodeAdaptor, self).__init__(network)
+
+        # Flag for determining if we can build multiple NEST nodes at once. If each individual node has unique
+        # NEST params, or a model_processing function is being called, then we must call nest.Create for each
+        # individual cell. Otherwise we can try to call nest.Create for a batch of nodes that share the same properties.
+ self._can_batch = True
+
+ @property
+ def batch_process(self):
+ return self._can_batch
+
+ @batch_process.setter
+ def batch_process(self, flag):
+ self._can_batch = flag
+
+ def get_node(self, sonata_node):
+ return PointNode(sonata_node, self)
+
+ def get_batches(self, node_group):
+ node_ids = node_group.node_ids
+ node_type_ids = node_group.node_type_ids
+ node_gids = node_group.gids
+ if node_gids is None:
+ node_gids = node_ids
+
+ ntids_counter = Counter(node_type_ids)
+
+ nid_groups = {nt_id: np.zeros(ntids_counter[nt_id], dtype=np.uint32) for nt_id in ntids_counter}
+ gid_groups = {nt_id: np.zeros(ntids_counter[nt_id], dtype=np.uint32) for nt_id in ntids_counter}
+ node_groups_counter = {nt_id: 0 for nt_id in ntids_counter}
+
+ for node_id, gid, node_type_id in zip(node_ids, node_gids, node_type_ids):
+ grp_indx = node_groups_counter[node_type_id]
+ nid_groups[node_type_id][grp_indx] = node_id
+ gid_groups[node_type_id][grp_indx] = gid
+ node_groups_counter[node_type_id] += 1
+
+ return [PointNodeBatched(nid_groups[nt_id], gid_groups[nt_id], node_group.parent.node_types_table, nt_id)
+ for nt_id in ntids_counter]
+
+ @staticmethod
+ def patch_adaptor(adaptor, node_group, network):
+ node_adaptor = NodeAdaptor.patch_adaptor(adaptor, node_group, network)
+
+ # If dynamics params is stored in the nodes.h5 then we have to build each node separate
+ if node_group.has_dynamics_params:
+ node_adaptor.batch_process = False
+
+        # If there is a non-null value in the model_processing column then it potentially means that every cell is
+        # uniquely built (currently model_processing is applied to each individual cell) and nodes can't be batched
+ if 'model_processing' in node_group.columns:
+ node_adaptor.batch_process = False
+ elif 'model_processing' in node_group.all_columns and not all_null(node_group, 'model_processing'):
+ node_adaptor.batch_process = False
+
+ if node_adaptor.batch_process:
+ io.log_info('Batch processing nodes for {}/{}.'.format(node_group.parent.name, node_group.group_id))
+
+ return node_adaptor
+
+
+class PointEdge(SonataBaseEdge):
+ @property
+ def source_node_ids(self):
+ return [self._edge.source_node_id]
+
+ @property
+ def target_node_ids(self):
+ return [self._edge.target_node_id]
+
+ @property
+ def nest_params(self):
+ if self.model_template in py_modules.synapse_models:
+ syn_model_fnc = py_modules.synapse_model(self.model_template)
+ else:
+            syn_model_fnc = py_modules.synapse_model('default')
+
+ return syn_model_fnc(self)
+
+
+class PointEdgeBatched(object):
+ def __init__(self, source_nids, target_nids, nest_params):
+ self._src_nids = source_nids
+ self._trg_nids = target_nids
+ self._nest_params = nest_params
+
+ @property
+ def source_node_ids(self):
+ return self._src_nids
+
+ @property
+ def target_node_ids(self):
+ return self._trg_nids
+
+ @property
+ def nest_params(self):
+ return self._nest_params
+
+
+class PointEdgeAdaptor(EdgeAdaptor):
+ def __init__(self, network):
+ super(PointEdgeAdaptor, self).__init__(network)
+ self._can_batch = True
+
+ @property
+ def batch_process(self):
+ return self._can_batch
+
+ @batch_process.setter
+ def batch_process(self, flag):
+ self._can_batch = flag
+
+ def synaptic_params(self, edge):
+ # TODO: THIS NEEDS to be replaced with call to synapse_models
+ params_dict = {'weight': self.syn_weight(edge, None, None), 'delay': edge.delay}
+ params_dict.update(edge.dynamics_params)
+ return params_dict
+
+ def get_edge(self, sonata_node):
+ return PointEdge(sonata_node, self)
+
+
+ def get_batches(self, edge_group):
+ src_ids = {}
+ trg_ids = {}
+ edge_types_table = edge_group.parent.edge_types_table
+
+ edge_type_ids = edge_group.node_type_ids()
+ et_id_counter = Counter(edge_type_ids)
+ tmp_df = pd.DataFrame({'etid': edge_type_ids, 'src_nids': edge_group.src_node_ids(),
+ 'trg_nids': edge_group.trg_node_ids()})
+
+ for et_id, grp_vals in tmp_df.groupby('etid'):
+ src_ids[et_id] = np.array(grp_vals['src_nids'])
+ trg_ids[et_id] = np.array(grp_vals['trg_nids'])
+
+ # selected_etids = np.unique(edge_type_ids)
+ type_params = {et_id: {} for et_id in et_id_counter.keys()}
+ for et_id, p_dict in type_params.items():
+ p_dict.update(edge_types_table[et_id]['dynamics_params'])
+ if 'model_template' in edge_types_table[et_id]:
+ p_dict['model'] = edge_types_table[et_id]['model_template']
+
+ if 'delay' in edge_group.columns:
+ raise NotImplementedError
+ elif 'delay' in edge_types_table.columns:
+ for et_id, p_dict in type_params.items():
+ p_dict['delay'] = edge_types_table[et_id]['delay']
+
+ scalar_syn_weight = 'syn_weight' not in edge_group.columns
+ scalar_nsyns = 'nsyns' not in edge_group.columns
+
+ if scalar_syn_weight and scalar_nsyns:
+ for et_id, p_dict in type_params.items():
+ et_dict = edge_types_table[et_id]
+ p_dict['weight'] = et_dict['nsyns']*et_dict['syn_weight']
+
+ else:
+ if not scalar_nsyns and not scalar_syn_weight:
+ tmp_df['nsyns'] = edge_group.get_dataset('nsyns')
+ tmp_df['syn_weight'] = edge_group.get_dataset('syn_weight')
+ for et_id, grp_vals in tmp_df.groupby('etid'):
+ type_params[et_id]['weight'] = np.array(grp_vals['nsyns'])*np.array(grp_vals['syn_weight'])
+
+ elif scalar_nsyns:
+ tmp_df['syn_weight'] = edge_group.get_dataset('syn_weight')
+ for et_id, grp_vals in tmp_df.groupby('etid'):
+ type_params[et_id]['weight'] = edge_types_table[et_id].get('nsyns', 1) * np.array(grp_vals['syn_weight'])
+
+ elif scalar_syn_weight:
+ tmp_df['nsyns'] = edge_group.get_dataset('nsyns')
+ for et_id, grp_vals in tmp_df.groupby('etid'):
+ type_params[et_id]['weight'] = np.array(grp_vals['nsyns']) * edge_types_table[et_id]['syn_weight']
+
+ batched_edges = []
+ for et_id in et_id_counter.keys():
+ batched_edges.append(PointEdgeBatched(src_ids[et_id], trg_ids[et_id], type_params[et_id]))
+
+ return batched_edges
+
+ @staticmethod
+ def patch_adaptor(adaptor, edge_group):
+ edge_adaptor = EdgeAdaptor.patch_adaptor(adaptor, edge_group)
+
+ if 'weight_function' not in edge_group.all_columns and 'syn_weight' in edge_group.all_columns:
+ adaptor.syn_weight = types.MethodType(point_syn_weight, adaptor)
+
+ return edge_adaptor
+
+
+def point_syn_weight(self, edge, src_node, trg_node):
+ return edge['syn_weight']*edge.nsyns
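+
+# Example (a self-contained sketch of the batching idea in get_batches): node
+# ids are bucketed per node_type_id into preallocated arrays so that a single
+# nest.Create call can build every cell of a given type.
+if __name__ == '__main__':
+    demo_type_ids = [10, 20, 10, 10, 20]
+    demo_node_ids = [0, 1, 2, 3, 4]
+    counts = Counter(demo_type_ids)
+    buckets = {ntid: np.zeros(n, dtype=np.uint32) for ntid, n in counts.items()}
+    fill = {ntid: 0 for ntid in counts}
+    for nid, ntid in zip(demo_node_ids, demo_type_ids):
+        buckets[ntid][fill[ntid]] = nid
+        fill[ntid] += 1
+    assert list(buckets[10]) == [0, 2, 3]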
diff --git a/bmtk-vb/bmtk/simulator/pointnet/utils.py b/bmtk-vb/bmtk/simulator/pointnet/utils.py
new file mode 100644
index 0000000..d71716a
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/pointnet/utils.py
@@ -0,0 +1,188 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import h5py
+from collections import defaultdict
+import pandas as pd
+import numpy as np
+import six
+"""
+Most of these functions were collected from a previous version of pointnet and are no longer used or tested. However,
+some functions may still be used by some people internally at AI for running their own simulations. I have marked all
+such functions as UNUSED.
+
+I will leave them alone for now, but in the future they should be purged or updated.
+"""
+
+
+def read_LGN_activity(trial_num, file_name):
+ # UNUSED.
+ spike_train_dict = {}
+ f5 = h5py.File(file_name, 'r')
+ trial_group = f5['processing/trial_{}/spike_train'.format(trial_num)]
+ for cid in trial_group.keys():
+ spike_train_dict[int(cid)] = trial_group[cid]['data'][...]
+
+ return spike_train_dict
+
+
+def read_conns(file_name):
+ # UNUSED.
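+    # The h5 file stores connectivity in CSR-like form: 'indptr' delimits, for
+    # each cell, the slice of 'src_gids' holding that cell's presynaptic sources.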
+    fc = h5py.File(file_name, 'r')
+ indptr = fc['indptr']
+ cell_size = len(indptr) - 1
+ print(cell_size)
+ conns = {}
+ source = fc['src_gids']
+ for xin in six.moves.range(cell_size):
+ conns[str(xin)] = list(source[indptr[xin]:indptr[xin+1]])
+
+ return conns
+
+
+def gen_recurrent_csv(num, offset, csv_file):
+ # UNUSED.
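+    # Expects a text file loadable by np.loadtxt, one row per connection with
+    # columns [target_id, source_id, weight_scale].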
+ conn_data = np.loadtxt(csv_file)
+ target_ids = conn_data[:, 0]
+ source_ids = conn_data[:, 1]
+ weight_scale = conn_data[:, 2]
+
+ pre = []
+ cell_num = num
+ params = []
+ for xin in six.moves.range(cell_num):
+ pre.append(xin+offset)
+ ind = np.where(source_ids == xin)
+
+ temp_param = {}
+ targets = target_ids[ind] + offset
+ weights = weight_scale[ind]
+ delays = np.ones(len(ind[0]))*1.5
+        # astype() returns a new array; assign the result (the original calls discarded it)
+        targets = targets.astype(float)
+        weights = weights.astype(float)
+ temp_param['target'] = targets
+ temp_param['weight'] = weights*1
+ temp_param['delay'] = delays
+ params.append(temp_param)
+
+ return pre, params
+
+
+def gen_recurrent_h5(num, offset, h5_file):
+ # UNUSED.
+    fc = h5py.File(h5_file, 'r')
+ indptr = fc['indptr']
+ cell_size = len(indptr) - 1
+ src_gids = fc['src_gids']
+ nsyns = fc['nsyns']
+ source_ids = []
+ weight_scale = []
+ target_ids = []
+ delay_v = 1.5 # arbitrary value
+
+ for xin in six.moves.range(cell_size):
+ target_ids.append(xin)
+ source_ids.append(list(src_gids[indptr[xin]:indptr[xin+1]]))
+ weight_scale.append(list(nsyns[indptr[xin]:indptr[xin+1]]))
+ targets = defaultdict(list)
+ weights = defaultdict(list)
+ delays = defaultdict(list)
+
+ for xi, xin in enumerate(target_ids):
+ for yi, yin in enumerate(source_ids[xi]):
+ targets[yin].append(xin)
+ weights[yin].append(weight_scale[xi][yi])
+ delays[yin].append(delay_v)
+
+ presynaptic = []
+ params = []
+ for xin in targets:
+ presynaptic.append(xin+offset)
+ temp_param = {}
+        # astype() returns a new array, so build the float arrays directly
+        # (the original astype() calls discarded their results)
+        temp_param['target'] = np.array(targets[xin], dtype=float) + offset
+        temp_param['weight'] = np.array(weights[xin], dtype=float)
+        temp_param['delay'] = np.array(delays[xin], dtype=float)
+ params.append(temp_param)
+
+ return presynaptic, params
+
+
+def load_params(node_name, model_name):
+ """
+ load information regarding nodes and cell_models from csv files
+
+ Parameters
+ ----------
+    node_name: csv file name for node information
+    model_name: csv file name for neuron model information
+
+    Returns
+    -------
+    node_info: 2d array of node info read from the csv file
+    model_info: 2d array of model info read from the csv file
+    dict_coordinates: dictionary of coordinates, keyed by node_id; entries are the x, y and z coordinates.
+ """
+ # UNUSED.
+ node = pd.read_csv(node_name, sep=' ', quotechar='"', quoting=0)
+ model = pd.read_csv(model_name, sep=' ', quotechar='"', quoting=0)
+ node_info = node.values
+ model_info = model.values
+    # In NEST, cells do not have intrinsic coordinates, so we have to make virtual links between cells and
+    # coordinates.
+ dict_coordinates = defaultdict(list)
+
+ for xin in six.moves.range(len(node_info)):
+ dict_coordinates[str(node_info[xin, 0])] = [node_info[xin, 2], node_info[xin, 3], node_info[xin, 4]]
+ return node_info, model_info, dict_coordinates
+
+
+def load_conns(cnn_fn):
+ """
+ load information regarding connectivity from csv files
+
+ Parameters
+ ----------
+    cnn_fn: csv file name for connection information
+
+ Returns
+ -------
+ connection dictionary
+ """
+ # UNUSED.
+ conns = pd.read_csv(cnn_fn, sep=' ', quotechar='"', quoting=0)
+ targets = conns.target_label
+ sources = conns.source_label
+ weights = conns.weight
+ delays = conns.delay
+
+ conns_mapping = {}
+ for xin in six.moves.range(len(targets)):
+ keys = sources[xin] + '-' + targets[xin]
+ conns_mapping[keys] = [weights[xin], delays[xin]]
+
+ return conns_mapping
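+
+# Illustrative usage (file name and labels are hypothetical):
+#   conns = load_conns('connections.csv')
+#   weight, delay = conns['lgn-v1']  # keys are '<source_label>-<target_label>'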
diff --git a/bmtk-vb/bmtk/simulator/popnet/__init__.py b/bmtk-vb/bmtk/simulator/popnet/__init__.py
new file mode 100644
index 0000000..7b591ca
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/__init__.py
@@ -0,0 +1,25 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .popnetwork import PopNetwork
+from .popsimulator import PopSimulator
+from .config import Config
diff --git a/bmtk-vb/bmtk/simulator/popnet/config.py b/bmtk-vb/bmtk/simulator/popnet/config.py
new file mode 100644
index 0000000..567e5b6
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/config.py
@@ -0,0 +1,34 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# import bmtk.simulator.utils.config as msdk_config
+from bmtk.simulator.core.config import ConfigDict
+from bmtk.simulator.core.io_tools import io
+
+def from_json(config_file, validate=False):
+ conf_dict = ConfigDict.from_json(config_file)
+ conf_dict.io = io
+ return conf_dict
+
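+# Illustrative usage (the path is hypothetical):
+#   conf = from_json('path/to/config.json')
+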
+
+class Config(ConfigDict):
+ pass
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/popnet/popedge.py b/bmtk-vb/bmtk/simulator/popnet/popedge.py
new file mode 100644
index 0000000..1e4e98e
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/popedge.py
@@ -0,0 +1,82 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from bmtk.simulator.utils.graph import SimEdge
+
+
+class PopEdge(SimEdge):
+ def __init__(self, source_pop, target_pop, edge_params, dynamics_params):
+ super(PopEdge, self).__init__(edge_params, dynamics_params)
+ self.__source_pop = source_pop
+ self.__target_pop = target_pop
+ self._weight = self.__get_prop('weight', 0.0)
+ self._nsyns = self.__get_prop('nsyns', 0)
+ self._delay = self.__get_prop('delay', 0.0)
+
+ @property
+ def source(self):
+ return self.__source_pop
+
+ @property
+ def target(self):
+ return self.__target_pop
+
+ @property
+ def params(self):
+ return self._orig_params
+
+ @property
+ def weight(self):
+ return self._weight
+
+ @weight.setter
+ def weight(self, value):
+ self._weight = value
+
+ @property
+ def nsyns(self):
+ return self._nsyns
+
+ @nsyns.setter
+ def nsyns(self, value):
+ self._nsyns = value
+
+ @property
+ def delay(self):
+ return self._delay
+
+ @delay.setter
+ def delay(self, value):
+ self._delay = value
+
+ def __get_prop(self, name, default=None):
+ if name in self._orig_params:
+ return self._orig_params[name]
+ elif name in self._dynamics_params:
+ return self._dynamics_params[name]
+ else:
+ return default
+
+ def __repr__(self):
+ relevant_params = "weight: {}, delay: {}, nsyns: {}".format(self.weight, self.delay, self.nsyns)
+ rstr = "{} --> {} {{{}}}".format(self.source.pop_id, self.target.pop_id, relevant_params)
+ return rstr
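+
+# An illustrative repr (population ids and values are hypothetical):
+#   "lgn --> v1 {weight: 0.002, delay: 1.5, nsyns: 10}"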
diff --git a/bmtk-vb/bmtk/simulator/popnet/popnetwork.py b/bmtk-vb/bmtk/simulator/popnet/popnetwork.py
new file mode 100644
index 0000000..46b7928
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/popnetwork.py
@@ -0,0 +1,695 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+import numpy as np
+
+from bmtk.simulator.core.simulator_network import SimNetwork
+#from bmtk.simulator.core.graph import SimGraph
+#from property_schemas import PopTypes, DefaultPropertySchema
+#from popnode import InternalNode, ExternalPopulation
+#from popedge import PopEdge
+from bmtk.simulator.popnet import utils as poputils
+from bmtk.simulator.popnet.sonata_adaptors import PopEdgeAdaptor
+
+from dipde.internals.internalpopulation import InternalPopulation
+from dipde.internals.externalpopulation import ExternalPopulation
+from dipde.internals.connection import Connection
+
+'''
+class PopNode(object):
+ def __init__(self, node, property_map, graph):
+ self._node = node
+ self._property_map = property_map
+ self._graph = graph
+
+ @property
+ def dynamics_params(self):
+        # TODO: Use property map
+ return self._node['dynamics_params']
+
+ @property
+ def node_id(self):
+ # TODO: Use property map
+ return self._node.node_id
+'''
+
+
+class Population(object):
+ def __init__(self, pop_id):
+ self._pop_id = pop_id
+ self._nodes = []
+ self._params = None
+
+ self._dipde_obj = None
+
+ def add_node(self, pnode):
+ self._nodes.append(pnode)
+ if self._params is None and pnode.dynamics_params is not None:
+ self._params = pnode.dynamics_params.copy()
+
+ @property
+ def pop_id(self):
+ return self._pop_id
+
+ @property
+ def dipde_obj(self):
+ return self._dipde_obj
+
+ @property
+ def record(self):
+ return True
+
+ def build(self):
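+        # All nodes grouped into this population are assumed to share
+        # dynamics_params; the first node's params seed the dipde object.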
+ params = self._nodes[0].dynamics_params
+ self._dipde_obj = InternalPopulation(**params)
+
+ def get_gids(self):
+ for node in self._nodes:
+ yield node.node_id
+
+ def __getitem__(self, item):
+ return self._params[item]
+
+ def __setitem__(self, key, value):
+ self._params[key] = value
+
+ def __repr__(self):
+ return str(self._pop_id)
+
+
+class ExtPopulation(Population):
+ def __init__(self, pop_id):
+ super(ExtPopulation, self).__init__(pop_id)
+ self._firing_rate = None
+
+ @property
+ def record(self):
+ return False
+
+ @property
+ def firing_rate(self):
+ return self._firing_rate
+
+ @firing_rate.setter
+ def firing_rate(self, value):
+ self.build(value)
+
+ def build(self, firing_rate):
+ if firing_rate is not None:
+ self._firing_rate = firing_rate
+
+ self._dipde_obj = ExternalPopulation(firing_rate)
+
+
+class PopEdge(object):
+ def __init__(self, edge, property_map, graph):
+ self._edge = edge
+ self._prop_map = property_map
+ self._graph = graph
+
+ @property
+ def nsyns(self):
+ # TODO: Use property map
+ return self._edge['nsyns']
+
+ @property
+ def delay(self):
+ return self._edge['delay']
+
+ @property
+ def weight(self):
+ return self._edge['syn_weight']
+
+
+class PopConnection(object):
+ def __init__(self, src_pop, trg_pop):
+ self._src_pop = src_pop
+ self._trg_pop = trg_pop
+ self._edges = []
+
+ self._dipde_conn = None
+
+ def add_edge(self, edge):
+ self._edges.append(edge)
+
+ def build(self):
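+        # Only the first edge's properties are used; edges between the same
+        # pair of populations are assumed to be homogeneous.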
+ edge = self._edges[0]
+ self._dipde_conn = Connection(self._src_pop._dipde_obj, self._trg_pop._dipde_obj, edge.nsyns, edge.delay,
+ edge.syn_weight)
+
+ @property
+ def dipde_obj(self):
+ return self._dipde_conn
+
+
+class PopNetwork(SimNetwork):
+ def __init__(self, group_by='node_type_id', **properties):
+ super(PopNetwork, self).__init__()
+
+ self.__all_edges = []
+ self._group_key = group_by
+ self._gid_table = {}
+ self._edges = {}
+ self._target_edges = {}
+ self._source_edges = {}
+
+ self._params_cache = {}
+ #self._params_column = property_schema.get_params_column()
+ self._dipde_pops = {}
+ self._external_pop = {}
+ self._all_populations = []
+ # self._loaded_external_pops = {}
+
+ self._nodeid2pop_map = {}
+
+ self._connections = {}
+ self._external_connections = {}
+ self._all_connections = []
+
+ @property
+ def populations(self):
+ return self._all_populations
+
+ @property
+ def connections(self):
+ return self._all_connections
+
+ @property
+ def internal_populations(self):
+ return self._dipde_pops.values()
+
+ def _register_adaptors(self):
+ super(PopNetwork, self)._register_adaptors()
+ self._edge_adaptors['sonata'] = PopEdgeAdaptor
+
+ def build_nodes(self):
+ if self._group_key == 'node_id' or self._group_key is None:
+ self._build_nodes()
+ else:
+ self._build_nodes_grouped()
+
+ def _build_nodes(self):
+ for node_pop in self.node_populations:
+ if node_pop.internal_nodes_only:
+ nid2pop_map = {}
+ for node in node_pop.get_nodes():
+ #pnode = PopNode(node, prop_maps[node.group_id], self)
+ pop = Population(node.node_id)
+ pop.add_node(node)
+ pop.build()
+
+ self._dipde_pops[node.node_id] = pop
+ self._all_populations.append(pop)
+ nid2pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[node_pop.name] = nid2pop_map
+
+ """
+ for node_pop in self._internal_populations_map.values():
+ prop_maps = self._node_property_maps[node_pop.name]
+ nid2pop_map = {}
+ for node in node_pop:
+ pnode = PopNode(node, prop_maps[node.group_id], self)
+ pop = Population(node.node_id)
+ pop.add_node(pnode)
+ pop.build()
+
+ self._dipde_pops[node.node_id] = pop
+ self._all_populations.append(pop)
+ nid2pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[node_pop.name] = nid2pop_map
+ """
+
+ def _build_nodes_grouped(self):
+ # Organize every single sonata-node into a given population.
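+        # e.g. with the default group_by='node_type_id', every node sharing a
+        # node_type_id is collapsed into a single dipde population.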
+ for node_pop in self.node_populations:
+ nid2pop_map = {}
+ if node_pop.internal_nodes_only:
+ for node in node_pop.get_nodes():
+ pop_key = node[self._group_key]
+ if pop_key not in self._dipde_pops:
+ pop = Population(pop_key)
+ self._dipde_pops[pop_key] = pop
+ self._all_populations.append(pop)
+
+ pop = self._dipde_pops[pop_key]
+ pop.add_node(node)
+ nid2pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[node_pop.name] = nid2pop_map
+
+ for dpop in self._dipde_pops.values():
+ dpop.build()
+
+ """
+ for node_pop in self._internal_populations_map.values():
+ prop_maps = self._node_property_maps[node_pop.name]
+ nid2pop_map = {}
+ for node in node_pop:
+ pop_key = node[self._group_key]
+ pnode = PopNode(node, prop_maps[node.group_id], self)
+ if pop_key not in self._dipde_pops:
+ pop = Population(pop_key)
+ self._dipde_pops[pop_key] = pop
+ self._all_populations.append(pop)
+
+ pop = self._dipde_pops[pop_key]
+ pop.add_node(pnode)
+ nid2pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[node_pop.name] = nid2pop_map
+
+ for dpop in self._dipde_pops.values():
+ dpop.build()
+ """
+
+ def build_recurrent_edges(self):
+ recurrent_edge_pops = [ep for ep in self._edge_populations if not ep.virtual_connections]
+
+ for edge_pop in recurrent_edge_pops:
+ if edge_pop.recurrent_connections:
+ src_pop_maps = self._nodeid2pop_map[edge_pop.source_nodes]
+ trg_pop_maps = self._nodeid2pop_map[edge_pop.target_nodes]
+ for edge in edge_pop.get_edges():
+ src_pop = src_pop_maps[edge.source_node_id]
+ trg_pop = trg_pop_maps[edge.target_node_id]
+ conn_key = (src_pop, trg_pop)
+ if conn_key not in self._connections:
+ conn = PopConnection(src_pop, trg_pop)
+ self._connections[conn_key] = conn
+ self._all_connections.append(conn)
+
+ self._connections[conn_key].add_edge(edge)
+
+ elif edge_pop.mixed_connections:
+ raise NotImplementedError()
+
+ for conn in self._connections.values():
+ conn.build()
+
+ """
+ recurrent_edges = [edge_pop for _, edge_list in self._recurrent_edges.items() for edge_pop in edge_list]
+ for edge_pop in recurrent_edges:
+ prop_maps = self._edge_property_maps[edge_pop.name]
+ src_pop_maps = self._nodeid2pop_map[edge_pop.source_population]
+ trg_pop_maps = self._nodeid2pop_map[edge_pop.target_population]
+ for edge in edge_pop:
+ src_pop = src_pop_maps[edge.source_node_id]
+ trg_pop = trg_pop_maps[edge.target_node_id]
+ conn_key = (src_pop, trg_pop)
+ if conn_key not in self._connections:
+ conn = PopConnection(src_pop, trg_pop)
+ self._connections[conn_key] = conn
+ self._all_connections.append(conn)
+
+ pop_edge = PopEdge(edge, prop_maps[edge.group_id], self)
+ self._connections[conn_key].add_edge(pop_edge)
+
+ for conn in self._connections.values():
+ conn.build()
+ # print len(self._connections)
+ """
+
+ def find_edges(self, source_nodes=None, target_nodes=None):
+ # TODO: Move to parent
+ selected_edges = self._edge_populations[:]
+
+ if source_nodes is not None:
+ selected_edges = [edge_pop for edge_pop in selected_edges if edge_pop.source_nodes == source_nodes]
+
+ if target_nodes is not None:
+ selected_edges = [edge_pop for edge_pop in selected_edges if edge_pop.target_nodes == target_nodes]
+
+ return selected_edges
+
+ def add_spike_trains(self, spike_trains, node_set):
+ # Build external node populations
+ src_nodes = [node_pop for node_pop in self.node_populations if node_pop.name in node_set.population_names()]
+ for node_pop in src_nodes:
+ pop_name = node_pop.name
+ if node_pop.name not in self._external_pop:
+ external_pop_map = {}
+ src_pop_map = {}
+ for node in node_pop.get_nodes():
+ pop_key = node[self._group_key]
+ if pop_key not in external_pop_map:
+ pop = ExtPopulation(pop_key)
+ external_pop_map[pop_key] = pop
+ self._all_populations.append(pop)
+
+ pop = external_pop_map[pop_key]
+ pop.add_node(node)
+ src_pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[pop_name] = src_pop_map
+
+ firing_rates = poputils.get_firing_rates(external_pop_map.values(), spike_trains)
+ self._external_pop[pop_name] = external_pop_map
+ for dpop in external_pop_map.values():
+ dpop.build(firing_rates[dpop.pop_id])
+
+ else:
+                # TODO: Throw an error; spike trains should only be added once per source population
+ # external_pop_map = self._external_pop[pop_name]
+ src_pop_map = self._nodeid2pop_map[pop_name]
+
+ unbuilt_connections = []
+ for source_reader in src_nodes:
+ for edge_pop in self.find_edges(source_nodes=source_reader.name):
+ trg_pop_map = self._nodeid2pop_map[edge_pop.target_nodes]
+ for edge in edge_pop.get_edges():
+ src_pop = src_pop_map[edge.source_node_id]
+ trg_pop = trg_pop_map[edge.target_node_id]
+ conn_key = (src_pop, trg_pop)
+ if conn_key not in self._external_connections:
+ pconn = PopConnection(src_pop, trg_pop)
+ self._external_connections[conn_key] = pconn
+ unbuilt_connections.append(pconn)
+ self._all_connections.append(pconn)
+
+ #pop_edge = PopEdge(edge, prop_maps[edge.group_id], self)
+ self._external_connections[conn_key].add_edge(edge)
+
+ for pedge in unbuilt_connections:
+ pedge.build()
+ #exit()
+
+ """
+ print node_pop.name
+
+
+ exit()
+ if node_pop.name in self._virtual_ids_map:
+ continue
+
+ virt_node_map = {}
+ if node_pop.virtual_nodes_only:
+ print 'HERE'
+ exit()
+
+
+ for pop_name, node_pop in self._virtual_populations_map.items():
+ if pop_name not in spike_trains.populations:
+ continue
+
+ # Build external population if it already hasn't been built
+ if pop_name not in self._external_pop:
+ prop_maps = self._node_property_maps[pop_name]
+ external_pop_map = {}
+ src_pop_map = {}
+ for node in node_pop:
+ pop_key = node[self._group_key]
+ pnode = PopNode(node, prop_maps[node.group_id], self)
+ if pop_key not in external_pop_map:
+ pop = ExtPopulation(pop_key)
+ external_pop_map[pop_key] = pop
+ self._all_populations.append(pop)
+
+ pop = external_pop_map[pop_key]
+ pop.add_node(pnode)
+ src_pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[pop_name] = src_pop_map
+
+ firing_rates = poputils.get_firing_rates(external_pop_map.values(), spike_trains)
+ self._external_pop[pop_name] = external_pop_map
+ for dpop in external_pop_map.values():
+ dpop.build(firing_rates[dpop.pop_id])
+
+ else:
+ # TODO: Throw error spike trains should only be called once per source population
+ # external_pop_map = self._external_pop[pop_name]
+ src_pop_map = self._nodeid2pop_map[pop_name]
+
+ unbuilt_connections = []
+ for node_pop in self._internal_populations_map.values():
+ trg_pop_map = self._nodeid2pop_map[node_pop.name]
+ for edge_pop in self.external_edge_populations(src_pop=pop_name, trg_pop=node_pop.name):
+ for edge in edge_pop:
+ src_pop = src_pop_map[edge.source_node_id]
+ trg_pop = trg_pop_map[edge.target_node_id]
+ conn_key = (src_pop, trg_pop)
+ if conn_key not in self._external_connections:
+ pconn = PopConnection(src_pop, trg_pop)
+ self._external_connections[conn_key] = pconn
+ unbuilt_connections.append(pconn)
+ self._all_connections.append(pconn)
+
+ pop_edge = PopEdge(edge, prop_maps[edge.group_id], self)
+ self._external_connections[conn_key].add_edge(pop_edge)
+
+ for pedge in unbuilt_connections:
+ pedge.build()
+ """
+
+
+ def add_rates(self, rates, node_set):
+ if self._group_key == 'node_id':
+ id_lookup = lambda n: n.node_id
+ else:
+ id_lookup = lambda n: n[self._group_key]
+
+ src_nodes = [node_pop for node_pop in self.node_populations if node_pop.name in node_set.population_names()]
+ for node_pop in src_nodes:
+ pop_name = node_pop.name
+ if node_pop.name not in self._external_pop:
+ external_pop_map = {}
+ src_pop_map = {}
+ for node in node_pop.get_nodes():
+ pop_key = id_lookup(node)
+ if pop_key not in external_pop_map:
+ pop = ExtPopulation(pop_key)
+ external_pop_map[pop_key] = pop
+ self._all_populations.append(pop)
+
+ pop = external_pop_map[pop_key]
+ pop.add_node(node)
+ src_pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[pop_name] = src_pop_map
+
+ self._external_pop[pop_name] = external_pop_map
+ for dpop in external_pop_map.values():
+ firing_rates = rates.get_rate(dpop.pop_id)
+ dpop.build(firing_rates)
+
+ else:
+                # TODO: Throw an error; spike trains should only be added once per source population
+ # external_pop_map = self._external_pop[pop_name]
+ src_pop_map = self._nodeid2pop_map[pop_name]
+
+ unbuilt_connections = []
+ for source_reader in src_nodes:
+ for edge_pop in self.find_edges(source_nodes=source_reader.name):
+ trg_pop_map = self._nodeid2pop_map[edge_pop.target_nodes]
+ for edge in edge_pop.get_edges():
+ src_pop = src_pop_map[edge.source_node_id]
+ trg_pop = trg_pop_map[edge.target_node_id]
+ conn_key = (src_pop, trg_pop)
+ if conn_key not in self._external_connections:
+ pconn = PopConnection(src_pop, trg_pop)
+ self._external_connections[conn_key] = pconn
+ unbuilt_connections.append(pconn)
+ self._all_connections.append(pconn)
+
+ #pop_edge = PopEdge(edge, prop_maps[edge.group_id], self)
+ self._external_connections[conn_key].add_edge(edge)
+
+ for pedge in unbuilt_connections:
+ pedge.build()
+
+ """
+ for pop_name, node_pop in self._virtual_populations_map.items():
+ if pop_name not in rates.populations:
+ continue
+
+ # Build external population if it already hasn't been built
+ if pop_name not in self._external_pop:
+ prop_maps = self._node_property_maps[pop_name]
+ external_pop_map = {}
+ src_pop_map = {}
+ for node in node_pop:
+ pop_key = id_lookup(node)
+ #pop_key = node[self._group_key]
+ pnode = PopNode(node, prop_maps[node.group_id], self)
+ if pop_key not in external_pop_map:
+ pop = ExtPopulation(pop_key)
+ external_pop_map[pop_key] = pop
+ self._all_populations.append(pop)
+
+ pop = external_pop_map[pop_key]
+ pop.add_node(pnode)
+ src_pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[pop_name] = src_pop_map
+
+ firing_rate = rates.get_rate(pop_key)
+ self._external_pop[pop_name] = external_pop_map
+ for dpop in external_pop_map.values():
+ dpop.build(firing_rate)
+
+ else:
+ # TODO: Throw error spike trains should only be called once per source population
+ # external_pop_map = self._external_pop[pop_name]
+ src_pop_map = self._nodeid2pop_map[pop_name]
+ """
+
+ '''
+ def _add_node(self, node, network):
+ pops = self._networks[network]
+ pop_key = node[self._group_key]
+ if pop_key in pops:
+ pop = pops[pop_key]
+ pop.add_gid(node.gid)
+ self._gid_table[network][node.gid] = pop
+ else:
+ model_class = self.property_schema.get_pop_type(node)
+ if model_class == PopTypes.Internal:
+ pop = InternalNode(pop_key, self, network, node)
+ pop.add_gid(node.gid)
+ pop.model_params = self.__get_params(node)
+ self._add_internal_node(pop, network)
+
+ elif model_class == PopTypes.External:
+ # TODO: See if we can get firing rate from dynamics_params
+ pop = ExternalPopulation(pop_key, self, network, node)
+ pop.add_gid(node.gid)
+ self._add_external_node(pop, network)
+
+ else:
+ raise Exception('Unknown model type')
+
+ if network not in self._gid_table:
+ self._gid_table[network] = {}
+ self._gid_table[network][node.gid] = pop
+ '''
+
+ def __get_params(self, node_params):
+ if node_params.with_dynamics_params:
+ return node_params['dynamics_params']
+
+ params_file = node_params[self._params_column]
+ if params_file in self._params_cache:
+ return self._params_cache[params_file]
+ else:
+ params_dir = self.get_component('models_dir')
+ params_path = os.path.join(params_dir, params_file)
+            with open(params_path, 'r') as f:
+                params_dict = json.load(f)
+ self._params_cache[params_file] = params_dict
+ return params_dict
+
+ def _preprocess_node_types(self, node_population):
+ node_type_ids = np.unique(node_population.type_ids)
+ # TODO: Verify all the node_type_ids are in the table
+ node_types_table = node_population.types_table
+
+ if 'dynamics_params' in node_types_table.columns and 'model_type' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ dynamics_params = node_type['dynamics_params']
+ model_type = node_type['model_type']
+
+ if model_type == 'biophysical':
+ params_dir = self.get_component('biophysical_neuron_models_dir')
+ elif model_type == 'point_process':
+ params_dir = self.get_component('point_neuron_models_dir')
+ elif model_type == 'point_soma':
+ params_dir = self.get_component('point_neuron_models_dir')
+ elif model_type == 'population':
+ params_dir = self.get_component('population_models_dir')
+ else:
+ # Not sure what to do in this case, throw Exception?
+ params_dir = self.get_component('custom_neuron_models')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+                # See if we can load dynamics_params as a dictionary; if the file
+                # cannot be read, log an error. (The original intent was to fall back
+                # to the raw file path and let the cell_model loader handle the
+                # extension, but that is not implemented.)
+                try:
+                    with open(params_path, 'r') as f:
+                        node_type['dynamics_params'] = json.load(f)
+ except Exception:
+ # TODO: Check dynamics_params before
+ self.io.log_exception('Could not find node dynamics_params file {}.'.format(params_path))
+
+
+ '''
+ def add_edges(self, edges, target_network=None, source_network=None):
+ # super(PopGraph, self).add_edges(edges)
+
+ target_network = target_network if target_network is not None else edges.target_network
+ if target_network not in self._target_edges:
+ self._target_edges[target_network] = []
+
+ source_network = source_network if source_network is not None else edges.source_network
+ if source_network not in self._source_edges:
+ self._source_edges[source_network] = []
+
+ target_pops = self.get_populations(target_network)
+ source_pops = self.get_populations(source_network)
+ source_gid_table = self._gid_table[source_network]
+
+ for target_pop in target_pops:
+ for target_gid in target_pop.get_gids():
+ for edge in edges.edges_itr(target_gid):
+ source_pop = source_gid_table[edge.source_gid]
+ self._add_edge(source_pop, target_pop, edge)
+ '''
+
+ def _add_edge(self, source_pop, target_pop, edge):
+ src_id = source_pop.node_id
+ trg_id = target_pop.node_id
+ edge_type_id = edge['edge_type_id']
+ edge_key = (src_id, source_pop.network, trg_id, target_pop.network, edge_type_id)
+
+ if edge_key in self._edges:
+ return
+ else:
+ # TODO: implement dynamics params
+ dynamics_params = self._get_edge_params(edge)
+ pop_edge = PopEdge(source_pop, target_pop, edge, dynamics_params)
+ self._edges[edge_key] = pop_edge
+ self._source_edges[source_pop.network].append(pop_edge)
+ self._target_edges[target_pop.network].append(pop_edge)
+
+ def get_edges(self, source_network):
+ return self._source_edges[source_network]
+
+ def edges_table(self, target_network, source_network):
+ return self._edges_table[(target_network, source_network)]
+
+ def get_populations(self, network):
+ return super(PopNetwork, self).get_nodes(network)
+
+ def get_population(self, node_set, gid):
+ return self._nodeid2pop_map[node_set][gid]
+
+ def rebuild(self):
+ for _, ns in self._nodeid2pop_map.items():
+ for _, pop in ns.items():
+ pop.build()
+
+ for pc in self._all_connections:
+ pc.build()
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/popnet/popnetwork_OLD.py b/bmtk-vb/bmtk/simulator/popnet/popnetwork_OLD.py
new file mode 100644
index 0000000..cfdeddb
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/popnetwork_OLD.py
@@ -0,0 +1,327 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import logging
+from six import string_types
+
+from dipde.internals.internalpopulation import InternalPopulation
+from dipde.internals.externalpopulation import ExternalPopulation
+from dipde.internals.connection import Connection
+import dipde
+
+import bmtk.simulator.popnet.config as cfg
+import bmtk.simulator.popnet.utils as poputils
+
+
+class PopNetwork (object):
+ def __init__(self, graph):
+ self._graph = graph
+
+ self._duration = 0.0
+ self._dt = 0.0001
+ self._rates_file = None # name of file where the output is saved
+
+ self.__population_list = [] # list of all populations, internal and external
+        self.__population_table = {network: {} for network in self._graph.networks}  # population lookup by [network][id]
+ self.__connection_list = [] # list of all connections
+ self._dipde_network = None # reference to dipde.Network object
+
+        # dictionary of rates for every external network/pop_id. Prepopulate it with populations whose rates
+        # have already been manually set; the rest should use one of the add_rates_* functions.
+ self._rates = {network: {pop.pop_id: pop.firing_rate for pop in self._graph.get_populations(network)
+ if not pop.is_internal and pop.is_firing_rate_set}
+ for network in self._graph.networks}
+
+ """
+ for network in self._graph.networks:
+ for pop in self._graph.get_populations(network):
+
+ if pop.is_internal:
+ dipde_pop = self.__create_internal_pop(pop)
+
+ else:
+ if pop.is_firing_rate_set:
+ rates = pop.firing_rate
+ """
+
+ @property
+ def duration(self):
+ return self._duration
+
+ @duration.setter
+ def duration(self, value):
+ self._duration = value
+
+ @property
+ def dt(self):
+ return self._dt
+
+ @dt.setter
+ def dt(self, value):
+ self._dt = value
+
+ @property
+ def rates_file(self):
+ return self._rates_file
+
+ @rates_file.setter
+ def rates_file(self, value):
+ self._rates_file = value
+
+ @property
+ def populations(self):
+ return self.__population_list
+
+ @property
+ def connections(self):
+ return self.__connection_list
+
+ def add_rates_nwb(self, network, nwb_file, trial, force=False):
+ """Creates external population firing rates from an NWB file.
+
+        Will iterate through a processing trial of an NWB file, assigning each gid to the population it belongs to and
+ taking the average firing rate.
+
+        This should be done before calling build_populations(). If a population has already been assigned a firing rate an
+ error will occur unless force=True.
+
+ :param network: Name of network with external populations.
+ :param nwb_file: NWB file with spike rates.
+ :param trial: trial id in NWB file
+ :param force: will overwrite existing firing rates
+ """
+ existing_rates = self._rates[network] # TODO: validate network exists
+ # Get all unset, external populations in a network.
+ network_pops = self._graph.get_populations(network)
+ selected_pops = []
+ for pop in network_pops:
+ if pop.is_internal:
+ continue
+ elif not force and pop.pop_id in existing_rates:
+ print('Firing rate for {}/{} has already been set, skipping.'.format(network, pop.pop_id))
+ else:
+ selected_pops.append(pop)
+
+ if selected_pops:
+ # assign firing rates from NWB file
+ # TODO:
+ rates_dict = poputils.get_firing_rate_from_nwb(selected_pops, nwb_file, trial)
+ self._rates[network].update(rates_dict)
+
+ def add_rate_hz(self, network, pop_id, rate, force=False):
+ """Set the firing rate of an external population.
+
+        This should be done before calling build_populations(). If a population has already been assigned a firing rate an
+ error will occur unless force=True.
+
+        :param network: name of the network with the wanted external population
+ :param pop_id: name/id of external population
+ :param rate: firing rate in Hz.
+ :param force: will overwrite existing firing rates
+ """
+ self.__add_rates_validator(network, pop_id, force)
+ self._rates[network][pop_id] = rate
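+        # Illustrative: add_rate_hz('LGN', 'excitatory', 15.0) sets the
+        # 'excitatory' external population in network 'LGN' to fire at 15 Hz
+        # (network and population names are hypothetical).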
+
+ def __add_rates_validator(self, network, pop_id, force):
+ if network not in self._graph.networks:
+ raise Exception('No network {} found in PopGraph.'.format(network))
+
+ pop = self._graph.get_population(network, pop_id)
+ if pop is None:
+ raise Exception('No population with id {} found in {}.'.format(pop_id, network))
+ if pop.is_internal:
+ raise Exception('Population {} in {} is not an external population.'.format(pop_id, network))
+ if not force and pop_id in self._rates[network]:
+ raise Exception('The firing rate for {}/{} already set and force=False.'.format(network, pop_id))
+
+ def _get_rate(self, network, pop):
+ """Gets the firing rate for a given population"""
+ return self._rates[network][pop.pop_id]
+
+ def build_populations(self):
+ """Build dipde Population objects from graph nodes.
+
+        To calculate an external population's firing rate, it first sees if the rate has been manually
+        set in the graph. Otherwise it attempts to calculate the firing rate from a prior call to add_rate_hz,
+        add_rates_nwb, etc. (which should be called first).
+ """
+ for network in self._graph.networks:
+ for pop in self._graph.get_populations(network):
+ if pop.is_internal:
+ dipde_pop = self.__create_internal_pop(pop)
+
+ else:
+ dipde_pop = self.__create_external_pop(pop, self._get_rate(network, pop))
+
+ self.__population_list.append(dipde_pop)
+ self.__population_table[network][pop.pop_id] = dipde_pop
+
+ def set_logging(self, log_file):
+ # TODO: move this out of the function, put in io class
+ if os.path.exists(log_file):
+ os.remove(log_file)
+
+ # get root logger
+ logger = logging.getLogger()
+ for h in list(logger.handlers):
+ # remove existing handlers that will write to console.
+ logger.removeHandler(h)
+
+ # creates handler that write to log_file
+ logging.basicConfig(filename=log_file, filemode='w', level=logging.DEBUG)
+
+ def set_external_connections(self, network_name):
+ """Sets the external connections for populations in a given network.
+
+ :param network_name: name of external network with External Populations to connect to internal pops.
+ """
+ for edge in self._graph.get_edges(network_name):
+ # Get source and target populations
+ src = edge.source
+ source_pop = self.__population_table[src.network][src.pop_id]
+ trg = edge.target
+ target_pop = self.__population_table[trg.network][trg.pop_id]
+
+ # build a connection.
+ self.__connection_list.append(self.__create_connection(source_pop, target_pop, edge))
+
+ def set_recurrent_connections(self):
+ """Initialize internal connections."""
+ for network in self._graph.internal_networks():
+ for edge in self._graph.get_edges(network):
+ src = edge.source
+ source_pop = self.__population_table[src.network][src.pop_id]
+ trg = edge.target
+ target_pop = self.__population_table[trg.network][trg.pop_id]
+ self.__connection_list.append(self.__create_connection(source_pop, target_pop, edge))
+
+ def run(self, duration=None):
+ # TODO: Check if cells/connections need to be rebuilt.
+
+        # Create the network
+ self._dipde_network = dipde.Network(population_list=self.populations, connection_list=self.__connection_list)
+
+ if duration is None:
+ duration = self.duration
+
+ print("running simulation...")
+ self._dipde_network.run(t0=0.0, tf=duration, dt=self.dt)
+ # TODO: make record_rates optional?
+ self.__record_rates()
+ print("done simulation.")
+
+ def __create_internal_pop(self, params):
+ # TODO: use getter methods directly in case arguments are not stored in dynamics params
+ # pop = InternalPopulation(**params.dynamics_params)
+ pop = InternalPopulation(**params.model_params)
+ return pop
+
+ def __create_external_pop(self, params, rates):
+ pop = ExternalPopulation(rates, record=False)
+ return pop
+
+ def __create_connection(self, source, target, params):
+ return Connection(source, target, nsyn=params.nsyns, delays=params.delay, weights=params.weight)
+
+ def __record_rates(self):
+ with open(self._rates_file, 'w') as f:
+ # TODO: store internal populations separately, unless there is a reason to save external populations
+ # (there isn't and it will be problematic)
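+            # Each line written below has the form: "<pop_id> <time> <firing_rate>"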
+ for network, pop_list in self.__population_table.items():
+ for pop_id, pop in pop_list.items():
+ if pop.record:
+ for time, rate in zip(pop.t_record, pop.firing_rate_record):
+ f.write('{} {} {}\n'.format(pop_id, time, rate))
+
+ @classmethod
+ def from_config(cls, configure, graph):
+ # load the json file or object
+        if isinstance(configure, string_types):  # six keeps this py2/py3 compatible
+ config = cfg.from_json(configure, validate=True)
+ elif isinstance(configure, dict):
+ config = configure
+ else:
+ raise Exception('Could not convert {} (type "{}") to json.'.format(configure, type(configure)))
+ network = cls(graph)
+
+ if 'run' not in config:
+ raise Exception('Json file is missing "run" entry. Unable to build Bionetwork.')
+ run_dict = config['run']
+
+ # Create the output file
+ if 'output' in config:
+ out_dict = config['output']
+
+ rates_file = out_dict.get('rates_file', None)
+ if rates_file is not None:
+ # create directory if required
+ network.rates_file = rates_file
+ parent_dir = os.path.dirname(rates_file)
+ if not os.path.exists(parent_dir):
+ os.makedirs(parent_dir)
+
+ if 'log_file' in out_dict:
+ log_file = out_dict['log_file']
+ network.set_logging(log_file)
+
+ # get network parameters
+ if 'duration' in run_dict:
+ network.duration = run_dict['duration']
+
+ if 'dt' in run_dict:
+ network.dt = run_dict['dt']
+
+ # TODO: need to get firing rates before building populations
+ if 'input' in config:
+ for netinput in config['input']:
+ if netinput['type'] == 'external_spikes' and netinput['format'] == 'nwb' and netinput['active']:
+ # Load external network spike trains from an NWB file.
+ print('Setting firing rates for {} from {}.'.format(netinput['source_nodes'], netinput['file']))
+ network.add_rates_nwb(netinput['source_nodes'], netinput['file'], netinput['trial'])
+
+ if netinput['type'] == 'pop_rate':
+ print('Setting {}/{} to fire at {} Hz.'.format(netinput['source_nodes'], netinput['pop_id'], netinput['rate']))
+ network.add_rate_hz(netinput['source_nodes'], netinput['pop_id'], netinput['rate'])
+
+ # TODO: take input as function with Population argument
+
+ # Build populations
+ print('Building Populations')
+ network.build_populations()
+
+ # Build recurrent connections
+ if run_dict['connect_internal']:
+            print('Building recurrent connections')
+ network.set_recurrent_connections()
+
+ # Build external connections. Set connection to default True and turn off only if explicitly stated.
+        # NOTE: It might be better to default to off. Need to discuss what would be more intuitive for the users.
+ # TODO: ignore case of network name
+ external_network_settings = {name: True for name in graph.external_networks()}
+ if 'connect_external' in run_dict:
+ external_network_settings.update(run_dict['connect_external'])
+ for netname, connect in external_network_settings.items():
+ if connect:
+ print('Setting external connections for {}'.format(netname))
+ network.set_external_connections(netname)
+
+ return network
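+
+# A minimal config dict accepted by from_config (illustrative values only):
+#   {"run": {"duration": 1.0, "dt": 0.0002, "connect_internal": True},
+#    "output": {"rates_file": "output/rates.txt"},
+#    "input": [{"type": "pop_rate", "source_nodes": "LGN",
+#               "pop_id": "excitatory", "rate": 15.0}]}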
diff --git a/bmtk-vb/bmtk/simulator/popnet/popnode.py b/bmtk-vb/bmtk/simulator/popnet/popnode.py
new file mode 100644
index 0000000..6288762
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/popnode.py
@@ -0,0 +1,158 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from bmtk.simulator.utils.graph import SimNode
+
+class PopNode(SimNode):
+ def __init__(self, node_id, graph, network, params):
+ self._graph = graph
+ self._node_id = node_id
+ self._network = network
+ self._graph_params = params
+
+ self._dynamics_params = {}
+        self._updated_params = {'dynamics_params': self._dynamics_params}
+        self._model_params = None  # referenced by __getitem__; may be assigned after construction
+
+ self._gids = set()
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def pop_id(self):
+ return self._node_id
+
+ @property
+ def network(self):
+ return self._network
+
+ @property
+ def dynamics_params(self):
+ return self._dynamics_params
+
+ @dynamics_params.setter
+ def dynamics_params(self, value):
+ self._dynamics_params = value
+
+ @property
+ def is_internal(self):
+ return False
+
+ def __getitem__(self, item):
+ if item in self._updated_params:
+ return self._updated_params[item]
+ elif item in self._graph_params:
+ return self._graph_params[item]
+ elif self._model_params is not None:
+ return self._model_params[item]
+
+ def add_gid(self, gid):
+ self._gids.add(gid)
+
+ def get_gids(self):
+ return list(self._gids)
+
+
+class InternalNode(PopNode):
+ """
+ def __init__(self, node_id, graph, network, params):
+ super(InternalNode, self).__init__(node_id, graph, network, params)
+ #self._pop_id = node_id
+ #self._graph = graph
+ #self._network = network
+ #self._graph_params = params
+ #self._dynamics_params = {}
+ #self._update_params = {'dynamics_params': self._dynamics_params}
+ """
+ @property
+ def tau_m(self):
+ return self['tau_m']
+ #return self._dynamics_params.get('tau_m', None)
+
+ @tau_m.setter
+ def tau_m(self, value):
+ #return self['tau_m']
+ self._dynamics_params['tau_m'] = value
+
+ @property
+ def v_max(self):
+ return self._dynamics_params.get('v_max', None)
+
+ @v_max.setter
+ def v_max(self, value):
+ self._dynamics_params['v_max'] = value
+
+ @property
+ def dv(self):
+ return self._dynamics_params.get('dv', None)
+
+ @dv.setter
+ def dv(self, value):
+ self._dynamics_params['dv'] = value
+
+ @property
+ def v_min(self):
+ return self._dynamics_params.get('v_min', None)
+
+ @v_min.setter
+ def v_min(self, value):
+ self._dynamics_params['v_min'] = value
+
+ @property
+ def is_internal(self):
+ return True
+
+ def __repr__(self):
+ props = 'pop_id={}, tau_m={}, v_max={}, v_min={}, dv={}'.format(self.pop_id, self.tau_m, self.v_max, self.v_min,
+ self.dv)
+ return 'InternalPopulation({})'.format(props)
+
+
+class ExternalPopulation(PopNode):
+ def __init__(self, node_id, graph, network, params):
+ super(ExternalPopulation, self).__init__(node_id, graph, network, params)
+ self._firing_rate = -1
+ if 'firing_rate' in params:
+ self._firing_rate = params['firing_rate']
+
+ @property
+ def firing_rate(self):
+ return self._firing_rate
+
+ @property
+ def is_firing_rate_set(self):
+ return self._firing_rate >= 0
+
+ @firing_rate.setter
+ def firing_rate(self, rate):
+ assert(isinstance(rate, float) and rate >= 0)
+ self._firing_rate = rate
+
+ @property
+ def is_internal(self):
+ return False
+
+ def __repr__(self):
+ props = 'pop_id={}, firing_rate={}'.format(self.pop_id, self.firing_rate)
+ return 'ExternalPopulation({})'.format(props)
+
diff --git a/bmtk-vb/bmtk/simulator/popnet/popsimulator.py b/bmtk-vb/bmtk/simulator/popnet/popsimulator.py
new file mode 100644
index 0000000..38c660a
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/popsimulator.py
@@ -0,0 +1,451 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import logging
+from six import string_types
+
+from dipde.internals.internalpopulation import InternalPopulation
+from dipde.internals.externalpopulation import ExternalPopulation
+from dipde.internals.connection import Connection
+import dipde
+
+from bmtk.simulator.core.simulator import Simulator
+from . import config as cfg
+from . import utils as poputils
+import bmtk.simulator.utils.simulation_inputs as inputs
+from bmtk.utils.io import spike_trains, firing_rates
+
+
+class PopSimulator(Simulator):
+ def __init__(self, graph, dt=0.0001, tstop=0.0, overwrite=True):
+ self._graph = graph
+
+ self._tstop = tstop
+ self._dt = dt
+ self._rates_file = None # name of file where the output is saved
+
+ self.__population_list = [] # list of all populations, internal and external
+ #self.__population_table = {graph: {} for graph in self._graph.networks} # population lookup by [network][id]
+ self.__connection_list = [] # list of all connections
+ self._dipde_network = None # reference to dipde.Network object
+
+        # dictionary of rates for every external network/pop_id. Prepopulate it with populations whose rates
+        # have already been manually set; the rest should use one of the add_rates_* functions.
+ #self._rates = {network: {pop.pop_id: pop.firing_rate for pop in self._graph.get_populations(network)
+ # if not pop.is_internal and pop.is_firing_rate_set}
+ # for network in self._graph.networks}
+
+ """
+ for network in self._graph.networks:
+ for pop in self._graph.get_populations(network):
+
+ if pop.is_internal:
+ dipde_pop = self.__create_internal_pop(pop)
+
+ else:
+ if pop.is_firing_rate_set:
+ rates = pop.firing_rate
+ """
+
+ @property
+ def tstop(self):
+ return self._tstop
+
+ @tstop.setter
+ def tstop(self, value):
+ self._tstop = value
+
+ @property
+ def dt(self):
+ return self._dt
+
+ @dt.setter
+ def dt(self, value):
+ self._dt = value
+
+ @property
+ def rates_file(self):
+ return self._rates_file
+
+ @rates_file.setter
+ def rates_file(self, value):
+ self._rates_file = value
+
+ @property
+ def populations(self):
+ return self.__population_list
+
+ @property
+ def connections(self):
+ return self.__connection_list
+
+ def add_rates_nwb(self, network, nwb_file, trial, force=False):
+ """Creates external population firing rates from an NWB file.
+
+        Will iterate through a processing trial of an NWB file, assigning each gid to the population it belongs to and
+ taking the average firing rate.
+
+        This should be done before calling build_populations(). If a population has already been assigned a firing rate an
+ error will occur unless force=True.
+
+ :param network: Name of network with external populations.
+ :param nwb_file: NWB file with spike rates.
+ :param trial: trial id in NWB file
+ :param force: will overwrite existing firing rates
+ """
+ existing_rates = self._rates[network] # TODO: validate network exists
+ # Get all unset, external populations in a network.
+ network_pops = self._graph.get_populations(network)
+ selected_pops = []
+ for pop in network_pops:
+ if pop.is_internal:
+ continue
+ elif not force and pop.pop_id in existing_rates:
+ print('Firing rate for {}/{} has already been set, skipping.'.format(network, pop.pop_id))
+ else:
+ selected_pops.append(pop)
+
+ if selected_pops:
+ # assign firing rates from NWB file
+ # TODO:
+ rates_dict = poputils.get_firing_rate_from_nwb(selected_pops, nwb_file, trial)
+ self._rates[network].update(rates_dict)
+
+ def add_rate_hz(self, network, pop_id, rate, force=False):
+ """Set the firing rate of an external population.
+
+        This should be done before calling build_populations(). If a population has already been assigned a firing rate an
+ error will occur unless force=True.
+
+        :param network: name of the network with the wanted external population
+ :param pop_id: name/id of external population
+ :param rate: firing rate in Hz.
+ :param force: will overwrite existing firing rates
+ """
+ self.__add_rates_validator(network, pop_id, force)
+ self._rates[network][pop_id] = rate
+
+ def __add_rates_validator(self, network, pop_id, force):
+ if network not in self._graph.networks:
+ raise Exception('No network {} found in PopGraph.'.format(network))
+
+ pop = self._graph.get_population(network, pop_id)
+ if pop is None:
+ raise Exception('No population with id {} found in {}.'.format(pop_id, network))
+ if pop.is_internal:
+ raise Exception('Population {} in {} is not an external population.'.format(pop_id, network))
+ if not force and pop_id in self._rates[network]:
+ raise Exception('The firing rate for {}/{} already set and force=False.'.format(network, pop_id))
+
+ def _get_rate(self, network, pop):
+ """Gets the firing rate for a given population"""
+ return self._rates[network][pop.pop_id]
+
+ def build_populations(self):
+ """Build dipde Population objects from graph nodes.
+
+        To determine an external population's firing rate, it first checks whether the rate was manually set in the
+        graph. Otherwise it uses the rate supplied by an earlier call to add_rate_hz, add_rates_nwb, etc. (which must
+        be called beforehand).
+ """
+ for network in self._graph.networks:
+ for pop in self._graph.get_populations(network):
+ if pop.is_internal:
+ dipde_pop = self.__create_internal_pop(pop)
+
+ else:
+ dipde_pop = self.__create_external_pop(pop, self._get_rate(network, pop))
+
+ self.__population_list.append(dipde_pop)
+ self.__population_table[network][pop.pop_id] = dipde_pop
+
+ def set_logging(self, log_file):
+ # TODO: move this out of the function, put in io class
+ if os.path.exists(log_file):
+ os.remove(log_file)
+
+ # get root logger
+ logger = logging.getLogger()
+ for h in list(logger.handlers):
+ # remove existing handlers that will write to console.
+ logger.removeHandler(h)
+
+        # create a handler that writes to log_file
+ logging.basicConfig(filename=log_file, filemode='w', level=logging.DEBUG)
+
+ def set_external_connections(self, network_name):
+ """Sets the external connections for populations in a given network.
+
+ :param network_name: name of external network with External Populations to connect to internal pops.
+ """
+ for edge in self._graph.get_edges(network_name):
+ # Get source and target populations
+ src = edge.source
+ source_pop = self.__population_table[src.network][src.pop_id]
+ trg = edge.target
+ target_pop = self.__population_table[trg.network][trg.pop_id]
+
+ # build a connection.
+ self.__connection_list.append(self.__create_connection(source_pop, target_pop, edge))
+
+ def set_recurrent_connections(self):
+ """Initialize internal connections."""
+ for network in self._graph.internal_networks():
+ for edge in self._graph.get_edges(network):
+ src = edge.source
+ source_pop = self.__population_table[src.network][src.pop_id]
+ trg = edge.target
+ target_pop = self.__population_table[trg.network][trg.pop_id]
+ self.__connection_list.append(self.__create_connection(source_pop, target_pop, edge))
+
+ def run(self, tstop=None):
+ # TODO: Check if cells/connections need to be rebuilt.
+
+        # Create the network
+ dipde_pops = [p.dipde_obj for p in self._graph.populations]
+ dipde_conns = [c.dipde_obj for c in self._graph.connections]
+
+ self._dipde_network = dipde.Network(population_list=dipde_pops, connection_list=dipde_conns)
+
+ if tstop is None:
+ tstop = self.tstop
+
+ print("running simulation...")
+ self._dipde_network.run(t0=0.0, tf=tstop, dt=self.dt)
+ # TODO: make record_rates optional?
+ self.__record_rates()
+ print("done simulation.")
+
+ def __create_internal_pop(self, params):
+ # TODO: use getter methods directly in case arguments are not stored in dynamics params
+ # pop = InternalPopulation(**params.dynamics_params)
+ pop = InternalPopulation(**params.model_params)
+ return pop
+
+ def __create_external_pop(self, params, rates):
+ pop = ExternalPopulation(rates, record=False)
+ return pop
+
+ def __create_connection(self, source, target, params):
+ return Connection(source, target, nsyn=params.nsyns, delays=params.delay, weights=params.weight)
+
+ def __record_rates(self):
+ with open(self._rates_file, 'w') as f:
+ for pop in self._graph.internal_populations:
+ if pop.record:
+ for time, rate in zip(pop.dipde_obj.t_record, pop.dipde_obj.firing_rate_record):
+ f.write('{} {} {}\n'.format(pop.pop_id, time, rate))
+
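+    # __record_rates writes one whitespace-separated line per recorded sample:
+    #
+    #   <pop_id> <time> <firing_rate>
+    #
+    # A minimal sketch for reading the file back (assumes pandas is available;
+    # the population name is illustrative):
+    #
+    #   import pandas as pd
+    #   df = pd.read_csv(rates_file, sep=' ', names=['pop_id', 'time', 'rate'])
+    #   exc_rates = df[df['pop_id'] == 'excitatory']
+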
+ '''
+ @classmethod
+ def from_config(cls, configure, graph):
+ # load the json file or object
+ if isinstance(configure, basestring):
+ config = cfg.from_json(configure, validate=True)
+ elif isinstance(configure, dict):
+ config = configure
+ else:
+ raise Exception('Could not convert {} (type "{}") to json.'.format(configure, type(configure)))
+ network = cls(graph)
+
+ if 'run' not in config:
+ raise Exception('Json file is missing "run" entry. Unable to build Bionetwork.')
+ run_dict = config['run']
+
+ # Create the output file
+ if 'output' in config:
+ out_dict = config['output']
+
+ rates_file = out_dict.get('rates_file', None)
+ if rates_file is not None:
+ # create directory if required
+ network.rates_file = rates_file
+ parent_dir = os.path.dirname(rates_file)
+ if not os.path.exists(parent_dir):
+ os.makedirs(parent_dir)
+
+ if 'log_file' in out_dict:
+ log_file = out_dict['log_file']
+ network.set_logging(log_file)
+
+ # get network parameters
+ if 'duration' in run_dict:
+ network.duration = run_dict['duration']
+
+ if 'dt' in run_dict:
+ network.dt = run_dict['dt']
+
+ # TODO: need to get firing rates before building populations
+ if 'input' in config:
+ for netinput in config['input']:
+ if netinput['type'] == 'external_spikes' and netinput['format'] == 'nwb' and netinput['active']:
+ # Load external network spike trains from an NWB file.
+ print('Setting firing rates for {} from {}.'.format(netinput['source_nodes'], netinput['file']))
+ network.add_rates_nwb(netinput['source_nodes'], netinput['file'], netinput['trial'])
+
+ if netinput['type'] == 'pop_rate':
+ print('Setting {}/{} to fire at {} Hz.'.format(netinput['source_nodes'], netinput['pop_id'], netinput['rate']))
+ network.add_rate_hz(netinput['source_nodes'], netinput['pop_id'], netinput['rate'])
+
+ # TODO: take input as function with Population argument
+
+ # Build populations
+ print('Building Populations')
+ network.build_populations()
+
+ # Build recurrent connections
+ if run_dict['connect_internal']:
+            print('Building recurrent connections')
+ network.set_recurrent_connections()
+
+ # Build external connections. Set connection to default True and turn off only if explicitly stated.
+        # NOTE: It might be better to default to off. Need to discuss what would be more intuitive for users.
+ # TODO: ignore case of network name
+ external_network_settings = {name: True for name in graph.external_networks()}
+ if 'connect_external' in run_dict:
+ external_network_settings.update(run_dict['connect_external'])
+ for netname, connect in external_network_settings.items():
+ if connect:
+ print('Setting external connections for {}'.format(netname))
+ network.set_external_connections(netname)
+
+ return network
+ '''
+
+ @classmethod
+ def from_config(cls, configure, graph):
+ # load the json file or object
+ if isinstance(configure, string_types):
+ config = cfg.from_json(configure, validate=True)
+ elif isinstance(configure, dict):
+ config = configure
+ else:
+ raise Exception('Could not convert {} (type "{}") to json.'.format(configure, type(configure)))
+
+ if 'run' not in config:
+            raise Exception('Json file is missing "run" entry. Unable to build the network.')
+ run_dict = config['run']
+
+ # Get network parameters
+ # step time (dt) is set in the kernel and should be passed
+        overwrite = run_dict.get('overwrite_output_dir', True)
+        print_time = run_dict.get('print_time', False)
+        dt = run_dict['dt']  # TODO: make sure dt exists
+        tstop = float(config.tstop) / 1000.0  # convert tstop from ms to seconds
+        network = cls(graph, dt=dt, tstop=tstop, overwrite=overwrite)
+
+        if 'output' in config and 'output_dir' in config['output']:
+ network.output_dir = config['output']['output_dir']
+
+ # network.spikes_file = config['output']['spikes_ascii']
+
+ if 'block_run' in run_dict and run_dict['block_run']:
+ if 'block_size' not in run_dict:
+ raise Exception('"block_run" is set to True but "block_size" not found.')
+ network._block_size = run_dict['block_size']
+
+ if 'duration' in run_dict:
+ network.duration = run_dict['duration']
+
+ graph.io.log_info('Building cells.')
+ graph.build_nodes()
+
+ graph.io.log_info('Building recurrent connections')
+ graph.build_recurrent_edges()
+
+ for sim_input in inputs.from_config(config):
+ node_set = graph.get_node_set(sim_input.node_set)
+ if sim_input.input_type == 'spikes':
+ spikes = spike_trains.SpikesInput.load(name=sim_input.name, module=sim_input.module,
+ input_type=sim_input.input_type, params=sim_input.params)
+ graph.io.log_info('Build virtual cell stimulations for {}'.format(sim_input.name))
+ graph.add_spike_trains(spikes, node_set)
+ else:
+ graph.io.log_info('Build virtual cell stimulations for {}'.format(sim_input.name))
+ rates = firing_rates.RatesInput(sim_input.params)
+ graph.add_rates(rates, node_set)
+
+ # Create the output file
+ if 'output' in config:
+ out_dict = config['output']
+
+ rates_file = out_dict.get('rates_file', None)
+ if rates_file is not None:
+ rates_file = rates_file if os.path.isabs(rates_file) else os.path.join(config.output_dir, rates_file)
+ # create directory if required
+ network.rates_file = rates_file
+ parent_dir = os.path.dirname(rates_file)
+ if not os.path.exists(parent_dir):
+ os.makedirs(parent_dir)
+
+ if 'log_file' in out_dict:
+ log_file = out_dict['log_file']
+ network.set_logging(log_file)
+
+ graph.io.log_info('Network created.')
+ return network
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/popnet/property_schemas/__init__.py b/bmtk-vb/bmtk/simulator/popnet/property_schemas/__init__.py
new file mode 100644
index 0000000..4d7c64c
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/property_schemas/__init__.py
@@ -0,0 +1,28 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .base_schema import PopTypes
+from . import property_schema_ver0 as v0
+from . import property_schema_ver1 as v1
+
+DefaultPropertySchema = v1.PropertySchema()
+AIPropertySchema = v0.PropertySchema()
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/simulator/popnet/property_schemas/base_schema.py b/bmtk-vb/bmtk/simulator/popnet/property_schemas/base_schema.py
new file mode 100644
index 0000000..cc880a6
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/property_schemas/base_schema.py
@@ -0,0 +1,50 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+class PopTypes:
+ """Essentially an enum to store the type/group of each cell. It's faster and more robust than doing multiple string
+ comparisons.
+ """
+ Internal = 0
+ External = 1
+ Other = 2 # should never really get here
+
+ @staticmethod
+ def len():
+ return 3
+
+
+class PropertySchema(object):
+ #######################################
+ # For nodes/cells properties
+ #######################################
+ def get_pop_type(self, pop_params):
+ model_type = pop_params['model_type'].lower()
+ if model_type == 'virtual' or model_type == 'external':
+ return PopTypes.External
+ elif model_type == 'internal':
+ return PopTypes.Internal
+ else:
+            return PopTypes.Other
+
+ def get_params_column(self):
+ raise NotImplementedError()
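+
+
+# get_pop_type maps the SONATA 'model_type' column onto the PopTypes values above:
+#   'virtual' / 'external' -> PopTypes.External
+#   'internal'             -> PopTypes.Internal
+#   anything else          -> PopTypes.Other  (unexpected model types)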
diff --git a/bmtk-vb/bmtk/simulator/popnet/property_schemas/property_schema_ver0.py b/bmtk-vb/bmtk/simulator/popnet/property_schemas/property_schema_ver0.py
new file mode 100644
index 0000000..6c5c542
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/property_schemas/property_schema_ver0.py
@@ -0,0 +1,28 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .base_schema import PopTypes, PropertySchema as BaseSchema
+
+
+class PropertySchema(BaseSchema):
+ def get_params_column(self):
+ return 'params_file'
diff --git a/bmtk-vb/bmtk/simulator/popnet/property_schemas/property_schema_ver1.py b/bmtk-vb/bmtk/simulator/popnet/property_schemas/property_schema_ver1.py
new file mode 100644
index 0000000..8794525
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/property_schemas/property_schema_ver1.py
@@ -0,0 +1,28 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .base_schema import PropertySchema as BaseSchema
+
+
+class PropertySchema(BaseSchema):
+ def get_params_column(self):
+ return 'dynamics_params'
diff --git a/bmtk-vb/bmtk/simulator/popnet/sonata_adaptors.py b/bmtk-vb/bmtk/simulator/popnet/sonata_adaptors.py
new file mode 100644
index 0000000..dcc1300
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/sonata_adaptors.py
@@ -0,0 +1,12 @@
+from bmtk.simulator.core.sonata_reader import NodeAdaptor, SonataBaseNode, EdgeAdaptor, SonataBaseEdge
+
+
+class PopNetEdge(SonataBaseEdge):
+ @property
+ def syn_weight(self):
+ return self._edge['syn_weight']
+
+
+class PopEdgeAdaptor(EdgeAdaptor):
+ def get_edge(self, sonata_edge):
+ return PopNetEdge(sonata_edge, self)
diff --git a/bmtk-vb/bmtk/simulator/popnet/utils.py b/bmtk-vb/bmtk/simulator/popnet/utils.py
new file mode 100644
index 0000000..ceeeaa3
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/popnet/utils.py
@@ -0,0 +1,287 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import math
+import warnings
+import numpy as np
+import pandas as pd
+import scipy.interpolate as spinterp
+import collections
+import h5py
+import itertools
+import scipy.io as sio
+import json
+import importlib
+
+"""
+Most of these functions are not being used directly by popnet, but may still be used in some other capcity. These have
+been marked as depreciated, and should be removed soon.
+
+
+"""
+
+
+def get_firing_rate_from_nwb(populations, nwb_file, trial):
+ """Calculates firing rates for an external population"""
+ h5_file = h5py.File(nwb_file, 'r')
+ spike_trains_ds = h5_file['processing'][trial]['spike_train']
+
+ # TODO: look into adding a time window rather than searching for min/max t.
+ firing_rates = {}
+ for pop in populations:
+ spike_counts = []
+ spike_min_t = 1.0e30
+ spike_max_t = 0.0
+ for gid in pop.get_gids():
+ spike_train_ds = spike_trains_ds[str(gid)]['data']
+ if spike_train_ds is not None and len(spike_train_ds[...]) > 0:
+ spike_times = spike_train_ds[...]
+ tmp_min = min(spike_times)
+ spike_min_t = tmp_min if tmp_min < spike_min_t else spike_min_t
+ tmp_max = max(spike_times)
+ spike_max_t = tmp_max if tmp_max > spike_max_t else spike_max_t
+ spike_counts.append(len(spike_times))
+
+        # TODO: make sure spike_max_t > spike_min_t and spike_counts is non-empty before dividing
+ firing_rates[pop.pop_id] = 1.0e03 * np.mean(spike_counts) / (spike_max_t - spike_min_t)
+ return firing_rates
+
+
+def get_firing_rates(populations, spike_trains):
+ """Calculates firing rates for an external population"""
+ #h5_file = h5py.File(nwb_file, 'r')
+ #spike_trains_ds = h5_file['processing'][trial]['spike_train']
+
+ # TODO: look into adding a time window rather than searching for min/max t.
+ firing_rates = {}
+ for pop in populations:
+ spike_counts = []
+ spike_min_t = 1.0e30
+ spike_max_t = 0.0
+ for gid in pop.get_gids():
+ spike_times = spike_trains.get_spikes(gid)
+ if spike_times is not None and len(spike_times) > 0:
+ tmp_min = min(spike_times)
+ spike_min_t = tmp_min if tmp_min < spike_min_t else spike_min_t
+ tmp_max = max(spike_times)
+ spike_max_t = tmp_max if tmp_max > spike_max_t else spike_max_t
+ spike_counts.append(len(spike_times))
+
+        # TODO: make sure spike_max_t > spike_min_t and spike_counts is non-empty before dividing
+ firing_rates[pop.pop_id] = 1.0e03 * np.mean(spike_counts) / (spike_max_t - spike_min_t)
+ return firing_rates
+
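+
+# Both functions above estimate each population's mean rate with the same formula:
+# rate_hz = 1.0e3 * mean(spike_counts) / (spike_max_t - spike_min_t), with spike
+# times assumed to be in milliseconds. A self-contained sketch of the arithmetic
+# (the spike trains below are made up for illustration):
+def _example_mean_firing_rate():
+    spike_trains = [[10.0, 210.0, 410.0], [110.0, 310.0]]  # ms; one list per cell
+    spike_counts = [len(st) for st in spike_trains]
+    t_min = min(min(st) for st in spike_trains)
+    t_max = max(max(st) for st in spike_trains)
+    # mean count of 2.5 spikes over a 400 ms window -> 6.25 Hz
+    return 1.0e3 * (sum(spike_counts) / float(len(spike_counts))) / (t_max - t_min)
+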
+#############################################
+# Deprecated
+#############################################
+def list_of_dicts_to_dict_of_lists(list_of_dicts, default=None):
+ new_dict = {}
+ for curr_dict in list_of_dicts:
+ print(curr_dict.keys())
+
+
+#############################################
+# Deprecated
+#############################################
+class KeyDefaultDict(collections.defaultdict):
+ def __missing__(self, key):
+ if self.default_factory is None:
+            raise KeyError(key)
+ else:
+ ret = self[key] = self.default_factory(key)
+ return ret
+
+
+#############################################
+# Deprecated
+#############################################
+def create_firing_rate_server(t, y):
+
+ warnings.warn('Hard coded bug fix for mindscope council 4/27/15')
+ t = t/.001/200
+ interpolation_callable = spinterp.interp1d(t, y, bounds_error=False, fill_value=0)
+ return lambda t: interpolation_callable(t)
+
+
+#############################################
+# Deprecated
+#############################################
+def create_nwb_server_file_path(nwb_file_name, nwb_path):
+ f = h5py.File(nwb_file_name, 'r')
+ y = f['%s/data' % nwb_path][:]
+ dt = f['%s/data' % nwb_path].dims[0][0].value
+ t = np.arange(len(y))*dt
+ f.close()
+ return create_firing_rate_server(t, y)
+
+
+#############################################
+# Deprecated
+#############################################
+def get_mesoscale_connectivity_dict():
+
+ # Extract data into a dictionary:
+ mesoscale_data_dir = '/data/mat/iSee_temp_shared/packages/mesoscale_connectivity'
+ nature_data = {}
+ for mat, side in itertools.product(['W', 'PValue'],['ipsi', 'contra']):
+ data, row_labels, col_labels = [sio.loadmat(os.path.join(mesoscale_data_dir, '%s_%s.mat' % (mat, side)))[key]
+ for key in ['data', 'row_labels', 'col_labels']]
+ for _, (row_label, row) in enumerate(zip(row_labels, data)):
+ for _, (col_label, val) in enumerate(zip(col_labels, row)):
+ nature_data[mat, side, str(row_label.strip()), str(col_label.strip())] = val
+
+ return nature_data
+
+
+#############################################
+# Deprecated
+#############################################
+def reorder_columns_in_frame(frame, var):
+ varlist = [w for w in frame.columns if w not in var]
+ return frame[var+varlist]
+
+
+#############################################
+# Deprecated
+#############################################
+def population_to_dict_for_dataframe(p):
+
+ black_list = ['firing_rate_record',
+ 'initial_firing_rate',
+ 'metadata',
+ 't_record']
+
+ json_list = ['p0', 'tau_m']
+
+ return_dict = {}
+ p_dict = p.to_dict()
+
+ for key, val in p_dict['metadata'].items():
+ return_dict[key] = val
+
+ for key, val in p_dict.items():
+ if key not in black_list:
+ if key in json_list:
+ val = json.dumps(val)
+ return_dict[key] = val
+
+ return return_dict
+
+
+#############################################
+# Deprecated
+#############################################
+def network_dict_to_target_adjacency_dict(network_dict):
+ print(network_dict)
+
+
+#############################################
+# Deprecated
+#############################################
+def population_list_to_dataframe(population_list):
+ df = pd.DataFrame({'_tmp': [None]})
+ for p in population_list:
+ model_dict = {'_tmp': [None]}
+ for key, val in population_to_dict_for_dataframe(p).items():
+ model_dict.setdefault(key, []).append(val)
+ df_tmp = pd.DataFrame(model_dict)
+
+ df = pd.merge(df, df_tmp, how='outer')
+ df.drop('_tmp', inplace=True, axis=1)
+ return df
+
+
+#############################################
+# Deprecated
+#############################################
+def df_to_csv(df, save_file_name, index=False, sep=' ', na_rep='None'):
+ df.to_csv(save_file_name, index=index, sep=sep, na_rep=na_rep)
+
+
+#############################################
+# Deprecated
+#############################################
+def population_list_to_csv(population_list, save_file_name):
+ df = population_list_to_dataframe(population_list)
+ df_to_csv(df, save_file_name)
+
+
+#############################################
+# Deprecated
+#############################################
+def create_instance(data_dict):
+ '''Helper function to create an object from a dictionary containing:
+
+ "module": The name of the module containing the class
+ "class": The name of the class to be used to create the object
+ '''
+
+ curr_module, curr_class = data_dict.pop('module'), data_dict.pop('class')
+ curr_instance = getattr(importlib.import_module(curr_module), curr_class)(**data_dict)
+
+ return curr_instance
+
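+# Example (hypothetical): any importable class can be created this way; remaining keys
+# are passed to the constructor as keyword arguments. Note that 'module' and 'class'
+# are popped, so the input dictionary is mutated.
+#
+#   d = {'module': 'collections', 'class': 'OrderedDict'}
+#   obj = create_instance(d)   # equivalent to collections.OrderedDict()
+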
+
+#############################################
+# Deprecated
+#############################################
+def assert_model_known(model, model_dict):
+    """Test whether a model is in model_dict; if not, raise an exception"""
+    if model not in model_dict:
+        raise Exception('model {} does not exist.'.format(model))
+
+
+#############################################
+# Deprecated
+#############################################
+def create_population_list(node_table, model_table):
+ """Create a population list from the node and model pandas tables"""
+
+ model_dict = {}
+ for row in model_table.iterrows():
+ model = row[1].to_dict()
+ model_dict[model.pop('model')] = model
+
+ population_list = []
+ for row in node_table.iterrows():
+ node = row[1].to_dict()
+ model = node.pop('model')
+
+ # Check if model type in model dict:
+ assert_model_known(model, model_dict)
+
+ # Clean up:
+ curr_model = {}
+ for key, val in model_dict[model].items():
+ if not (isinstance(val, float) and math.isnan(val)):
+ curr_model[key] = val
+ curr_model.setdefault('metadata', {})['model'] = model
+
+ curr_module, curr_class = curr_model['module'], curr_model['class']
+ curr_instance = getattr(importlib.import_module(curr_module), curr_class)(**curr_model)
+ population_list.append(curr_instance)
+
+ return population_list
diff --git a/bmtk-vb/bmtk/simulator/utils/__init__.py b/bmtk-vb/bmtk/simulator/utils/__init__.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/bmtk/simulator/utils/__init__.pyc b/bmtk-vb/bmtk/simulator/utils/__init__.pyc
new file mode 100644
index 0000000..d08f5a6
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/utils/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/utils/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/simulator/utils/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..c9640cf
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/utils/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/utils/__pycache__/config.cpython-37.pyc b/bmtk-vb/bmtk/simulator/utils/__pycache__/config.cpython-37.pyc
new file mode 100644
index 0000000..ef4a035
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/utils/__pycache__/config.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/utils/__pycache__/sim_validator.cpython-37.pyc b/bmtk-vb/bmtk/simulator/utils/__pycache__/sim_validator.cpython-37.pyc
new file mode 100644
index 0000000..2aa7e21
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/utils/__pycache__/sim_validator.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/utils/__pycache__/simulation_inputs.cpython-37.pyc b/bmtk-vb/bmtk/simulator/utils/__pycache__/simulation_inputs.cpython-37.pyc
new file mode 100644
index 0000000..58e5306
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/utils/__pycache__/simulation_inputs.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/utils/__pycache__/simulation_reports.cpython-37.pyc b/bmtk-vb/bmtk/simulator/utils/__pycache__/simulation_reports.cpython-37.pyc
new file mode 100644
index 0000000..9f05e3b
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/utils/__pycache__/simulation_reports.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/utils/config.py b/bmtk-vb/bmtk/simulator/utils/config.py
new file mode 100644
index 0000000..aa5ee5e
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/config.py
@@ -0,0 +1,438 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+import re
+import copy
+import datetime
+from six import string_types
+
+
+from bmtk.simulator.core.io_tools import io
+
+
+def from_json(config_file, validator=None):
+ """Builds and validates a configuration json file.
+
+ :param config_file: File object or path to a json file.
+ :param validator: A SimConfigValidator object to validate json file. Won't validate if set to None
+ :return: A dictionary, verified against json validator and with manifest variables resolved.
+ """
+ if isinstance(config_file, string_types):
+ conf = json.load(open(config_file, 'r'))
+ elif isinstance(config_file, dict):
+ conf = config_file.copy()
+ else:
+        raise Exception('{} is not a file path or dictionary.'.format(config_file))
+
+ # insert file path into dictionary
+    if 'config_path' not in conf and isinstance(config_file, string_types):
+ conf['config_path'] = os.path.abspath(config_file)
+ conf['config_dir'] = os.path.dirname(conf['config_path'])
+
+ # Will resolve manifest variables and validate
+ return from_dict(conf, validator)
+
+
+def from_dict(config_dict, validator=None):
+ """Builds and validates a configuration json dictionary object. Best to directly use from_json when possible.
+
+ :param config_dict: Dictionary object
+ :param validator: A SimConfigValidator object to validate json file. Won't validate if set to None
+ :return: A dictionary, verified against json validator and with manifest variables resolved.
+ """
+ assert(isinstance(config_dict, dict))
+ conf = copy.deepcopy(config_dict) # Since the functions will mutate the dictionary we will copy just-in-case.
+
+ if 'config_path' not in conf:
+ conf['config_path'] = os.path.join(os.getcwd(), 'tmp_cfg.dict')
+ conf['config_dir'] = os.path.dirname(conf['config_path'])
+
+ # Build the manifest and resolve variables.
+ # TODO: Check that manifest exists
+ manifest = __build_manifest(conf)
+ conf['manifest'] = manifest
+ __recursive_insert(conf, manifest)
+
+ # In our work with Blue-Brain it was agreed that 'network' and 'simulator' parts of config may be split up into
+ # separate files. If this is the case we build each sub-file separately and merge into this one
+ for childconfig in ['network', 'simulation']:
+ if childconfig in conf and isinstance(conf[childconfig], string_types):
+ # Try to resolve the path of the network/simulation config files. If an absolute path isn't used find
+ # the file relative to the current config file. TODO: test if this will work on windows?
+ conf_str = conf[childconfig]
+ conf_path = conf_str if conf_str.startswith('/') else os.path.join(conf['config_dir'], conf_str)
+
+ # Build individual json file and merge into parent.
+ child_json = from_json(conf_path)
+ del child_json['config_path'] # we don't want 'config_path' of parent being overwritten.
+ conf.update(child_json)
+
+ # Run the validator
+ if validator is not None:
+ validator.validate(conf)
+
+ return conf
+
+
+def copy_config(conf):
+ """Copy configuration file to different directory, with manifest variables resolved.
+
+ :param conf: configuration dictionary
+ """
+ output_dir = conf.output_dir
+ config_name = os.path.basename(conf['config_path'])
+ output_path = os.path.join(output_dir, config_name)
+ with open(output_path, 'w') as fp:
+ out_cfg = conf.copy()
+ if 'manifest' in out_cfg:
+ del out_cfg['manifest']
+ json.dump(out_cfg, fp, indent=2)
+
+
+def __special_variables(conf):
+ """A list of preloaded variables to insert into the manifest, containing things like path to run-time directory,
+ configuration directory, etc.
+ """
+ pre_manifest = dict()
+ pre_manifest['$workingdir'] = os.path.dirname(os.getcwd())
+ if 'config_path' in conf:
+ pre_manifest['$configdir'] = os.path.dirname(conf['config_path']) # path of configuration file
+ pre_manifest['$configfname'] = conf['config_path']
+
+ dt_now = datetime.datetime.now()
+ pre_manifest['$time'] = dt_now.strftime('%H-%M-%S')
+ pre_manifest['$date'] = dt_now.strftime('%Y-%m-%d')
+ pre_manifest['$datetime'] = dt_now.strftime('%Y-%m-%d_%H-%M-%S')
+
+ return pre_manifest
+
+
+def __build_manifest(conf):
+ """Resolves the manifest section and resolve any internal variables"""
+ if 'manifest' not in conf:
+ return __special_variables(conf)
+
+ manifest = conf["manifest"]
+ resolved_manifest = __special_variables(conf)
+ resolved_keys = set()
+ unresolved_keys = set(manifest.keys())
+
+    # No longer using recursion since that can lead to an infinite loop if the person who writes the config file
+    # isn't careful. Also added code to allow the ${VAR} format in case the user wants to use "$.../some_${MODEL}_here/..."
+ while unresolved_keys:
+ for key in unresolved_keys:
+ # Find all variables in manifest and see if they can be replaced by the value in resolved_manifest
+ value = __find_variables(manifest[key], resolved_manifest)
+
+            # If the value no longer has variables, add the key-value pair to resolved_manifest and remove it from unresolved_keys
+ if value.find('$') < 0:
+ resolved_manifest[key] = value
+ resolved_keys.add(key)
+
+ # remove resolved key-value pairs from set, and make sure at every iteration unresolved_keys shrinks to prevent
+ # infinite loops
+ n_unresolved = len(unresolved_keys)
+ unresolved_keys -= resolved_keys
+ if n_unresolved == len(unresolved_keys):
+ msg = "Unable to resolve manifest variables: {}".format(unresolved_keys)
+ raise Exception(msg)
+
+ return resolved_manifest
+
+
+def __recursive_insert(json_obj, manifest):
+ """Loop through the config and substitute the path variables (e.g.: $MY_DIR) with the values from the manifest
+
+ :param json_obj: A json dictionary object that may contain variables needing to be resolved.
+ :param manifest: A dictionary of variable values
+    :return: A new json dictionary config with variables resolved
+ """
+ if isinstance(json_obj, string_types):
+ return __find_variables(json_obj, manifest)
+
+ elif isinstance(json_obj, list):
+ new_list = []
+ for itm in json_obj:
+ new_list.append(__recursive_insert(itm, manifest))
+ return new_list
+
+ elif isinstance(json_obj, dict):
+ for key, val in json_obj.items():
+ if key == 'manifest':
+ continue
+ json_obj[key] = __recursive_insert(val, manifest)
+
+ return json_obj
+
+ else:
+ return json_obj
+
+
+def __find_variables(json_str, manifest):
+ """Replaces variables (i.e. $VAR, ${VAR}) with their values from the manifest.
+
+    :param json_str: a json string that may contain none, one, or multiple variables
+ :param manifest: dictionary of variable lookup values
+ :return: json_str with resolved variables. Won't resolve variables that don't exist in manifest.
+ """
+    variables = [m for m in re.finditer(r'\$\{?[\w]+\}?', json_str)]
+ for var in variables:
+ var_lookup = var.group()
+ if var_lookup.startswith('${') and var_lookup.endswith('}'):
+ # replace ${VAR} with $VAR
+ var_lookup = "$" + var_lookup[2:-1]
+ if var_lookup in manifest:
+ json_str = json_str.replace(var.group(), manifest[var_lookup])
+
+ return json_str
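+
+
+# Worked example of manifest resolution (the config contents are illustrative):
+#
+#   {
+#     "manifest": {
+#       "$BASE_DIR": "/data/run1",
+#       "$OUTPUT_DIR": "${BASE_DIR}/output"
+#     },
+#     "output": {"log_file": "$OUTPUT_DIR/log.txt"}
+#   }
+#
+# __build_manifest resolves $BASE_DIR on the first pass and $OUTPUT_DIR on a later
+# pass, then __recursive_insert rewrites "log_file" to "/data/run1/output/log.txt".
+# Variables that can never be resolved raise "Unable to resolve manifest variables".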
+
+
+class ConfigDict(dict):
+ def __init__(self, *args, **kwargs):
+ self.update(*args, **kwargs)
+ self._env_built = False
+ self._io = None
+
+ self._node_set = {}
+ self._load_node_set()
+
+ @property
+ def io(self):
+ if self._io is None:
+ self._io = io
+ return self._io
+
+ @io.setter
+ def io(self, io):
+ self._io = io
+
+ @property
+ def run(self):
+ return self['run']
+
+ @property
+ def tstart(self):
+ return self.run.get('tstart', 0.0)
+
+ @property
+ def tstop(self):
+ return self.run['tstop']
+
+ @property
+ def dt(self):
+ return self.run.get('dt', 0.1)
+
+ @property
+ def spike_threshold(self):
+ return self.run.get('spike_threshold', -15.0)
+
+ @property
+ def dL(self):
+ return self.run.get('dL', 20.0)
+
+ @property
+ def gid_mappings(self):
+ return self.get('gid_mapping_file', None)
+
+ @property
+ def block_step(self):
+ return self.run.get('nsteps_block', 5000)
+
+ @property
+ def calc_ecp(self):
+ return self.run.get('calc_ecp', False)
+
+ @property
+ def conditions(self):
+ return self['conditions']
+
+ @property
+ def celsius(self):
+ return self.conditions['celsius']
+
+ @property
+ def v_init(self):
+ return self.conditions['v_init']
+
+ @property
+ def path(self):
+ return self['config_path']
+
+ @property
+ def output(self):
+ return self['output']
+
+ @property
+ def output_dir(self):
+ return self.output['output_dir']
+
+ @property
+ def overwrite_output(self):
+ return self.output.get('overwrite_output_dir', False)
+
+ @property
+ def log_file(self):
+ return self.output['log_file']
+
+ @property
+ def components(self):
+ return self.get('components', {})
+
+ @property
+ def morphologies_dir(self):
+ return self.components['morphologies_dir']
+
+ @property
+ def synaptic_models_dir(self):
+ return self.components['synaptic_models_dir']
+
+ @property
+ def point_neuron_models_dir(self):
+ return self.components['point_neuron_models_dir']
+
+ @property
+ def mechanisms_dir(self):
+ return self.components['mechanisms_dir']
+
+ @property
+ def biophysical_neuron_models_dir(self):
+ return self.components['biophysical_neuron_models_dir']
+
+ @property
+ def templates_dir(self):
+ return self.components.get('templates_dir', None)
+
+ @property
+ def with_networks(self):
+ return 'networks' in self and len(self.nodes) > 0
+
+ @property
+ def networks(self):
+ return self['networks']
+
+ @property
+ def nodes(self):
+ return self.networks.get('nodes', [])
+
+ @property
+ def edges(self):
+ return self.networks.get('edges', [])
+
+ @property
+ def reports(self):
+ return self.get('reports', {})
+
+ @property
+ def inputs(self):
+ return self.get('inputs', {})
+
+ @property
+ def node_sets(self):
+ return self._node_set
+
+ @property
+ def spikes_file(self):
+ return os.path.join(self.output_dir, self.output['spikes_file'])
+
+ def _load_node_set(self):
+ if 'node_sets_file' in self.keys():
+ node_set_val = self['node_sets_file']
+ elif 'node_sets' in self.keys():
+ node_set_val = self['node_sets']
+ else:
+ self._node_set = {}
+ return
+
+ if isinstance(node_set_val, dict):
+ self._node_set = node_set_val
+ else:
+ try:
+ self._node_set = json.load(open(node_set_val, 'r'))
+ except Exception as e:
+ io.log_exception('Unable to load node_sets_file {}'.format(node_set_val))
+
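+    # Node sets may be given inline (a dict) or as a path to a JSON file. A typical
+    # value (illustrative; the keys follow the SONATA node_sets convention) is:
+    #
+    #   {"biophysical_cells": {"model_type": "biophysical"},
+    #    "recorded_cells": {"node_ids": [0, 1, 2]}}
+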
+ def copy_to_output(self):
+ copy_config(self)
+
+ def get_modules(self, module_name):
+ return [report for report in self.reports.values() if report['module'] == module_name]
+
+ def _set_logging(self):
+ """Check if log-level and/or log-format string is being changed through the config"""
+ output_sec = self.output
+ if 'log_format' in output_sec:
+ self._io.set_log_format(output_sec['log_format'])
+
+ if 'log_level' in output_sec:
+ self._io.set_log_level(output_sec['log_level'])
+
+ if 'log_to_console' in output_sec:
+ self._io.log_to_console = output_sec['log_to_console']
+
+ if 'quiet_simulator' in output_sec and output_sec['quiet_simulator']:
+ self._io.quiet_simulator()
+
+ def build_env(self):
+ if self._env_built:
+ return
+
+ self._set_logging()
+ self.io.setup_output_dir(self.output_dir, self.log_file, self.overwrite_output)
+ self.copy_to_output()
+ self._env_built = True
+
+ @staticmethod
+ def get_validator():
+ raise NotImplementedError
+
+ @classmethod
+ def from_json(cls, config_file, validate=False):
+ validator = cls.get_validator() if validate else None
+ return cls(from_json(config_file, validator))
+
+ @classmethod
+ def from_dict(cls, config_dict, validate=False):
+ validator = cls.get_validator() if validate else None
+ return cls(from_dict(config_dict, validator))
+
+ @classmethod
+ def from_yaml(cls, config_file, validate=False):
+ raise NotImplementedError
+
+ @classmethod
+ def load(cls, config_file, validate=False):
+ # Implement factory method that can resolve the format/type of input configuration.
+ if isinstance(config_file, dict):
+ return cls.from_dict(config_file, validate)
+ elif isinstance(config_file, string_types):
+ if config_file.endswith('yml') or config_file.endswith('yaml'):
+ return cls.from_yaml(config_file, validate)
+ else:
+ return cls.from_json(config_file, validate)
+ else:
+            raise Exception('Unable to determine config type for {}.'.format(config_file))
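+
+
+# Typical usage sketch (the file name is illustrative):
+#
+#   cfg = ConfigDict.from_json('simulation_config.json')
+#   cfg.build_env()             # set up logging and the output directory
+#   print(cfg.tstop, cfg.dt)    # run parameters, with defaults applied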
diff --git a/bmtk-vb/bmtk/simulator/utils/config.pyc b/bmtk-vb/bmtk/simulator/utils/config.pyc
new file mode 100644
index 0000000..ba09d6d
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/utils/config.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/utils/graph.py b/bmtk-vb/bmtk/simulator/utils/graph.py
new file mode 100644
index 0000000..629ea1d
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/graph.py
@@ -0,0 +1,408 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+import ast
+import numpy as np
+
+from . import config as cfg
+from .property_maps import NodePropertyMap, EdgePropertyMap
+from bmtk.utils import sonata
+
+
+"""Creates a graph of nodes and edges from multiple network files for all simulators.
+
+Consists of edges and nodes. All classes are abstract and should be reimplemented by a specific simulator. Also
+contains base factory methods for building a network from a config file (or other source).
+"""
+
+
+class SimEdge(object):
+ def __init__(self, original_params, dynamics_params):
+ self._orig_params = original_params
+ self._dynamics_params = dynamics_params
+ self._updated_params = {'dynamics_params': self._dynamics_params}
+
+ @property
+ def edge_type_id(self):
+ return self._orig_params['edge_type_id']
+
+ def __getitem__(self, item):
+ if item in self._updated_params:
+ return self._updated_params[item]
+ else:
+ return self._orig_params[item]
+
+
+class SimNode(object):
+ def __init__(self, node_id, graph, network, params):
+ self._node_id = node_id
+ self._graph = graph
+ self._graph_params = params
+ self._node_type_id = params['node_type_id']
+ self._network = network
+ self._updated_params = {}
+
+ self._model_params = {}
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def node_type_id(self):
+ return self._node_type_id
+
+ @property
+ def network(self):
+        """Name of the network this node belongs to."""
+ return self._network
+
+ @property
+ def model_params(self):
+ """Parameters (json file, nml, dictionary) that describe a specific node"""
+ return self._model_params
+
+ @model_params.setter
+ def model_params(self, value):
+ self._model_params = value
+
+ def __contains__(self, item):
+ return item in self._updated_params or item in self._graph_params
+
+ def __getitem__(self, item):
+ if item in self._updated_params:
+ return self._updated_params[item]
+ else:
+ return self._graph_params[item]
+
+
+class SimGraph(object):
+ model_type_col = 'model_type'
+
+ def __init__(self):
+ self._components = {} # components table, i.e. paths to model files.
+ self._io = None # TODO: create default io module (without mpi)
+
+ self._node_property_maps = {}
+ self._edge_property_maps = {}
+
+ self._node_populations = {}
+ self._internal_populations_map = {}
+ self._virtual_populations_map = {}
+
+ self._virtual_cells_nid = {}
+
+ self._recurrent_edges = {}
+ self._external_edges = {}
+
+ @property
+ def io(self):
+ return self._io
+
+ @property
+    def internal_pop_names(self):
+        return list(self._internal_populations_map.keys())
+
+ @property
+ def node_populations(self):
+ return list(self._node_populations.keys())
+
+ def get_component(self, key):
+ """Get the value of item in the components dictionary.
+
+ :param key: name of component
+ :return: value assigned to component
+ """
+ return self._components[key]
+
+ def add_component(self, key, value):
+ """Add a component key-value pair
+
+ :param key: name of component
+ :param value: value
+ """
+ self._components[key] = value
+
+ def _from_json(self, file_name):
+ return cfg.from_json(file_name)
+
+ def _validate_components(self):
+ """Make sure various components (i.e. paths) exists before attempting to build the graph."""
+ return True
+
+ def _create_nodes_prop_map(self, grp):
+ return NodePropertyMap()
+
+ def _create_edges_prop_map(self, grp):
+ return EdgePropertyMap()
+
+ def __avail_model_types(self, population):
+ model_types = set()
+ for grp in population.groups:
+ if self.model_type_col not in grp.all_columns:
+ self.io.log_exception('model_type is missing from nodes.')
+
+ model_types.update(set(np.unique(grp.get_values(self.model_type_col))))
+ return model_types
+
+ def _preprocess_node_types(self, node_population):
+        # TODO: The following figures out which node-type-ids are actually used. For memory and speed it may be
+        # better to just process them all.
+ node_type_ids = node_population.type_ids
+ # TODO: Verify all the node_type_ids are in the table
+ node_types_table = node_population.types_table
+
+        # TODO: Convert model_type to an enum
+ morph_dir = self.get_component('morphologies_dir')
+ if morph_dir is not None and 'morphology' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ if node_type['morphology'] is None:
+ continue
+                # TODO: Check that the file exists
+ # TODO: See if absolute path is stored in csv
+ node_type['morphology'] = os.path.join(morph_dir, node_type['morphology'])
+
+ if 'dynamics_params' in node_types_table.columns and 'model_type' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ dynamics_params = node_type['dynamics_params']
+ if isinstance(dynamics_params, dict):
+ continue
+
+ model_type = node_type['model_type']
+ if model_type == 'biophysical':
+ params_dir = self.get_component('biophysical_neuron_models_dir')
+ elif model_type == 'point_process':
+ params_dir = self.get_component('point_neuron_models_dir')
+ elif model_type == 'point_soma':
+ params_dir = self.get_component('point_neuron_models_dir')
+ else:
+ # Not sure what to do in this case, throw Exception?
+ params_dir = self.get_component('custom_neuron_models')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ node_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ self.io.log_exception('Could not find node dynamics_params file {}.'.format(params_path))
+
+ def _preprocess_edge_types(self, edge_pop):
+ edge_types_table = edge_pop.types_table
+ edge_type_ids = np.unique(edge_pop.type_ids)
+
+ for et_id in edge_type_ids:
+ if 'dynamics_params' in edge_types_table.columns:
+ edge_type = edge_types_table[et_id]
+ dynamics_params = edge_type['dynamics_params']
+ params_dir = self.get_component('synaptic_models_dir')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ edge_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ self.io.log_exception('Could not find edge dynamics_params file {}.'.format(params_path))
+
+ # Split target_sections
+ if 'target_sections' in edge_type:
+ trg_sec = edge_type['target_sections']
+ if trg_sec is not None:
+ try:
+ edge_type['target_sections'] = ast.literal_eval(trg_sec)
+ except Exception as exc:
+ self.io.log_warning('Unable to split target_sections list {}'.format(trg_sec))
+ edge_type['target_sections'] = None
+
+ # Split target distances
+ if 'distance_range' in edge_type:
+ dist_range = edge_type['distance_range']
+ if dist_range is not None:
+ try:
+                    # TODO: Make sure the distance range has at most two values
+ edge_type['distance_range'] = json.loads(dist_range)
+ except Exception as e:
+ try:
+ edge_type['distance_range'] = [0.0, float(dist_range)]
+ except Exception as e:
+ self.io.log_warning('Unable to parse distance_range {}'.format(dist_range))
+ edge_type['distance_range'] = None
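+                # The fallback above accepts both notations found in edge-type CSV files:
+                #   "[30.0, 150.0]" -> json.loads -> [30.0, 150.0]
+                #   "150.0"         -> float      -> [0.0, 150.0]
+                # Anything else is logged as a warning and distance_range is set to None.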
+
+ def external_edge_populations(self, src_pop, trg_pop):
+ return self._external_edges.get((src_pop, trg_pop), [])
+
+ def add_nodes(self, sonata_file, populations=None):
+ """Add nodes from a network to the graph.
+
+ :param sonata_file: a sonata.File object containing node populations.
+ :param populations: list of node-population names to add. If None, all populations in the file are used.
+ """
+ nodes = sonata_file.nodes
+
+ selected_populations = nodes.population_names if populations is None else populations
+ for pop_name in selected_populations:
+ if pop_name not in nodes:
+ # when user wants to simulate only a few of the populations in the file
+ continue
+
+ if pop_name in self.node_populations:
+ # Make sure there aren't any collisions
+ self.io.log_exception('There are multiple node populations with name {}.'.format(pop_name))
+
+ node_pop = nodes[pop_name]
+ self._preprocess_node_types(node_pop)
+ self._node_populations[pop_name] = node_pop
+
+ # Segregate into virtual populations and non-virtual populations
+ model_types = self.__avail_model_types(node_pop)
+ if 'virtual' in model_types:
+ self._virtual_populations_map[pop_name] = node_pop
+ self._virtual_cells_nid[pop_name] = {}
+ model_types -= set(['virtual'])
+ if model_types:
+ # We'll allow a population to have virtual and non-virtual nodes but it is not ideal
+ self.io.log_warning(('Node population {} contains both virtual and non-virtual nodes which can ' +
+ 'cause memory and build-time inefficiency. Consider separating virtual nodes ' +
+ 'into their own population.').format(pop_name))
+
+ if model_types:
+ self._internal_populations_map[pop_name] = node_pop
+
+ self._node_property_maps[pop_name] = {grp.group_id: self._create_nodes_prop_map(grp)
+ for grp in node_pop.groups}
+
+ def build_nodes(self):
+ raise NotImplementedError
+
+ def build_recurrent_edges(self):
+ raise NotImplementedError
+
+ def add_edges(self, sonata_file, populations=None, source_pop=None, target_pop=None):
+ """
+
+ :param sonata_file:
+ :param populations:
+ :param source_pop:
+ :param target_pop:
+ :return:
+ """
+ edges = sonata_file.edges
+ selected_populations = edges.population_names if populations is None else populations
+
+ for pop_name in selected_populations:
+ if pop_name not in edges:
+ continue
+
+ edge_pop = edges[pop_name]
+ self._preprocess_edge_types(edge_pop)
+
+ # Check the source nodes exist
+ src_pop = source_pop if source_pop is not None else edge_pop.source_population
+ is_internal_src = src_pop in self._internal_populations_map.keys()
+ is_external_src = src_pop in self._virtual_populations_map.keys()
+
+ trg_pop = target_pop if target_pop is not None else edge_pop.target_population
+ is_internal_trg = trg_pop in self._internal_populations_map.keys()
+
+ if not is_internal_trg:
+ self.io.log_exception(('Node population {} does not exist (or consists of only virtual nodes). ' +
+ '{} edges cannot create connections.').format(trg_pop, pop_name))
+
+ if not (is_internal_src or is_external_src):
+ self.io.log_exception('Source node population {} not found. Please update {} edges'.format(src_pop,
+ pop_name))
+ if is_internal_src:
+ if trg_pop not in self._recurrent_edges:
+ self._recurrent_edges[trg_pop] = []
+ self._recurrent_edges[trg_pop].append(edge_pop)
+
+ if is_external_src:
+ if (src_pop, trg_pop) not in self._external_edges:
+ self._external_edges[(src_pop, trg_pop)] = []
+ self._external_edges[(src_pop, trg_pop)].append(edge_pop)
+
+ self._edge_property_maps[pop_name] = {grp.group_id: self._create_edges_prop_map(grp)
+ for grp in edge_pop.groups}
+
+ @classmethod
+ def from_config(cls, conf, **properties):
+ """Generates a graph structure from a json config file or dictionary.
+
+ :param conf: name of json config file, or a dictionary with config parameters
+ :param properties: optional properties.
+ :return: A graph object of type cls
+ """
+ graph = cls(**properties)
+ if isinstance(conf, basestring):
+ config = graph._from_json(conf)
+ elif isinstance(conf, dict):
+ config = conf
+ else:
+ graph.io.log_exception('Could not convert {} (type "{}") to json.'.format(conf, type(conf)))
+
+ run_dict = config['run']
+ if 'spike_threshold' in run_dict:
+ # TODO: FIX, spike-thresholds should be set by simulation code, allow for diff. values based on node-group
+ graph.spike_threshold = run_dict['spike_threshold']
+ if 'dL' in run_dict:
+ graph.dL = run_dict['dL']
+
+ if not config.with_networks:
+ graph.io.log_exception('Could not find any network files. Unable to build network.')
+
+ # load components
+ for name, value in config.components.items():
+ graph.add_component(name, value)
+ graph._validate_components()
+
+ # load nodes
+ for node_dict in config.nodes:
+ nodes_net = sonata.File(data_files=node_dict['nodes_file'], data_type_files=node_dict['node_types_file'])
+ graph.add_nodes(nodes_net)
+
+ # load edges
+ for edge_dict in config.edges:
+ target_network = edge_dict['target'] if 'target' in edge_dict else None
+ source_network = edge_dict['source'] if 'source' in edge_dict else None
+ edge_net = sonata.File(data_files=edge_dict['edges_file'], data_type_files=edge_dict['edge_types_file'])
+ graph.add_edges(edge_net, source_pop=source_network, target_pop=target_network)
+
+ '''
+ graph.io.log_info('Building cells.')
+ graph.build_nodes()
+
+ graph.io.log_info('Building recurrent connections')
+ graph.build_recurrent_edges()
+ '''
+
+ return graph
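+
+# A minimal usage sketch (assumes a concrete subclass, here called MyGraph, that
+# implements build_nodes/build_recurrent_edges; 'config.json' is hypothetical):
+#
+# graph = MyGraph.from_config('config.json')
+# print(graph.node_populations)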
diff --git a/bmtk-vb/bmtk/simulator/utils/io.py b/bmtk-vb/bmtk/simulator/utils/io.py
new file mode 100644
index 0000000..b6e5e5c
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/io.py
@@ -0,0 +1,54 @@
+import os
+import sys
+import shutil
+import logging
+
+
+class IOUtils(object):
+ def __init__(self):
+ self.mpi_rank = 0
+ self.mpi_size = 1
+
+ self._log_format = '%(asctime)s [%(levelname)s] %(message)s'
+ self._logger = logging.getLogger()
+ self.set_console_logging()
+
+ @property
+ def logger(self):
+ return None
+
+ def set_console_logging(self):
+ pass
+
+ def barrier(self):
+ pass
+
+ def quit(self):
+ sys.exit(1)
+
+ def setup_output_dir(self, config_dir, log_file, overwrite=True):
+ if self.mpi_rank == 0:
+ # Create output directory
+ if os.path.exists(config_dir):
+ if overwrite:
+ shutil.rmtree(config_dir)
+ else:
+ self.log_exception('ERROR: Directory already exists (remove or set to overwrite).')
+ os.makedirs(config_dir)
+
+ # Create log file
+ if log_file is not None:
+ file_logger = logging.FileHandler(log_file)
+ file_logger.setFormatter(logging.Formatter(self._log_format))
+ self._logger.addHandler(file_logger)
+ self.log_info('Created log file')
+
+ self.barrier()
+
+ def log_info(self, message, all_ranks=False):
+ print(message)
+
+ def log_warning(self, message, all_ranks=False):
+ print(message)
+
+ def log_exception(self, message):
+ raise Exception(message)
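+
+# Sketch of a concrete subclass (an assumption about the intended pattern: the base
+# class stubs out `logger` and `set_console_logging` for subclasses to fill in):
+#
+# class ConsoleIO(IOUtils):
+#     @property
+#     def logger(self):
+#         return self._logger
+#
+#     def set_console_logging(self):
+#         handler = logging.StreamHandler()
+#         handler.setFormatter(logging.Formatter(self._log_format))
+#         self._logger.addHandler(handler)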
diff --git a/bmtk-vb/bmtk/simulator/utils/load_spikes.py b/bmtk-vb/bmtk/simulator/utils/load_spikes.py
new file mode 100644
index 0000000..8c16caf
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/load_spikes.py
@@ -0,0 +1,91 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import h5py
+import numpy as np
+import os
+import datetime
+
+
+def load_spikes_ascii(file_name):
+ '''
+ Load ascii spike file
+ '''
+ t = os.path.getmtime(file_name)
+ print(file_name, "modified on:", datetime.datetime.fromtimestamp(t))
+ spk_ts, spk_gids = np.loadtxt(file_name,
+ dtype='float32,int',
+ unpack=True)
+
+ spk_ts = spk_ts * 1E-3
+
+ print('loaded spikes from ascii')
+
+ return [spk_ts,spk_gids]
+
+
+def load_spikes_h5(file_name):
+ '''
+ Load hdf5 spike file
+ '''
+
+ t = os.path.getmtime(file_name)
+ print(file_name, "modified on:", datetime.datetime.fromtimestamp(t))
+
+ with h5py.File(file_name, 'r') as h5:
+ spk_ts = h5["time"][...] * 1E-3
+ spk_gids = h5["gid"][...]
+
+ print('loaded spikes from hdf5')
+
+ return [spk_ts,spk_gids]
+
+
+def load_spikes_nwb(file_name, trial_name):
+ '''
+ Load spikes from the nwb file
+
+ Returns:
+ spike_times: list
+ spike_gids: list
+ '''
+ with h5py.File(file_name, 'r') as f5:
+ spike_trains_handle = f5['processing/%s/spike_train' % trial_name] # nwb.SpikeTrain.get_processing(f5,'trial_0')
+
+ spike_times = []
+ spike_gids = []
+
+ for gid in spike_trains_handle.keys():
+ times_gid = spike_trains_handle['%d/data' % int(gid)][:]
+ spike_times.extend(times_gid)
+ spike_gids.extend([int(gid)] * len(times_gid))
+
+ return [np.array(spike_times) * 1E-3, np.array(spike_gids)]
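+
+# Usage sketch (file names are hypothetical); all three loaders return spike
+# times in seconds alongside the matching gids:
+#
+# spk_ts, spk_gids = load_spikes_ascii('spikes.txt')
+# spk_ts, spk_gids = load_spikes_h5('spikes.h5')
+# spk_ts, spk_gids = load_spikes_nwb('session.nwb', 'trial_0')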
+
diff --git a/bmtk-vb/bmtk/simulator/utils/nwb.py b/bmtk-vb/bmtk/simulator/utils/nwb.py
new file mode 100644
index 0000000..4d18d16
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/nwb.py
@@ -0,0 +1,530 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import copy
+import numpy as np
+import os
+import h5py
+import time
+import uuid
+import tempfile
+from bmtk.analyzer.visualization.widgets import PlotWidget, MovieWidget
+
+__version__ = '0.1.0'
+
+allowed_dimensions = {'firing_rate': ('hertz',),
+ 'time': ('second', 'millisecond'),
+ 'brightness': ('intensity',),
+ 'distance': ('pixel',),
+ 'index': ('gid',),
+ 'intensity': ('bit',None),
+ 'voltage': ('volt',),
+ 'current': ('ampere',),
+ None: (None,),
+ 'dev': ('dev',)}
+
+allowed_groups = {'firing_rate': ('firing_rate',),
+ 'spike_train': ('index', 'time'),
+ 'grayscale_movie': ('intensity',),
+ 'time_series': ('voltage', 'current'),
+ 'dev': ('dev',)}
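+
+# Example reading of the two tables above: a 'spike_train' group may hold data of
+# dimension 'index' (unit 'gid') or 'time' (unit 'second' or 'millisecond').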
+
+top_level_data = ['file_create_date',
+ 'stimulus',
+ 'acquisition',
+ 'analysis',
+ 'processing',
+ 'epochs',
+ 'general',
+ 'session_description',
+ 'nwb_version',
+ 'identifier']
+
+
+def open_file(file_name):
+ return h5py.File(file_name)
+
+
+class Scale(object):
+ def __init__(self, scale_range, dimension, unit):
+ assert dimension in allowed_dimensions
+ assert unit in allowed_dimensions[dimension]
+
+ self.scale_range = scale_range
+ self.dimension = dimension
+ self.unit = unit
+ self._hdf5_location = None
+
+ def __eq__(self, other):
+ d = self.dimension == other.dimension
+ u = self.unit == other.unit
+ s = np.allclose(self.scale_range, other.scale_range)
+ return d and u and s
+
+ @property
+ def data(self):
+ return self.scale_range
+
+
+class DtScale(object):
+ def __init__(self, dt, dimension, unit):
+ assert dimension in allowed_dimensions
+ assert unit in allowed_dimensions[dimension]
+
+ self.dt = dt
+ self.dimension = dimension
+ self.unit = unit
+ self._hdf5_location = None
+
+ def __eq__(self, other):
+ d = self.dimension == other.dimension
+ u = self.unit == other.unit
+ s = np.allclose(self.dt, other.dt)
+ return d and u and s
+
+ @property
+ def data(self):
+ return self.dt
+
+
+class NullScale(object):
+
+ def __init__(self):
+ self._hdf5_location = None
+ self.data = None
+ self.dimension = None
+ self.unit = None
+
+
+class Data(object):
+ def __init__(self, data, dimension, unit, scales, metadata):
+ assert dimension in allowed_dimensions
+ assert unit in allowed_dimensions[dimension]
+ if isinstance(scales, (Scale, DtScale)):
+ assert len(data.shape) == 1
+ scales = (scales,)
+
+ for key in metadata.iterkeys():
+ assert isinstance(key, (str, unicode))
+ for ii, scale in enumerate(scales):
+ if isinstance(scale, Scale):
+ assert len(scale.scale_range) == data.shape[ii]
+ elif isinstance(scale, DtScale):
+ assert isinstance(scale.dt, (float, np.float)) and scale.dt > 0
+ else:
+ raise Exception
+
+ if len(scales) == 0:
+ scales = [NullScale()]
+
+ metadata = copy.copy(metadata)
+ self.data = data
+ self.scales = scales
+ self.dimension = dimension
+ self.unit = unit
+ self.metadata = metadata
+ self._hdf5_location = None
+
+ def __eq__(self, other):
+ da = np.allclose(self.data, other.data)
+ d = self.dimension == other.dimension
+ u = self.unit == other.unit
+ s = [s1 == s2 for s1, s2 in zip(self.scales, other.scales)].count(True) == len(self.scales)
+ if len(self.metadata) != len(other.metadata):
+ m = False
+ else:
+ m = all(key in other.metadata and other.metadata[key] == self.metadata[key]
+ for key in self.metadata)
+ return da and d and u and s and m
+
+ @staticmethod
+ def _get_from_group(object_class, parent_group, group_name, ii=0):
+
+ data_group = parent_group['%s/%s' % (group_name, ii)]
+ data, scales, dimension, unit, metadata = _get_data(data_group)
+
+ assert dimension in allowed_groups[object_class.group]
+
+ if unit == "None":
+ unit = None
+ scale_list = []
+ for scale in scales:
+ if scale.attrs['type'] == 'Scale':
+ curr_scale = Scale(scale, scale.attrs['dimension'], scale.attrs['unit'])
+ elif scale.attrs['type'] == 'DtScale':
+ curr_scale = DtScale(float(scale.value), scale.attrs['dimension'], scale.attrs['unit'])
+ elif scale.attrs['type'] == 'NullScale':
+ curr_scale = None
+ else:
+ raise Exception
+ if curr_scale is not None:
+ scale_list.append(curr_scale)
+
+ if len(scale_list) == 1:
+ scale_list = scale_list[0]
+
+ return object_class(data, dimension=dimension, unit=unit, scale=scale_list, metadata=metadata)
+
+ def add_to_stimulus(self, f, compression='gzip', compression_opts=4):
+ self._add_to_group(f, 'stimulus', self.__class__.group, compression=compression,
+ compression_opts=compression_opts)
+
+ @classmethod
+ def get_stimulus(cls, f, ii=None):
+ if ii is None:
+ return_data = [cls.get_stimulus(f, ii) for ii in range(len(f['stimulus/%s' % cls.group]))]
+ if len(return_data) == 1:
+ return_data = return_data[0]
+ return return_data
+ else:
+ return Data._get_from_group(cls, f['stimulus'], cls.group, ii=ii)
+
+ def add_to_acquisition(self, f, compression='gzip', compression_opts=4):
+ self._add_to_group(f, 'acquisition', self.__class__.group, compression=compression,
+ compression_opts=compression_opts)
+
+ @classmethod
+ def get_acquisition(cls, f, ii=None):
+ if ii is None:
+ return_data = [cls.get_acquisition(f, ii) for ii in range(len(f['acquisition/%s' % cls.group]))]
+ if len(return_data) == 1:
+ return_data = return_data[0]
+ return return_data
+
+ else:
+ return Data._get_from_group(cls, f['acquisition'], cls.group, ii=ii)
+
+ def add_to_processing(self, f, processing_submodule_name):
+ if processing_submodule_name not in f['processing']:
+ f['processing'].create_group(processing_submodule_name)
+ return self._add_to_group(f, 'processing/%s' % processing_submodule_name, self.__class__.group)
+
+ @classmethod
+ def get_processing(cls, f, subgroup_name, ii=None):
+ if ii is None:
+ return_data = {}
+ for ii in range(len(f['processing/%s/%s' % (subgroup_name, cls.group)])):
+ return_data[ii] = cls.get_processing(f, subgroup_name, ii)
+ return return_data
+
+ else:
+ return Data._get_from_group(cls, f['processing/%s' % subgroup_name], cls.group, ii=ii)
+
+ def add_to_analysis(self, f, analysis_submodule_name):
+ if analysis_submodule_name not in f['analysis']:
+ f['analysis'].create_group(analysis_submodule_name)
+ return self._add_to_group(f, 'analysis/%s' % analysis_submodule_name, self.__class__.group)
+
+ @classmethod
+ def get_analysis(cls, f, subgroup_name, ii=None):
+ if ii is None:
+ return [cls.get_analysis(f, subgroup_name, ii)
+ for ii in range(len(f['analysis/%s/%s' % (subgroup_name, cls.group)]))]
+ else:
+ return Data._get_from_group(cls, f['analysis/%s' % subgroup_name], cls.group, ii=ii)
+
+ def _add_to_group(self, f, parent_name, group_name, compression='gzip', compression_opts=4):
+ assert group_name in allowed_groups
+ assert self.dimension in allowed_groups[group_name]
+ try:
+ parent_group = f[parent_name]
+ except ValueError:
+ try:
+ file_name = f.filename
+ raise Exception('Parent group:%s not found in file %s' % (parent_name, file_name))
+ except ValueError:
+ raise Exception('File not valid: %s' % f)
+
+ if self.__class__.group in parent_group:
+ subgroup = parent_group[self.__class__.group]
+ int_group_name = str(len(subgroup))
+ else:
+ subgroup = parent_group.create_group(self.__class__.group)
+ int_group_name = '0'
+
+ # Create external link:
+ if isinstance(self.data, h5py.Dataset):
+ if subgroup.file == self.data.file:
+ raise NotImplementedError
+ else:
+ return _set_data_external_link(subgroup, int_group_name, self.data.parent)
+ else:
+ dataset_group = subgroup.create_group(int_group_name)
+
+ # All this allows shared scale management:
+ scale_group = None
+ scale_list = []
+ for ii, scale in enumerate(self.scales):
+ if isinstance(scale, (Scale, DtScale, NullScale)):
+ if scale._hdf5_location is None:
+ if scale_group is None:
+ scale_group = dataset_group.create_group('scale')
+ curr_scale = _set_scale(scale_group, 'dimension_%s' % ii, scale.data, scale.dimension,
+ scale.unit, scale.__class__.__name__)
+ scale._hdf5_location = curr_scale
+ else:
+ curr_scale = scale._hdf5_location
+ elif isinstance(scale, h5py.Dataset):
+ curr_scale = scale
+ else:
+ raise Exception
+
+ scale_list.append(curr_scale)
+
+ _set_data(subgroup, dataset_group.name, self.data, scale_list, self.dimension, self.unit,
+ metadata=self.metadata, compression=compression, compression_opts=compression_opts)
+
+
+class FiringRate(Data):
+ group = 'firing_rate'
+
+ def __init__(self, data, **kwargs):
+ dimension = 'firing_rate'
+ unit = 'hertz'
+ scale = kwargs.get('scale')
+ metadata = kwargs.get('metadata', {})
+ assert isinstance(scale, (Scale, DtScale))
+ super(FiringRate, self).__init__(data, dimension, unit, scale, metadata)
+
+ def get_widget(self, **kwargs):
+ rate_data = self.data[:]
+ t_range = self.scales[0].data[:]
+ return PlotWidget(t_range, rate_data, metadata=self.metadata, **kwargs)
+
+
+class Dev(Data):
+ group = 'dev'
+
+ def __init__(self, data, **kwargs):
+ dimension = kwargs.get('dimension')
+ unit = kwargs.get('unit')
+ scale = kwargs.get('scale')
+ metadata = kwargs.get('metadata', {})
+
+ super(Dev, self).__init__(data, dimension, unit, scale, metadata)
+
+
+class TimeSeries(Data):
+ group = 'time_series'
+
+ def __init__(self, data, **kwargs):
+ dimension = kwargs.get('dimension')
+ unit = kwargs.get('unit')
+ scale = kwargs.get('scale')
+ metadata = kwargs.get('metadata', {})
+
+ assert isinstance(scale, (Scale, DtScale))
+ assert scale.dimension == 'time'
+ super(TimeSeries, self).__init__(data, dimension, unit, scale, metadata)
+
+
+class SpikeTrain(Data):
+ group = 'spike_train'
+
+ def __init__(self, data, **kwargs):
+ scales = kwargs.get('scale',[])
+ unit = kwargs.get('unit', 'gid')
+ metadata = kwargs.get('metadata',{})
+
+ if isinstance(scales, Scale):
+ super(SpikeTrain, self).__init__(data, 'index', unit, scales, metadata)
+ elif len(scales) == 0:
+ assert unit in allowed_dimensions['time']
+ scales = []
+ super(SpikeTrain, self).__init__(data, 'time', unit, scales, metadata)
+ else:
+ assert len(scales) == 1 and isinstance(scales[0], Scale)
+ super(SpikeTrain, self).__init__(data, 'index', unit, scales, metadata)
+
+
+class GrayScaleMovie(Data):
+ group = 'grayscale_movie'
+
+ def __init__(self, data, **kwargs):
+ dimension = 'intensity'
+ unit = kwargs.get('unit', None)
+ scale = kwargs.get('scale')
+ metadata = kwargs.get('metadata', {})
+
+ super(GrayScaleMovie, self).__init__(data, dimension, unit, scale, metadata)
+
+ def get_widget(self, ax=None):
+ data = self.data[:]
+ t_range = self.scales[0].data[:]
+ return MovieWidget(t_range=t_range, data=data, ax=ax, metadata=self.metadata)
+
+
+def get_temp_file_name():
+ f = tempfile.NamedTemporaryFile(delete=False)
+ temp_file_name = f.name
+ f.close()
+ os.remove(f.name)
+ return temp_file_name
+
+
+def create_blank_file(save_file_name=None, force=False, session_description='', close=False):
+
+ if save_file_name is None:
+ save_file_name = get_temp_file_name()
+
+ if not force:
+ f = h5py.File(save_file_name, 'w-')
+ else:
+ if os.path.exists(save_file_name):
+ os.remove(save_file_name)
+ f = h5py.File(save_file_name, 'w')
+
+ f.create_group('acquisition')
+ f.create_group('analysis')
+ f.create_group('epochs')
+ f.create_group('general')
+ f.create_group('processing')
+ f.create_group('stimulus')
+
+ f.create_dataset("file_create_date", data=np.string_(time.ctime()))
+ f.create_dataset("session_description", data=session_description)
+ f.create_dataset("nwb_version", data='iSee_%s' % __version__)
+ f.create_dataset("identifier", data=str(uuid.uuid4()))
+
+ if close:
+ f.close()
+ else:
+ return f
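+
+# Sketch of the intended round trip (illustrative values):
+#
+# f = create_blank_file('example.nwb', force=True)
+# spikes = SpikeTrain(np.array([1.0, 2.5, 7.0]), unit='second')
+# spikes.add_to_processing(f, 'trial_0')
+# f.close()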
+
+
+def assert_subgroup_exists(child_name, parent):
+ if child_name not in parent:
+ raise RuntimeError('Group: %s has no subgroup %s' % (parent.name, child_name))
+
+
+def _set_data_external_link(parent_group, dataset_name, data):
+ parent_group[dataset_name] = h5py.ExternalLink(data.file.filename, data.name)
+
+
+def _set_scale_external_link(parent_group, name, scale):
+ print(parent_group, name, scale)
+ print(scale.file.filename, scale.name)
+ parent_group[name] = h5py.ExternalLink(scale.file.filename, scale.name)
+ return parent_group[name]
+
+
+def _set_data(parent_group, dataset_name, data, scales, dimension, unit, force=False, metadata={}, compression='gzip',
+ compression_opts=4):
+ # Check inputs:
+ if isinstance(scales, h5py.Dataset):
+ scales = (scales,)
+ else:
+ assert isinstance(scales, (list, tuple))
+
+ assert data.ndim == len(scales)
+ assert dimension in allowed_dimensions
+ assert unit in allowed_dimensions[dimension]
+ for ii, scale in enumerate(scales):
+ assert len(scale.shape) in (0, 1)
+ check_dimension = str(scale.attrs['dimension'])
+ if check_dimension == 'None':
+ check_dimension = None
+ check_unit = scale.attrs['unit']
+ if check_unit == 'None':
+ check_unit = None
+ assert check_dimension in allowed_dimensions
+ assert check_unit in allowed_dimensions[check_dimension]
+ if len(scale.shape) == 1:
+ assert len(scale) == data.shape[ii] or len(scale) == 0
+
+ if dataset_name not in parent_group:
+ dataset_group = parent_group.create_group(dataset_name)
+ else:
+ dataset_group = parent_group[dataset_name]
+
+ for key, val in metadata.iteritems():
+ assert key not in dataset_group.attrs
+ dataset_group.attrs[key] = val
+
+ if 'data' in dataset_group:
+ if not force:
+ raise IOError('Field "stimulus" of %s is not empty; override with force=True' % parent_group.name)
+ else:
+ del dataset_group['data']
+
+ dataset = dataset_group.create_dataset(name='data', data=data, compression=compression,
+ compression_opts=compression_opts)
+
+ for ii, scale in enumerate(scales):
+ dataset.dims[ii].label = scale.attrs['dimension']
+ dataset.dims[ii].attach_scale(scale)
+
+ dataset.attrs.create('dimension', str(dimension))
+ dataset.attrs.create('unit', str(unit))
+
+ return dataset
+
+
+def _set_scale(parent_group, name, scale, dimension, unit, scale_class_name):
+ assert dimension in allowed_dimensions
+ assert unit in allowed_dimensions[dimension]
+
+ if scale is None:
+ scale = parent_group.create_dataset(name=name, shape=(0,))
+ else:
+ scale = np.array(scale)
+ assert scale.ndim in (0, 1)
+ scale = parent_group.create_dataset(name=name, data=scale)
+ scale.attrs['dimension'] = str(dimension)
+ scale.attrs['unit'] = str(unit)
+ scale.attrs['type'] = scale_class_name
+
+ return scale
+
+
+def _get_data(dataset_group):
+ data = dataset_group['data']
+ dimension = dataset_group['data'].attrs['dimension']
+ unit = dataset_group['data'].attrs['unit']
+ scales = tuple([dim[0] for dim in dataset_group['data'].dims])
+ metadata = dict(dataset_group.attrs)
+
+ return data, scales, dimension, unit, metadata
+
+
+def get_stimulus(f):
+ category = 'stimulus'
+ for parent_group in f[category]:
+ for data_group in f[category][parent_group]:
+ print(f[category][parent_group][data_group])
+
+
+def add_external_links(parent_group, external_file_name, external_group_name_list=top_level_data):
+ for subgroup in external_group_name_list:
+ parent_group[subgroup] = h5py.ExternalLink(external_file_name, subgroup)
diff --git a/bmtk-vb/bmtk/simulator/utils/property_maps.py b/bmtk-vb/bmtk/simulator/utils/property_maps.py
new file mode 100644
index 0000000..9a22515
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/property_maps.py
@@ -0,0 +1,7 @@
+class NodePropertyMap(object):
+ pass
+
+
+class EdgePropertyMap(object):
+ pass
+
diff --git a/bmtk-vb/bmtk/simulator/utils/scripts/convert_filters.py b/bmtk-vb/bmtk/simulator/utils/scripts/convert_filters.py
new file mode 100644
index 0000000..298c101
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/scripts/convert_filters.py
@@ -0,0 +1,71 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import numpy as np
+from bmtk.simulator.utils import nwb
+import pickle
+import re
+
+pickle_regex = re.compile(r'.*\.pkl')
+
+def convert_filters(src_dir, tgt_dir):
+
+ for file_name in os.listdir(src_dir):
+ if pickle_regex.match(file_name) is not None:
+
+ print 'Converting: %s' % file_name
+
+ full_path_to_src_file = os.path.join(src_dir, file_name)
+ full_path_to_tgt_file = os.path.join(tgt_dir, file_name).replace('.pkl', '.nwb')
+
+ try:
+ f = nwb.NWB(file_name=full_path_to_tgt_file,
+ identifier='iSee example filter dataset',
+ description='Converting an example inhomogeneous Poisson rate collection from a filter to drive simulations')
+
+ # Load data from file:
+ data = pickle.load(open(full_path_to_src_file, 'r'))
+ timestamps = data['t']
+
+ # Load first cell into file:
+ ts0 = f.create_timeseries('TimeSeries', "Cell_0", "acquisition")
+ ts0.set_data(data['cells'][0], unit='Hz', resolution=float('nan'), conversion=1.)
+ ts0.set_time_by_rate(0.,1000.)
+ ts0.set_value('num_samples', len(timestamps))
+ ts0.finalize()
+
+ # Load remaining cells into file, linking timestamps:
+ for ii in np.arange(1,len(data['cells'])):
+ ts = f.create_timeseries('TimeSeries', "Cell_%s" % ii, "acquisition")
+ ts.set_data(data['cells'][ii], unit='Hz', resolution=float('nan'), conversion=1.)
+ ts.set_time_by_rate(0.,1000.)
+ ts.set_value('num_samples', len(timestamps))
+ ts.finalize()
+
+ # Close out:
+ f.close()
+
+ except Exception:
+ print ' Conversion failed: %s' % file_name
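+
+# Usage sketch (directories are hypothetical):
+#
+# convert_filters('/data/filters_pkl', '/data/filters_nwb')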
+
+
diff --git a/bmtk-vb/bmtk/simulator/utils/sim_validator.py b/bmtk-vb/bmtk/simulator/utils/sim_validator.py
new file mode 100644
index 0000000..447dda1
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/sim_validator.py
@@ -0,0 +1,126 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+from jsonschema import Draft4Validator
+from jsonschema.exceptions import ValidationError
+import pandas as pd
+
+
+class SimConfigValidator(Draft4Validator):
+ """
+ A JSON Schema validator class that will store a schema (passed into the constructor) and validate a json file.
+ It has all the functionality of the JSONSchema format, plus includes special types and parameters like making
+ sure a value is a file or directory type, checking csv files, etc.
+
+ To Use:
+ validator = SimConfigValidator(json_schema.json)
+ validator.validate(file.json)
+ """
+
+ def __init__(self, schema, types=(), resolver=None, format_checker=None, file_formats=()):
+ super(SimConfigValidator, self).__init__(schema, types, resolver, format_checker)
+
+ # custom parameter
+ self.VALIDATORS["exists"] = self._check_path
+
+ self._file_formats = {} # the "file_format" property checks the validity of a (non-json) file.
+ for (name, schema) in file_formats:
+ self._file_formats[name] = self._parse_file_formats(schema)
+ self.VALIDATORS["file_format"] = self._validate_file
+
+ def is_type(self, instance, dtype):
+ # override type since checking for file and directory type is potentially more complicated.
+ if dtype == "directory":
+ return self._is_directory_type(instance)
+
+ elif dtype == "file":
+ return self._is_file_type(instance)
+
+ else:
+ return super(SimConfigValidator, self).is_type(instance, dtype)
+
+ def _is_directory_type(self, instance):
+ """Check if instance value is a valid directory file path name
+
+ :param instance: string that represents a directory path
+ :return: True if instance is a valid dir path (even if it doesn't exist).
+ """
+ # Always return true for now; rely on the "exists" property (_check_path) to actually determine if the file exists.
+ # TODO: check that instance string is a valid path string, even if the path doesn't yet exist.
+ return True
+
+ def _is_file_type(self, instance):
+ """Check if instance value is a valid file path.
+
+ :param instance: string of file path
+ :return: True if instance is a valid file path (but doesn't necessarily exist), false otherwise.
+ """
+ # Same issue as with _is_directory_type
+ return True
+
+ def _parse_file_formats(self, schema_file):
+ # Open the schema file and based on "file_type" property create a Format validator
+ schema = json.load(open(schema_file, 'r'))
+ if schema['file_type'] == 'csv':
+ return self._CSVFormat(schema)
+ else:
+ raise Exception("No format found")
+
+ @staticmethod
+ def _check_path(validator, schema_bool, path, schema):
+ """Makes sure a file/directory exists or doesn't based on the "exists" property in the schema
+
+ :param validator:
+ :param schema_bool: True means file must exists, False means file should not exists
+ :param path: path of the file
+ :param schema:
+ :return: True if schema is satisfied.
+ """
+ assert(schema['type'] == 'directory' or schema['type'] == 'file')
+ path_exists = os.path.exists(path)
+ if path_exists != schema_bool:
+ raise ValidationError("{} {} exists.".format(path, "already" if path_exists else "does not"))
+
+ def _validate_file(self, validator, file_format, file_path, schema):
+ file_validator = self._file_formats.get(file_format, None)
+ if file_validator is None:
+ raise ValidationError("Could not find file validator {}".format(file_format))
+
+ if not file_validator.check(file_path):
+ raise ValidationError("File {} could not be validated against {}.".format(file_path, file_format))
+
+ # A series of validators for individual types of files. All of them should have a check(file) function that returns
+ # true only when the file is formatted correctly.
+ class _CSVFormat(object):
+ def __init__(self, schema):
+ self._properties = schema['file_properties']
+ self._required_columns = [header for header, props in schema['columns'].items() if props['required']]
+
+ def check(self, file_name):
+ csv_headers = set(pd.read_csv(file_name, nrows=0, **self._properties).columns)
+ for col in self._required_columns:
+ if col not in csv_headers:
+ return False
+
+ return True
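+
+# Sketch of a schema fragment exercising the custom properties (illustrative;
+# "csv_nodes" refers to a (name, schema_file) pair passed in via file_formats):
+#
+# {"properties": {
+#     "output_dir": {"type": "directory", "exists": false},
+#     "node_types_file": {"type": "file", "file_format": "csv_nodes"}}}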
diff --git a/bmtk-vb/bmtk/simulator/utils/sim_validator.pyc b/bmtk-vb/bmtk/simulator/utils/sim_validator.pyc
new file mode 100644
index 0000000..3a844c8
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/utils/sim_validator.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/utils/simulation_inputs.py b/bmtk-vb/bmtk/simulator/utils/simulation_inputs.py
new file mode 100644
index 0000000..bdd1588
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/simulation_inputs.py
@@ -0,0 +1,77 @@
+
+class SimInput(object):
+ registry = {} # For factory function
+
+ def __init__(self, input_name, input_type, module, params):
+ self.name = input_name
+ self.input_type = input_type
+ self.module = module
+ self.params = params.copy()
+
+ # Remove 'module' and 'input_type' from the params since the user should access them through the attributes
+ for param_key in ['module', 'input_type']:
+ if param_key in self.params:
+ del self.params[param_key]
+
+ # Special variable, not part of the standard, but useful for testing
+ if 'enabled' in params:
+ self.enabled = params['enabled']
+ del params['enabled']
+ else:
+ self.enabled = True
+
+ # Fill in missing values with default (as specified by the subclass)
+ for var_name, default_val in self._get_defaults():
+ if var_name not in self.params:
+ self.params[var_name] = default_val
+
+ # TODO: Check there are no missing parameters
+
+ @property
+ def node_set(self):
+ return self.params.get('node_set', None)
+
+ def _get_defaults(self):
+ return []
+
+ @classmethod
+ def build(cls, input_name, params):
+ params = params.copy()
+ if 'module' not in params:
+ raise Exception('inputs setting {} does not specify the "module".'.format(input_name))
+
+ if 'input_type' not in params:
+ raise Exception('inputs setting {} does not specify the "input_type".'.format(input_name))
+
+ module_name = params['module']
+ input_type = params['input_type']
+ module_cls = SimInput.registry.get(module_name, SimInput)
+
+ return module_cls(input_name, input_type, module_name, params)
+
+ @classmethod
+ def register_module(cls, subclass):
+ # For factory, register subclass based on the module name(s)
+ assert(issubclass(subclass, cls))
+ mod_registry = cls.registry
+ mod_list = subclass.avail_modules()
+ modules = mod_list if isinstance(mod_list, list) else [mod_list]
+ for mod_name in modules:
+ if mod_name in mod_registry:
+ raise Exception('Multiple modules named {}'.format(mod_name))
+ mod_registry[mod_name] = subclass
+
+ return subclass
+
+
+def from_config(cfg):
+ inputs_list = []
+ for input_name, input_params in cfg.inputs.items():
+ input_setting = SimInput.build(input_name, input_params)
+ if input_setting.enabled:
+ inputs_list.append(input_setting)
+
+ return inputs_list
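+
+# Sketch: building an input from a config-style dict (values are illustrative):
+#
+# params = {'input_type': 'spikes', 'module': 'nwb',
+#           'input_file': 'inputs.nwb', 'node_set': 'lgn'}
+# inp = SimInput.build('lgn_spikes', params)
+# print(inp.module)    # -> 'nwb'
+# print(inp.node_set)  # -> 'lgn'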
+
+
+
diff --git a/bmtk-vb/bmtk/simulator/utils/simulation_inputs.pyc b/bmtk-vb/bmtk/simulator/utils/simulation_inputs.pyc
new file mode 100644
index 0000000..66568ab
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/utils/simulation_inputs.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/utils/simulation_reports.py b/bmtk-vb/bmtk/simulator/utils/simulation_reports.py
new file mode 100644
index 0000000..a7bffd9
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/simulation_reports.py
@@ -0,0 +1,284 @@
+import os
+
+
+class SimReport(object):
+ default_dir = '.'
+ registry = {} # Used by factory to keep track of subclasses
+
+ def __init__(self, name, module, params):
+ self.report_name = name
+ self.module = module
+ self.params = params
+
+ # Not part of the standard, just a quick way to turn off modules
+ if 'enabled' in params:
+ self.enabled = params['enabled']
+ del params['enabled']
+ else:
+ self.enabled = True
+
+ # Set default parameter values (when not explicitly stated). Should occur on a module-by-module basis
+ self._set_defaults()
+
+ @property
+ def node_set(self):
+ return self.params.get('cells', 'all')
+
+ def _set_defaults(self):
+ for var_name, default_val in self._get_defaults():
+ if var_name not in self.params:
+ self.params[var_name] = default_val
+
+ def _get_defaults(self):
+ """Should be overwritten by subclass with list of (var_name, default_val) tuples."""
+ return []
+
+ @staticmethod
+ def avail_modules():
+ # Return a string (or list of strings) to identify module name for each subclass
+ raise NotImplementedError
+
+ @classmethod
+ def build(cls, report_name, params):
+ """Factory method to get the module subclass, using the params (particularlly the 'module' value, which is
+ required). If there is no registered subclass a generic SimReport object will be returned
+
+ :param report_name: name of report
+ :param params: parameters of report
+ :return: A SimReport (or subclass) object with report parameters parsed out.
+ """
+ params = params.copy()
+ if 'module' not in params:
+ raise Exception('report {} does not specify the module.'.format(report_name))
+
+ module_name = params['module']
+ del params['module']
+ module_cls = SimReport.registry.get(module_name, SimReport)
+ return module_cls(report_name, module_name, params)
+
+ @classmethod
+ def register_module(cls, subclass):
+ # For factory, register subclass based on the module name(s)
+ assert(issubclass(subclass, cls))
+ mod_registry = cls.registry
+ mod_list = subclass.avail_modules()
+ modules = mod_list if isinstance(mod_list, list) else [mod_list]
+ for mod_name in modules:
+ if mod_name in mod_registry:
+ raise Exception('Multiple modules named {}'.format(mod_name))
+ mod_registry[mod_name] = subclass
+
+ return subclass
+
+
+@SimReport.register_module
+class MembraneReport(SimReport, object):
+ def __init__(self, report_name, module, params):
+ super(MembraneReport, self).__init__(report_name, module, params)
+ # Want variable_name option to allow for a single param or a list of params
+ variables = params['variable_name']
+ if isinstance(variables, list):
+ self.params['variable_name'] = variables
+ else:
+ self.params['variable_name'] = [variables]
+ self.variables = self.params['variable_name']
+
+ self.params['buffer_data'] = self.params.pop('buffer')
+
+ if self.params['transform'] and not isinstance(self.params['transform'], dict):
+ self.params['transform'] = {var_name: self.params['transform'] for var_name in self.variables}
+
+ def _get_defaults(self):
+ # directory for saving temporary files created during simulation
+ tmp_dir = self.default_dir
+
+ # Find the report file name. Either look for the "file_name" parameter, or derive it from the report name
+ if 'file_name' in self.params:
+ file_name = self.params['file_name']
+ elif self.report_name.endswith(('.h5', '.hdf', '.hdf5')):
+ file_name = self.report_name # Avoid names like report.h5.h5
+ else:
+ file_name = '{}.h5'.format(self.report_name)
+
+ return [('cells', 'biophysical'), ('sections', 'all'), ('tmp_dir', tmp_dir), ('file_name', file_name),
+ ('buffer', True), ('transform', {})]
+
+ def add_variables(self, var_name, transform):
+ self.params['variable_name'].extend(var_name)
+ self.params['transform'].update(transform)
+
+ def can_combine(self, other):
+ def param_eq(key):
+ return self.params.get(key, None) == other.params.get(key, None)
+
+ return param_eq('cells') and param_eq('sections') and param_eq('file_name') and param_eq('buffer')
+
+ @staticmethod
+ def avail_modules():
+ return 'membrane_report'
+
+ @classmethod
+ def build(cls, name, params):
+ report = cls(name)
+ report.cells = params.get('cells', 'biophysical')
+ report.sections = params.get('sections', 'all')
+
+ if 'file_name' in params:
+ report.file_name = params['file_name']
+ report.tmp_dir = os.path.dirname(os.path.realpath(report.file_name))
+ else:
+ report.file_name = os.path.join(cls.default_dir, 'cell_vars.h5')
+ report.tmp_dir = cls.default_dir
+
+ variables = params['variable_name']
+ if isinstance(variables, list):
+ report.variables = variables
+ else:
+ report.variables = [variables]
+
+ return report
+
+
+@SimReport.register_module
+class SpikesReport(SimReport):
+ def __init__(self, report_name, module, params):
+ super(SpikesReport, self).__init__(report_name, module, params)
+
+ @classmethod
+ def build(cls, name, params):
+ return None
+
+ @staticmethod
+ def avail_modules():
+ return 'spikes_report'
+
+ @classmethod
+ def from_output_dict(cls, output_dict):
+ params = {
+ 'spikes_file': output_dict.get('spikes_file', None),
+ 'spikes_file_csv': output_dict.get('spikes_file_csv', None),
+ 'spikes_file_nwb': output_dict.get('spikes_file_nwb', None),
+ 'spikes_sort_order': output_dict.get('spikes_sort_order', None),
+ 'tmp_dir': output_dict.get('output_dir', cls.default_dir)
+ }
+ if not (params['spikes_file'] or params['spikes_file_csv'] or params['spikes_file_nwb']):
+ # User hasn't specified any spikes file
+ params['enabled'] = False
+
+ return cls('spikes_report', 'spikes_report', params)
+
+
+@SimReport.register_module
+class SEClampReport(SimReport):
+ def __init__(self, report_name, module, params):
+ super(SEClampReport, self).__init__(report_name, module, params)
+
+ @staticmethod
+ def avail_modules():
+ return 'SEClamp'
+
+
+@SimReport.register_module
+class ECPReport(SimReport):
+ def __init__(self, report_name, module, params):
+ super(ECPReport, self).__init__(report_name, module, params)
+ self.tmp_dir = self.default_dir
+ self.positions_file = None
+ self.file_name = None
+
+ @staticmethod
+ def avail_modules():
+ return 'extracellular'
+
+ def _get_defaults(self):
+ if 'file_name' in self.params:
+ file_name = self.params['file_name']
+ elif self.report_name.endswith(('.h5', '.hdf', '.hdf5')):
+ file_name = self.report_name # Avoid names like report.h5.h5
+ else:
+ file_name = '{}.h5'.format(self.report_name)
+
+ return [('tmp_dir', self.default_dir), ('file_name', file_name),
+ ('contributions_dir', os.path.join(self.default_dir, 'ecp_contributions'))]
+
+ @classmethod
+ def build(cls, name, params):
+ report = cls(name)
+
+ if 'file_name' in params:
+ report.file_name = params['file_name']
+ report.tmp_dir = os.path.dirname(os.path.realpath(report.file_name))
+ else:
+ report.file_name = os.path.join(cls.default_dir, 'ecp.h5')
+ report.tmp_dir = cls.default_dir
+
+ report.contributions_dir = params.get('contributions_dir', cls.default_dir)
+ report.positions_file = params['electrode_positions']
+ return report
+
+
+@SimReport.register_module
+class SaveSynapses(SimReport):
+ def __init__(self, report_name, module, params):
+ super(SaveSynapses, self).__init__(report_name, module, params)
+
+ @staticmethod
+ def avail_modules():
+ return 'SaveSynapses'
+
+
+@SimReport.register_module
+class MultimeterReport(MembraneReport):
+
+ @staticmethod
+ def avail_modules():
+ return ['multimeter', 'multimeter_report']
+
+
+@SimReport.register_module
+class SectionReport(MembraneReport):
+
+ @staticmethod
+ def avail_modules():
+ return ['section_report']
+
+
+def from_config(cfg):
+ SimReport.default_dir = cfg.output_dir
+
+ reports_list = []
+ membrane_reports = []
+ has_spikes_report = False
+ for report_name, report_params in cfg.reports.items():
+ # Get the Report class from the module_name parameter
+ if not report_params.get('enabled', True):
+ # not a part of the standard but will help skip modules
+ continue
+
+ report = SimReport.build(report_name, report_params)
+
+ if isinstance(report, MembraneReport):
+ # When possible for membrane reports combine multiple reports into one module if all the parameters
+ # except for the variable name differs.
+ for existing_report in membrane_reports:
+ if existing_report.can_combine(report):
+ existing_report.add_variables(report.variables, report.params['transform'])
+ break
+ else:
+ reports_list.append(report)
+ membrane_reports.append(report)
+
+ else:
+ reports_list.append(report)
+
+ if not has_spikes_report:
+ report = SpikesReport.from_output_dict(cfg.output)
+ if report is None:
+ # TODO: Log exception or possibly warning
+ pass
+ else:
+ reports_list.append(report)
+
+ return reports_list
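+
+# Sketch: a reports entry this factory understands (illustrative values):
+#
+# params = {'module': 'membrane_report', 'variable_name': 'v'}
+# report = SimReport.build('membrane_potential', params)
+# print(report.variables)           # -> ['v']
+# print(report.params['file_name']) # -> 'membrane_potential.h5'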
diff --git a/bmtk-vb/bmtk/simulator/utils/simulation_reports.pyc b/bmtk-vb/bmtk/simulator/utils/simulation_reports.pyc
new file mode 100644
index 0000000..0704f77
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/utils/simulation_reports.pyc differ
diff --git a/bmtk-vb/bmtk/simulator/utils/stimulus/LocallySparseNoise.py b/bmtk-vb/bmtk/simulator/utils/stimulus/LocallySparseNoise.py
new file mode 100644
index 0000000..ee43e9f
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/stimulus/LocallySparseNoise.py
@@ -0,0 +1,137 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+from scipy.misc import imresize
+import os
+import pandas as pd
+
+stimulus_folder = os.path.dirname(os.path.abspath(__file__))
+bob_stimulus = os.path.join(stimulus_folder, 'lsn.npy')
+
+class LocallySparseNoise(object):
+
+ def __init__(self,stim_template=None, stim_table=None):
+
+ if stim_template is None or stim_table is None:
+ raise Exception("stim_template or stim_table not provided. Please provide them or call the class methods .with_new_stimulus or .with_bob_stimulus.")
+ else:
+ self.stim_template = stim_template
+ self.stim_table = stim_table
+
+ T,y,x = stim_template.shape
+
+ self.T = T
+ self.y = y
+ self.x = x
+
+
+ def get_image_input(self, new_size=None, add_channels=False):
+
+ if new_size is not None:
+ y,x = new_size
+ data_new_size = np.empty((self.T,y,x),dtype=np.float32)
+
+ for t in range(self.stim_template.shape[0]):
+ data_new_size[t] = imresize(self.stim_template[t].astype(np.float32),new_size,interp='nearest')
+
+ if add_channels:
+ return data_new_size[:,:,:,np.newaxis]
+ else:
+ return data_new_size
+
+ @staticmethod
+ def exclude(av,y_x,exclusion=0):
+ y, x = y_x
+ X,Y = np.meshgrid(np.arange(av.shape[1]), np.arange(av.shape[0]))
+
+ mask = ((X-x)**2 + (Y-y)**2) <= exclusion**2
+ av[mask] = False
+
+ @classmethod
+ def create_sparse_noise_matrix(cls,Y=16,X=28,exclusion=5,T=9000, buffer_x=6, buffer_y=6):
+
+ Xp = X+2*buffer_x
+ Yp = Y+2*buffer_y
+
+ # 127 is mean luminance value
+ sn = 127*np.ones([T,Yp,Xp],dtype=np.uint8)
+
+ for t in range(T):
+ available = np.ones([Yp,Xp]).astype(np.bool)
+
+ while np.any(available):
+ y_available, x_available = np.where(available)
+
+ pairs = zip(y_available,x_available)
+ pair_index = np.random.choice(range(len(pairs)))
+ y,x = pairs[pair_index]
+
+ p = np.random.random()
+ if p < 0.5:
+ sn[t,y,x] = 255
+ else:
+ sn[t,y,x] = 0
+
+ cls.exclude(available,(y,x),exclusion=exclusion)
+
+ return sn[:,buffer_y:(Y+buffer_y), buffer_x:(X+buffer_x)]
+
+ def save_to_hdf(self):
+
+ pass
+
+ @staticmethod
+ def generate_stim_table(T,start_time=0,trial_length=250):
+ '''trial_length is in milliseconds'''
+
+ start_time_array = trial_length*np.arange(T) + start_time
+ column_list = [np.arange(T),start_time_array, start_time_array+trial_length-1] # -1 is because the tables in BOb use inclusive intervals, so we'll stick to that convention
+ cols = np.vstack(column_list).T
+ stim_table = pd.DataFrame(cols,columns=['frame','start','end'])
+
+ return stim_table
+
+
+ @classmethod
+ def with_new_stimulus(cls,Y=16,X=28,exclusion=5,T=9000, buffer_x=6, buffer_y=6):
+
+ stim_template = cls.create_sparse_noise_matrix(Y=Y,X=X,exclusion=exclusion,T=T, buffer_x=buffer_x, buffer_y=buffer_y)
+ T,y,x = stim_template.shape
+
+ stim_table = cls.generate_stim_table(T)
+
+ new_locally_sparse_noise = cls(stim_template=stim_template, stim_table=stim_table)
+
+ return new_locally_sparse_noise
+
+ @classmethod
+ def with_brain_observatory_stimulus(cls):
+
+ stim_template = np.load(bob_stimulus)
+ T,y,x = stim_template.shape
+
+ stim_table = cls.generate_stim_table(T)
+
+ new_locally_sparse_noise = cls(stim_template=stim_template, stim_table=stim_table)
+
+ return new_locally_sparse_noise
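+
+# Usage sketch:
+#
+# lsn = LocallySparseNoise.with_new_stimulus(T=100)
+# movie = lsn.get_image_input(new_size=(32, 56), add_channels=True)
+# print(movie.shape)  # -> (100, 32, 56, 1)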
diff --git a/bmtk-vb/bmtk/simulator/utils/stimulus/NaturalScenes.py b/bmtk-vb/bmtk/simulator/utils/stimulus/NaturalScenes.py
new file mode 100644
index 0000000..b04056b
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/stimulus/NaturalScenes.py
@@ -0,0 +1,337 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import os
+from PIL import Image
+import pandas as pd
+
+class NaturalScenes(object):
+ def __init__(self, new_size=(64,112), mode='L', dtype=np.float32, start_time=0, trial_length=250, add_channels=False):
+
+ self.new_size = new_size
+ self.mode = mode
+ self.dtype = dtype
+ self.add_channels = add_channels
+ self.start_time = start_time
+ self.trial_length = trial_length
+
+ def random_sample(self, n):
+ sample_indices = np.random.randint(0, self.num_images, n)
+ return self.stim_template[sample_indices]
+
+ # TODO: implement random_sample_with_labels
+ def random_sample_with_labels(self, n):
+ pass
+
+ def get_image_input(self,**kwargs):
+ return self.stim_template
+
+ def add_gray_screen(self):
+
+ gray_screen = np.ones(self.new_size,dtype=self.dtype)*127 # using 127 as "gray" value
+ if self.add_channels:
+ gray_screen = gray_screen[:,:,np.newaxis]
+ self.stim_template = np.vstack([self.stim_template, gray_screen[np.newaxis,:,:]])
+
+ start = int(self.stim_table.tail(1)['end']) + 1
+ end = start+self.trial_length-1 #make trial_length an argument of this function?
+ frame = int(self.stim_table.tail(1)['frame']) + 1
+
+ self.stim_table = self.stim_table.append(pd.DataFrame([[frame,start,end]],columns=['frame','start','end']),ignore_index=True)
+
+ self.label_dataframe = self.label_dataframe.append(pd.DataFrame([['gray_screen']],columns=['image_name']),ignore_index=True)
+ self.num_images = self.num_images + 1
+
+ @classmethod
+ def with_brain_observatory_stimulus(cls, new_size=(64,112), mode='L', dtype=np.float32, start_time=0, trial_length=250, add_channels=False):
+
+ from sys import platform
+
+ if platform=='linux2':
+ image_dir = '/data/mat/iSee_temp_shared/CAM_Images.icns'
+ elif platform=='darwin':
+
+ image_dir = '/Users/michaelbu/Data/Images/CAM_Images.icns'
+ if not os.path.exists(image_dir):
+ print("Detected platform: OS X. I'm assuming you've mounted \\\\aibsdata\\mat at /Volumes/mat/")
+ image_dir = '/Volumes/mat/iSee_temp_shared/CAM_Images.icns'
+
+
+        elif platform=='win32':
+            image_dir = r'\\aibsdata\mat\iSee_temp_shared\CAM_Images.icns'
+        else:
+            raise OSError("Unsupported platform '{}'; cannot locate CAM_Images".format(platform))
+
+ #image_dir = '/Users/michaelbu/Data/Images/CAM_Images' # change this to temp directory on aibsdata
+ new_ns = cls.with_new_stimulus_from_dataframe(image_dir=image_dir, new_size=new_size, mode=mode, dtype=dtype, start_time=start_time, trial_length=trial_length, add_channels=add_channels)
+
+ new_ns.add_gray_screen()
+
+ return new_ns
+
+ @staticmethod
+ def generate_stim_table(T,start_time=0,trial_length=250):
+        '''start_time and trial_length are in milliseconds'''
+
+ start_time_array = trial_length*np.arange(T) + start_time
+ column_list = [np.arange(T),start_time_array, start_time_array+trial_length-1] # -1 is because the tables in BOb use inclusive intervals, so we'll stick to that convention
+ cols = np.vstack(column_list).T
+ stim_table = pd.DataFrame(cols,columns=['frame','start','end'])
+
+ return stim_table
+
+ def to_h5(self,sample_indices=None):
+ pass
+
+ @classmethod
+ def with_new_stimulus_from_folder(cls, image_dir, new_size=(64,112), mode='L', dtype=np.float32, start_time=0, trial_length=250, add_channels=False):
+
+ new_ns = cls(new_size=new_size, mode=mode, dtype=dtype, start_time=start_time, trial_length=trial_length, add_channels=add_channels)
+
+ new_ns.im_list = os.listdir(image_dir)
+ new_ns.image_dir = image_dir
+
+ stim_list = []
+        for im in list(new_ns.im_list):  # iterate over a copy so removal below is safe
+            try:
+                im_data = Image.open(os.path.join(new_ns.image_dir,im))
+            except IOError:
+                print("Skipping file: ", im)
+                new_ns.im_list.remove(im)
+                continue  # skip files that could not be opened
+
+ im_data = im_data.convert(new_ns.mode)
+ if new_size is not None:
+ im_data = im_data.resize((new_ns.new_size[1], new_ns.new_size[0]))
+ im_data = np.array(im_data,dtype=new_ns.dtype)
+ if add_channels:
+ im_data = im_data[:,:,np.newaxis]
+ stim_list.append(im_data)
+
+ new_ns.stim_template = np.stack(stim_list)
+ new_ns.num_images = new_ns.stim_template.shape[0]
+
+ t,y,x = new_ns.stim_template.shape
+ new_ns.new_size = (y,x)
+
+ new_ns.trial_length = trial_length
+ new_ns.start_time = start_time
+ new_ns.stim_table = new_ns.generate_stim_table(new_ns.num_images,start_time=new_ns.start_time,trial_length=new_ns.trial_length)
+
+ new_ns.label_dataframe = pd.DataFrame(columns=['image_name'])
+ new_ns.label_dataframe['image_name'] = new_ns.im_list
+
+ return new_ns
+
+ @classmethod
+ def with_new_stimulus_from_dataframe(cls, image_dir, new_size=(64,112), mode='L', dtype=np.float32, start_time=0, trial_length=250, add_channels=False):
+        '''image_dir should contain a folder of images called 'images' and an hdf5 file named
+        'label_dataframe.h5' with a dataframe stored under the key 'labels'.
+        The dataframe should have columns ['image_name', 'label_1', 'label_2', ...]'''
+
+ new_ns = cls(new_size=new_size, mode=mode, dtype=dtype, start_time=start_time, trial_length=trial_length, add_channels=add_channels)
+
+ image_path = os.path.join(image_dir,'images')
+ label_dataframe = pd.read_hdf(os.path.join(image_dir,'label_dataframe.h5'),'labels')
+ new_ns.label_dataframe = label_dataframe
+
+ new_ns.image_dir = image_path
+ new_ns.im_list = list(label_dataframe.image_name)
+
+ stim_list = []
+        for im in list(new_ns.im_list):  # iterate over a copy so removal below is safe
+            try:
+                im_data = Image.open(os.path.join(image_path,im))
+            except IOError:
+                print("Skipping file: ", im)
+                new_ns.im_list.remove(im)
+                continue  # skip files that could not be opened
+
+ im_data = im_data.convert(new_ns.mode)
+ if new_size is not None:
+ im_data = im_data.resize((new_ns.new_size[1], new_ns.new_size[0]))
+ im_data = np.array(im_data,dtype=new_ns.dtype)
+ if add_channels:
+ im_data = im_data[:,:,np.newaxis]
+ stim_list.append(im_data)
+
+ new_ns.stim_template = np.stack(stim_list)
+ new_ns.num_images = new_ns.stim_template.shape[0]
+
+ if add_channels:
+ t,y,x,_ = new_ns.stim_template.shape
+ else:
+            t,y,x = new_ns.stim_template.shape
+ new_ns.new_size = (y,x)
+
+ new_ns.trial_length = trial_length
+ new_ns.start_time = start_time
+ new_ns.stim_table = new_ns.generate_stim_table(new_ns.num_images,start_time=new_ns.start_time,trial_length=new_ns.trial_length)
+
+ return new_ns
+
+ @staticmethod
+ def create_image_dir_from_hierarchy(folder, new_path, label_names=None):
+
+ import shutil
+
+ image_dataframe = pd.DataFrame(columns=["image_name"])
+
+ if os.path.exists(new_path):
+ raise Exception("path "+new_path+" already exists!")
+
+ os.mkdir(new_path)
+ os.mkdir(os.path.join(new_path,'images'))
+ for path, sub_folders, file_list in os.walk(folder):
+
+ for f in file_list:
+ try:
+ im_data = Image.open(os.path.join(path,f))
+ except IOError:
+ print("Skipping file: ", f)
+ im_data = None
+
+ if im_data is not None:
+ shutil.copy(os.path.join(path,f), os.path.join(new_path,'images',f))
+ image_name = f
+ label_vals = os.path.split(os.path.relpath(path,folder))
+ if label_names is not None:
+ current_label_names = label_names[:]
+ else:
+ current_label_names = []
+
+                    if len(label_vals) > len(current_label_names):
+                        labels_to_add = ["label_"+str(i) for i in range(len(current_label_names), len(label_vals))]
+                        current_label_names += labels_to_add
+                    elif len(label_vals) < len(current_label_names):
+                        current_label_names = current_label_names[:len(label_vals)]
+
+ vals = [f] + list(label_vals)
+ cols = ['image_name']+current_label_names
+ new_frame = pd.DataFrame([vals],columns=cols)
+
+ image_dataframe = image_dataframe.append(new_frame,ignore_index=True)
+
+ image_dataframe.to_hdf(os.path.join(new_path,'label_dataframe.h5'),'labels')
+
+ # @staticmethod
+ # def add_object_to_image(image, object_image):
+ #
+ # new_image = image.copy()
+ # new_image[np.isfinite(object_image)] = object_image[np.isfinite(object_image)]
+ # return new_image
+
+ @staticmethod
+ def add_object_to_template(template, object_image):
+
+ if template.ndim==3:
+ T,y,x = template.shape
+ elif template.ndim==4:
+ T,y,x,K = template.shape
+ else:
+ raise Exception("template.ndim must be 3 or 4")
+
+ if object_image.ndim < template.ndim-1:
+ object_image=object_image[:,:,np.newaxis]
+
+ new_template = template.copy()
+ new_template[:,np.isfinite(object_image)] = object_image[np.isfinite(object_image)]
+
+ return new_template
+
+ def add_objects_to_foreground(self, object_dict):
+
+ template_list = []
+
+ if self.label_dataframe is None:
+ self.label_dataframe = pd.DataFrame(columns=['object'])
+
+ new_label_dataframe_list = []
+
+ for obj in object_dict:
+ template_list.append(self.add_object_to_template(self.stim_template,object_dict[obj]))
+ obj_dataframe = self.label_dataframe.copy()
+ obj_dataframe['object'] = [ obj for i in range(self.num_images) ]
+ new_label_dataframe_list.append(obj_dataframe)
+
+ self.stim_template = np.vstack(template_list)
+ self.label_dataframe = pd.concat(new_label_dataframe_list,ignore_index=True)
+
+ self.num_images = self.stim_template.shape[0]
+
+ self.stim_table = self.generate_stim_table(self.num_images,start_time=self.start_time,trial_length=self.trial_length)
+
+
+ @staticmethod
+ def create_object_dict(folder, background_shape=(64,112), dtype=np.float32, rotations=False):
+
+ from scipy.misc import imresize
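+        # NOTE: scipy.misc.imresize was removed in SciPy 1.3; this helper assumes an older SciPy install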
+
+ # resize function to preserve the nans in the background
+ def resize_im(im,new_shape):
+ def mask_for_nans():
+ mask = np.ones(im.shape)
+ mask[np.isfinite(im)] = 0
+ mask = imresize(mask,new_shape,interp='nearest')
+
+ return mask.astype(np.bool)
+
+ new_im = im.copy()
+ new_im = new_im.astype(dtype)
+ new_im[np.isnan(new_im)] = -1.
+ new_im = imresize(new_im,new_shape,interp='nearest')
+
+ new_im = new_im.astype(dtype)
+ new_im[mask_for_nans()] = np.nan
+
+ return new_im
+
+ def im_on_background(im, shift=None):
+ bg = np.empty(background_shape)
+ bg[:] = np.nan
+
+            buffer_x = (background_shape[1] - im.shape[1])//2  # integer division: these are slice indices
+            buffer_y = (background_shape[0] - im.shape[0])//2
+
+ bg[buffer_y:im.shape[0]+buffer_y, buffer_x:im.shape[1]+buffer_x] = im
+
+ return bg
+
+ im_list = os.listdir(folder)
+
+ obj_dict = {}
+
+ for im_file in im_list:
+ try:
+ im = np.load(os.path.join(folder,im_file))
+ except IOError:
+ print("skipping file: ", im_file)
+ im = None
+
+ if im is not None:
+ new_shape = (np.min(background_shape), np.min(background_shape))
+ im = resize_im(im,new_shape)
+ obj_dict[im_file[:-4]] = im_on_background(im)
+ if rotations:
+ im_rot=im.copy()
+ for i in range(3):
+ im_rot = np.rot90(im_rot)
+ obj_dict[im_file[:-4]+'_'+str(90*(i+1))] = im_on_background(im_rot)
+
+ return obj_dict
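
A minimal usage sketch for the class above (the image folder is a placeholder path; only the
public methods defined in this file are used):

    from bmtk.simulator.utils.stimulus.NaturalScenes import NaturalScenes

    # Build a grayscale 64x112 stimulus template from a folder of images (hypothetical path).
    ns = NaturalScenes.with_new_stimulus_from_folder('/path/to/images', new_size=(64, 112),
                                                     trial_length=250)
    ns.add_gray_screen()             # append one blank-screen trial
    template = ns.get_image_input()  # shape (num_images, 64, 112), gray trial included
    print(ns.stim_table.head())      # frame/start/end table with inclusive intervals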
diff --git a/bmtk-vb/bmtk/simulator/utils/stimulus/StaticGratings.py b/bmtk-vb/bmtk/simulator/utils/stimulus/StaticGratings.py
new file mode 100644
index 0000000..c7bf9cb
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/stimulus/StaticGratings.py
@@ -0,0 +1,100 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import pandas as pd
+
+
+class StaticGratings (object):
+
+ def __init__(self,orientations=30.0*np.arange(6),spatial_frequencies=0.01*(2.0**np.arange(1,6)),phases=0.25*np.arange(4),num_trials=50, start_time=0, trial_length=250):
+
+ self.orientations = orientations
+ self.spatial_frequencies = spatial_frequencies
+ self.phases = phases
+ self.num_trials = num_trials
+ self.start_time = start_time
+ self.trial_length = trial_length
+
+ trial_stims = np.array([ [orientation, spat_freq, phase] for orientation in self.orientations for spat_freq in self.spatial_frequencies for phase in self.phases ])
+
+ trial_stims = np.tile(trial_stims,(num_trials,1))
+
+ indices = np.random.permutation(trial_stims.shape[0])
+ trial_stims = trial_stims[indices]
+
+ self.stim_table = pd.DataFrame(trial_stims,columns=['orientation','spatial_frequency','phase'])
+
+ T = self.stim_table.shape[0]
+ self.T = T
+ start_time_array = trial_length*np.arange(self.T) + start_time
+ end_time_array = start_time_array + trial_length
+
+ self.stim_table['start'] = start_time_array
+ self.stim_table['end'] = end_time_array
+
+ def get_image_input(self,new_size=(64,112),pix_per_degree=1.0, dtype=np.float32, add_channels=False):
+
+ y, x = new_size
+ stim_template = np.empty([self.T, y, x],dtype=dtype)
+
+ for t, row in self.stim_table.iterrows():
+ ori, sf, ph = row[0], row[1], row[2]
+
+ theta = ori*np.pi/180.0 #convert to radians
+
+            k = (sf/pix_per_degree) # cycles per pixel (sf is in cycles/degree)
+ ph = ph*np.pi*2.0
+
+ X,Y = np.meshgrid(np.arange(x),np.arange(y))
+ X = X - x/2
+ Y = Y - y/2
+ Xp, Yp = self.rotate(X,Y,theta)
+
+ stim_template[t] = np.cos(2.0*np.pi*Xp*k + ph)
+
+ self.stim_template = stim_template
+
+ if add_channels:
+ return stim_template[:,:,:,np.newaxis]
+ else:
+ return stim_template
+
+ @staticmethod
+ def rotate(X,Y, theta):
+
+ Xp = X*np.cos(theta) - Y*np.sin(theta)
+ Yp = X*np.sin(theta) + Y*np.cos(theta)
+
+ return Xp, Yp
+
+ @classmethod
+ def with_brain_observatory_stimulus(cls, num_trials=50):
+
+ orientations = 30.0*np.arange(6)
+ spatial_frequencies = 0.01*(2.0**np.arange(1,6))
+ phases = 0.25*np.arange(4)
+
+ start_time = 0
+ trial_length = 250
+
+ return cls(orientations=orientations,spatial_frequencies=spatial_frequencies,phases=phases,num_trials=num_trials,start_time=start_time,trial_length=trial_length)
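
A short sketch of how this class is typically driven (only the public methods defined above are
used; the shapes follow from the defaults):

    from bmtk.simulator.utils.stimulus.StaticGratings import StaticGratings

    sg = StaticGratings.with_brain_observatory_stimulus(num_trials=2)
    # one row per trial: orientation, spatial_frequency, phase, start, end (shuffled order)
    print(sg.stim_table.head())
    # render each trial as a cosine grating; k = sf/pix_per_degree cycles per pixel
    frames = sg.get_image_input(new_size=(64, 112), pix_per_degree=1.0)
    print(frames.shape)  # (240, 64, 112): 6 orientations * 5 frequencies * 4 phases * 2 trials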
diff --git a/bmtk-vb/bmtk/simulator/utils/stimulus/__init__.py b/bmtk-vb/bmtk/simulator/utils/stimulus/__init__.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/stimulus/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/bmtk/simulator/utils/stimulus/lsn.npy b/bmtk-vb/bmtk/simulator/utils/stimulus/lsn.npy
new file mode 100644
index 0000000..f4358ae
Binary files /dev/null and b/bmtk-vb/bmtk/simulator/utils/stimulus/lsn.npy differ
diff --git a/bmtk-vb/bmtk/simulator/utils/tools/__init__.py b/bmtk-vb/bmtk/simulator/utils/tools/__init__.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/tools/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/bmtk/simulator/utils/tools/process_spikes.py b/bmtk-vb/bmtk/simulator/utils/tools/process_spikes.py
new file mode 100644
index 0000000..0f5519a
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/tools/process_spikes.py
@@ -0,0 +1,207 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import h5py
+import numpy as np
+import pandas as pd
+import os
+
+
+def read_spk_txt(f_name):
+
+ '''
+
+ Parameters
+ ----------
+ f_name: string
+ Full path to a file containing cell IDs and spike times.
+
+ Returns
+ -------
+ A dataframe containing two columns: spike times and cell IDs.
+
+ Usage:
+ x = read_spk_txt('output/spk.dat')
+
+ '''
+
+ df = pd.read_csv(f_name, header=None, sep=' ')
+ df.columns = ['t', 'gid']
+
+ return df
+
+
+def read_spk_h5(f_name):
+
+ '''
+
+ Parameters
+ ----------
+ f_name: string
+ Full path to a file containing cell IDs and spike times.
+
+ Returns
+ -------
+ A dataframe containing two columns: spike times and cell IDs.
+
+ Usage:
+ x = read_spk_h5('output/spk.h5')
+
+ '''
+
+ f = h5py.File(f_name, 'r' , libver='latest')
+ spikes = {}
+
+ t = np.array([])
+ gids = np.array([])
+ for i, gid in enumerate(f.keys()): # save spikes of all gids
+ if (i % 1000 == 0):
+ print(i)
+ spike_times = f[gid][...]
+ t = np.append(t, spike_times)
+ gids = np.append(gids, np.ones(spike_times.size)*int(gid))
+
+ f.close()
+
+ df = pd.DataFrame(columns=['t', 'gid'])
+ df['t'] = t
+ df['gid'] = gids
+
+ return df
+
+
+def spikes_to_mean_f_rate(cells_f, spk_f, t_window, **kwargs):
+
+ '''
+
+ Parameters
+ ----------
+ cells_f: string
+ Full path to a file containing information about all cells (in particular, all cell IDs,
+ and not just those that fired spikes in a simulation).
+ spk_f: string
+ Full path to a file containing cell IDs and spike times.
+ t_window: a tuple of two floats
+ Start and stop time for the window within which the firing rate is computed.
+ **kwargs
+ spk_f_type: string with accepted values 'txt' or 'h5'
+ Type of the file from which spike times should be extracted.
+
+
+ Assumptions
+ -----------
+ It is assumed here that TIME IS in ms and the RATES ARE RETURNED in Hz.
+
+
+ Returns
+ -------
+ A dataframe containing a column of cell IDs and a column of corresponding
+ average firing rates.
+
+ Usage:
+ x = spikes_to_mean_f_rate('../network_model/cells.csv', 'output/spk.dat', (500.0, 3000.0))
+
+ '''
+
+ # Make sure the time window's start and stop times are reasonable.
+ t_start = t_window[0]
+ t_stop = t_window[1]
+ delta_t = t_stop - t_start
+    if (delta_t <= 0.0):
+        raise ValueError('spikes_to_mean_f_rate: stop time %f is <= start time %f.' % (t_stop, t_start))
+
+ # Read information about all cells.
+ cells_df = pd.read_csv(cells_f, sep=' ')
+ gids = cells_df['id'].values
+
+ # By default, the spk file type is "None", in which case it should be chosen
+ # based on the extension of the supplied spk file name.
+ spk_f_type = kwargs.get('spk_f_type', None)
+    if (spk_f_type is None):
+ spk_f_ext = spk_f.split('.')[-1]
+ if (spk_f_ext in ['txt', 'dat']):
+ spk_f_type = 'txt' # Assume this is an ASCII file.
+ elif (spk_f_ext in ['h5']):
+ spk_f_type = 'h5' # Assume this is an HDF5 file.
+        else:
+            raise ValueError('spikes_to_mean_f_rate: unrecognized file extension. Use the flag spk_f_type=\'txt\' or \'h5\' to override.')
+
+ # In case the spk_f_type was provided directly, check that the value is among those the code recognizes.
+    if (spk_f_type not in ['txt', 'h5']):
+        raise ValueError('spikes_to_mean_f_rate: unrecognized value of spk_f_type. The recognized values are \'txt\' and \'h5\'.')
+
+ # Read spikes.
+ # If the spike file has zero size, create a dataframe with all rates equal to zero.
+ # Otherwise, use spike times from the file to fill the dataframe.
+ if (os.stat(spk_f).st_size == 0):
+ f_rate_df = pd.DataFrame(columns=['gid', 'f_rate'])
+ f_rate_df['gid'] = gids
+ f_rate_df['f_rate'] = np.zeros(gids.size)
+ else:
+ # Use the appropriate function to read the spikes.
+ if (spk_f_type == 'txt'):
+ df = read_spk_txt(spk_f)
+ elif(spk_f_type == 'h5'):
+ df = read_spk_h5(spk_f)
+
+ # Keep only those entries that have spike times within the time window.
+ df = df[(df['t'] >= t_start) & (df['t'] <= t_stop)]
+
+ # Compute rates.
+ f_rate_df = df.groupby('gid').count() * 1000.0 / delta_t # Time is in ms and rate is in Hz.
+ f_rate_df.columns = ['f_rate']
+ # The 'gid' label is now used as index (after the groupby operation).
+ # Convert it to a column; then change the index name to none, as in default.
+ f_rate_df['gid'] = f_rate_df.index
+ f_rate_df.index.names = ['']
+
+ # Find cell IDs from the spk file that are not in the cell file.
+ # Remove them from the dataframe with rates.
+ gids_not_in_cells_f = f_rate_df['gid'].values[~np.in1d(f_rate_df['gid'].values, gids)]
+ f_rate_df = f_rate_df[~f_rate_df['gid'].isin(gids_not_in_cells_f)]
+
+ # Find cell IDs from the cell file that do not have counterparts in the spk file
+ # (for example, because those cells did not fire).
+ # Add these cell IDs to the dataframe; fill rates with zeros.
+ gids_not_in_spk = gids[~np.in1d(gids, f_rate_df['gid'].values)]
+ f_rate_df = f_rate_df.append(pd.DataFrame(np.array([gids_not_in_spk, np.zeros(gids_not_in_spk.size)]).T, columns=['gid', 'f_rate']))
+
+ # Sort the rows according to the cell IDs.
+    f_rate_df = f_rate_df.sort_values('gid', ascending=True)  # DataFrame.sort() was removed in pandas 0.20
+
+ return f_rate_df
+
+
+# Tests.
+
+#x = spikes_to_mean_f_rate('/data/mat/yazan/corticalCol/ice/sims/column/build/net_structure/cells.csv', '/data/mat/yazan/corticalCol/ice/sims/column/full_preliminary_runs/output008/spikes.txt', (500.0, 2500.0))
+#print x
+
+#x = spikes_to_mean_f_rate('/data/mat/yazan/corticalCol/ice/sims/column/build/net_structure/cells.csv', '/data/mat/yazan/corticalCol/ice/sims/column/full_preliminary_runs/output008/spikes.h5', (500.0, 2500.0))
+#print x
+
+#x = spikes_to_mean_f_rate('/data/mat/yazan/corticalCol/ice/sims/column/build/net_structure/cells.csv', '/data/mat/yazan/corticalCol/ice/sims/column/full_preliminary_runs/output008/spikes.txt', (500.0, 2500.0), spk_f_type='txt')
+#print x
+
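As a quick check of the conversion above: a cell that fires 5 spikes inside a (500.0, 3000.0) ms
window has delta_t = 2500 ms, so its mean rate is 5 * 1000 / 2500 = 2 Hz. A minimal call, with
placeholder file paths:

    from bmtk.simulator.utils.tools.process_spikes import spikes_to_mean_f_rate

    # cells.csv must be space-separated with an 'id' column; spk.dat holds "time gid" rows
    rates = spikes_to_mean_f_rate('cells.csv', 'output/spk.dat', (500.0, 3000.0))
    print(rates[['gid', 'f_rate']].head())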
diff --git a/bmtk-vb/bmtk/simulator/utils/tools/spatial.py b/bmtk-vb/bmtk/simulator/utils/tools/spatial.py
new file mode 100644
index 0000000..9b331d0
--- /dev/null
+++ b/bmtk-vb/bmtk/simulator/utils/tools/spatial.py
@@ -0,0 +1,26 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+
+def example():
+ print('OK')
diff --git a/bmtk-vb/bmtk/test.py~ b/bmtk-vb/bmtk/test.py~
new file mode 100644
index 0000000..2d77926
--- /dev/null
+++ b/bmtk-vb/bmtk/test.py~
@@ -0,0 +1,3 @@
+from bmtk.builder.networks import NetworkBuilder
+
+net = NetworkBuilder("cortical-column")
diff --git a/bmtk-vb/bmtk/tests/builder/test_connection_map.py b/bmtk-vb/bmtk/tests/builder/test_connection_map.py
new file mode 100644
index 0000000..9043d7b
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/builder/test_connection_map.py
@@ -0,0 +1,116 @@
+import pytest
+from itertools import product
+
+from bmtk.builder.connection_map import ConnectionMap
+from bmtk.builder import NetworkBuilder
+
+
+@pytest.fixture
+def net():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=100, x=range(100), ei='i')
+ net.add_nodes(N=50, x=range(50), y='y', ei='e')
+ return net
+
+
+def test_connection_map_fnc(net):
+ cm = ConnectionMap(sources=net.nodes(ei='i'), targets=net.nodes(ei='e'),
+ connector=lambda s, t, a, b: s['node_id']*t['node_id'],
+ connector_params={'a': 1, 'b': 0}, iterator='one_to_one',
+ edge_type_properties={'prop1': 'prop1', 'edge_type_id': 101})
+ assert(len(cm.source_nodes) == 100)
+ assert(len(cm.target_nodes) == 50)
+ assert(cm.params == [])
+ assert(cm.iterator == 'one_to_one')
+ assert(len(cm.edge_type_properties.keys()) == 2)
+ assert(cm.edge_type_id == 101)
+ for v in cm.connection_itr():
+ src_id, trg_id, val = v
+ assert(val == src_id*trg_id)
+
+
+def test_connection_map_num(net):
+ cm = ConnectionMap(sources=net.nodes(ei='i'), targets=net.nodes(ei='e'), connector=10)
+ count = 0
+ for v in cm.connection_itr():
+ src_id, trg_id, val = v
+ assert(val == 10)
+ count += 1
+ assert(count == 5000)
+
+
+def test_connection_map_list(net):
+ cm = ConnectionMap(sources=net.nodes(ei='i'), targets=net.nodes(ei='e'),
+ connector=[s.node_id*t.node_id for s, t in product(net.nodes(ei='i'), net.nodes(ei='e'))])
+ count = 0
+ for v in cm.connection_itr():
+ src_id, trg_id, val = v
+ assert(val == src_id*trg_id)
+ count += 1
+ assert(count == 5000)
+
+
+def test_connection_map_dict(net):
+ cm = ConnectionMap(sources=net.nodes(ei='i'), targets=net.nodes(ei='e'), connector={'nsyn': 10})
+ for v in cm.connection_itr():
+ src_id, trg_id, val = v
+ assert('nsyn' in val and val['nsyn'] == 10)
+
+
+def test_cm_params1(net):
+ cm = ConnectionMap(sources=net.nodes(ei='i'), targets=net.nodes(ei='e'),
+ connector=lambda s, t: 3,
+ edge_type_properties={'prop1': 'prop1', 'edge_type_id': 101})
+ cm.add_properties(names='syn_weight', rule=lambda a: a+0.15, rule_params={'a': 0.20}, dtypes=float)
+
+ assert(len(cm.params) == 1)
+ edge_props_1 = cm.params[0]
+ assert(edge_props_1.names == 'syn_weight')
+ assert(edge_props_1.get_prop_dtype('syn_weight') == float)
+ for v in cm.connection_itr():
+ src_id, trg_id, nsyn = v
+ assert(nsyn == 3)
+ assert(edge_props_1.rule() == 0.35)
+
+
+def test_cm_params2(net):
+ cm = ConnectionMap(sources=net.nodes(ei='i'), targets=net.nodes(ei='e'),
+ connector=lambda s, t: 3,
+ edge_type_properties={'prop1': 'prop1', 'edge_type_id': 101})
+ cm.add_properties(names=['w', 'c'], rule=0.15, dtypes=[float, str])
+
+ assert(len(cm.params) == 1)
+ edge_props_1 = cm.params[0]
+ assert(edge_props_1.names == ['w', 'c'])
+ assert(edge_props_1.get_prop_dtype('w'))
+ assert (edge_props_1.get_prop_dtype('c'))
+ for v in cm.connection_itr():
+ src_id, trg_id, nsyn = v
+ assert(nsyn == 3)
+ assert(edge_props_1.rule() == 0.15)
+
+
+def test_cm_params3(net):
+ cm = ConnectionMap(sources=net.nodes(ei='i'), targets=net.nodes(ei='e'),
+ connector=lambda s, t: 3,
+ edge_type_properties={'prop1': 'prop1', 'edge_type_id': 101})
+ cm.add_properties(names=['w', 'c'], rule=0.15, dtypes=[float, str])
+ cm.add_properties(names='a', rule=(1, 2, 3), dtypes=dict)
+
+ assert(len(cm.params) == 2)
+ edge_props_1 = cm.params[0]
+ assert(edge_props_1.names == ['w', 'c'])
+ assert(edge_props_1.get_prop_dtype('w'))
+ assert(edge_props_1.get_prop_dtype('c'))
+ for v in cm.connection_itr():
+ src_id, trg_id, nsyn = v
+ assert(nsyn == 3)
+ assert(edge_props_1.rule() == 0.15)
+
+ edge_props_2 = cm.params[1]
+ assert(edge_props_2.names == 'a')
+ assert(edge_props_2.get_prop_dtype('a'))
+ assert(edge_props_2.rule() == (1, 2, 3))
+
+
+# test_connection_map_fnc(net())  # leftover manual run; net is a pytest fixture and cannot be called directly
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/tests/builder/test_connector.py b/bmtk-vb/bmtk/tests/builder/test_connector.py
new file mode 100644
index 0000000..99aef6f
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/builder/test_connector.py
@@ -0,0 +1,44 @@
+from bmtk.builder import connector
+
+
+def test_fnc_params():
+ con_fnc = connector.create(connector=lambda x, p: x**p)
+ assert(con_fnc(2, 3) == 2**3)
+
+
+def test_fnc_noparams():
+ con_fnc = connector.create(connector=lambda x, p, a:x**p+a, a=10)
+ assert(con_fnc(2, 3) == 2**3+10)
+
+
+def test_literal():
+ con_fnc = connector.create(connector=100.0)
+ assert(con_fnc() == 100.0)
+
+ con_fnc1 = connector.create(connector=101.0, a=10, b='10') # parameters in literals should be ignored
+ assert(con_fnc1() == 101.0)
+
+
+def test_list():
+ con_fnc = connector.create(connector=['a', 'b', 'c'])
+ assert(con_fnc == ['a', 'b', 'c'])
+
+ con_fnc1 = connector.create(connector=[100, 200, 300], p1=1, p2='2', p34=(3,4))
+ assert(con_fnc1 == [100, 200, 300])
+
+
+def test_dict():
+ con_fnc = connector.create(connector={'a': 1, 'b': 'b', 'c': [5, 6]})
+ assert('a' in con_fnc())
+ assert('b' in con_fnc())
+ assert('c' in con_fnc())
+
+ con_fnc = connector.create(connector={'a': 1, 'b': 'b', 'c': [5, 6]}, p1='p1', p2=2)
+ assert('a' in con_fnc())
+ assert('b' in con_fnc())
+ assert('c' in con_fnc())
+
+
+#test_dict()
+#test_connector_fnc_params()
+#test_connector_fnc_noparams()
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/tests/builder/test_densenetwork.py b/bmtk-vb/bmtk/tests/builder/test_densenetwork.py
new file mode 100644
index 0000000..81dfdc7
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/builder/test_densenetwork.py
@@ -0,0 +1,307 @@
+import os
+import shutil
+import pytest
+import numpy as np
+import pandas as pd
+import h5py
+import tempfile
+
+from bmtk.builder import NetworkBuilder
+
+
+def test_create_network():
+ net = NetworkBuilder('NET1')
+ assert(net.name == 'NET1')
+ assert(net.nnodes == 0)
+ assert(net.nedges == 0)
+ assert(net.nodes_built is False)
+ assert(net.edges_built is False)
+
+
+def test_no_name():
+ with pytest.raises(Exception):
+ NetworkBuilder('')
+
+
+def test_build_nodes():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=100,
+ position=[(100.0, -50.0, 50.0)]*100,
+ tunning_angle=np.linspace(0, 365.0, 100, endpoint=False),
+ cell_type='Scnna1',
+ model_type='Biophys1',
+ location='V1',
+ ei='e')
+
+ net.add_nodes(N=25,
+ position=np.random.rand(25, 3)*[100.0, 50.0, 100.0],
+ model_type='intfire1',
+ location='V1',
+ ei='e')
+
+ net.add_nodes(N=150,
+ position=np.random.rand(150, 3)*[100.0, 50.0, 100.0],
+ tunning_angle=np.linspace(0, 365.0, 150, endpoint=False),
+ cell_type='SST',
+ model_type='Biophys1',
+ location='V1',
+ ei='i')
+
+ net.build()
+ assert(net.nodes_built is True)
+ assert(net.nnodes == 275)
+ assert(net.nedges == 0)
+ assert(len(net.nodes()) == 275)
+ assert(len(net.nodes(ei='e')) == 125)
+ assert(len(net.nodes(model_type='Biophys1')) == 250)
+ assert(len(net.nodes(location='V1', model_type='Biophys1')))
+
+ intfire_nodes = list(net.nodes(model_type='intfire1'))
+ assert(len(intfire_nodes) == 25)
+ node1 = intfire_nodes[0]
+ assert(node1['model_type'] == 'intfire1' and 'cell_type' not in node1)
+
+
+def test_build_nodes1():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=3, node_id=[100, 200, 300], node_type_id=101, name=['one', 'two', 'three'])
+ node_one = list(net.nodes(name='one'))[0]
+ assert(node_one['name'] == 'one')
+ assert(node_one['node_id'] == 100)
+ assert(node_one['node_type_id'] == 101)
+
+ node_three = list(net.nodes(name='three'))[0]
+ assert(node_three['name'] == 'three')
+ assert(node_three['node_id'] == 300)
+ assert(node_three['node_type_id'] == 101)
+
+
+def test_build_nodes_fail1():
+ net = NetworkBuilder('NET1')
+ with pytest.raises(Exception):
+ net.add_nodes(N=100, list1=[100]*99)
+
+
+def test_build_nodes_fail2():
+ net = NetworkBuilder('NET1')
+ with pytest.raises(Exception):
+ net.add_nodes(N=2, node_type_id=0)
+ net.add_nodes(N=2, node_type_id=0)
+
+
+def test_nsyn_edges():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=100, cell_type='Scnna1', ei='e')
+ net.add_nodes(N=100, cell_type='PV1', ei='i')
+ net.add_nodes(N=100, cell_type='PV2', ei='i')
+ net.add_edges(source={'ei': 'i'}, target={'ei': 'e'}, connection_rule=lambda s, t: 1) # 200*100 = 20000 edges
+ net.add_edges(source=net.nodes(cell_type='Scnna1'), target=net.nodes(cell_type='PV1'),
+ connection_rule=lambda s, t: 2) # 100*100*2 = 20000
+ net.build()
+ assert(net.nedges == 20000 + 20000)
+ assert(net.edges_built is True)
+ #print list(net.edges())
+
+
+def test_save_nsyn_table():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, cell_type='Scnna1', ei='e')
+ net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, cell_type='PV1', ei='i')
+ net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, tags=np.linspace(0, 100, 100), cell_type='PV2', ei='i')
+ net.add_edges(source={'ei': 'i'}, target={'ei': 'e'}, connection_rule=lambda s, t: 1,
+ p1='e2i', p2='e2i') # 200*100 = 20000 edges
+ net.add_edges(source=net.nodes(cell_type='Scnna1'), target=net.nodes(cell_type='PV1'),
+ connection_rule=lambda s, t: 2, p1='s2p') # 100*100*2 = 20000
+ net.build()
+ nodes_h5 = tempfile.NamedTemporaryFile(suffix='.h5')
+ nodes_csv = tempfile.NamedTemporaryFile(suffix='.csv')
+ edges_h5 = tempfile.NamedTemporaryFile(suffix='.h5')
+ edges_csv = tempfile.NamedTemporaryFile(suffix='.csv')
+
+ net.save_nodes(nodes_h5.name, nodes_csv.name)
+ net.save_edges(edges_h5.name, edges_csv.name)
+
+ assert(os.path.exists(nodes_h5.name) and os.path.exists(nodes_csv.name))
+ node_types_df = pd.read_csv(nodes_csv.name, sep=' ')
+ assert(len(node_types_df) == 3)
+ assert('cell_type' in node_types_df.columns)
+ assert('ei' in node_types_df.columns)
+    assert('position' not in node_types_df.columns)  # per-node params don't belong in the types csv
+
+ nodes_h5 = h5py.File(nodes_h5.name, 'r')
+ assert ('node_id' in nodes_h5['/nodes/NET1'])
+ assert (len(nodes_h5['/nodes/NET1/node_id']) == 300)
+ assert (len(nodes_h5['/nodes/NET1/node_type_id']) == 300)
+ assert (len(nodes_h5['/nodes/NET1/node_group_id']) == 300)
+ assert (len(nodes_h5['/nodes/NET1/node_group_index']) == 300)
+
+ node_groups = {nid: grp for nid, grp in nodes_h5['/nodes/NET1'].items() if isinstance(grp, h5py.Group)}
+ for grp in node_groups.values():
+ if len(grp) == 1:
+ assert ('position' in grp and len(grp['position']) == 200)
+
+ elif len(grp) == 2:
+ assert ('position' in grp and len(grp['position']) == 100)
+ assert ('tags' in grp and len(grp['tags']) == 100)
+
+ else:
+ assert False
+
+ assert(os.path.exists(edges_h5.name) and os.path.exists(edges_csv.name))
+ edge_types_df = pd.read_csv(edges_csv.name, sep=' ')
+ assert (len(edge_types_df) == 2)
+ assert ('p1' in edge_types_df.columns)
+ assert ('p2' in edge_types_df.columns)
+
+ edges_h5 = h5py.File(edges_h5.name, 'r')
+ assert('source_to_target' in edges_h5['/edges/NET1_to_NET1/indicies'])
+ assert ('target_to_source' in edges_h5['/edges/NET1_to_NET1/indicies'])
+ assert (len(edges_h5['/edges/NET1_to_NET1/target_node_id']) == 30000)
+ assert (len(edges_h5['/edges/NET1_to_NET1/source_node_id']) == 30000)
+
+ assert (edges_h5['/edges/NET1_to_NET1/target_node_id'][0] == 0)
+ assert (edges_h5['/edges/NET1_to_NET1/source_node_id'][0] == 100)
+ assert (edges_h5['/edges/NET1_to_NET1/edge_group_index'][0] == 0)
+ assert (edges_h5['/edges/NET1_to_NET1/edge_type_id'][0] == 100)
+ assert (edges_h5['/edges/NET1_to_NET1/0/nsyns'][0] == 1)
+
+ assert (edges_h5['/edges/NET1_to_NET1/target_node_id'][29999] == 199)
+ assert (edges_h5['/edges/NET1_to_NET1/source_node_id'][29999] == 99)
+ assert (edges_h5['/edges/NET1_to_NET1/edge_group_id'][29999] == 0)
+ assert (edges_h5['/edges/NET1_to_NET1/edge_type_id'][29999] == 101)
+ assert (edges_h5['/edges/NET1_to_NET1/0/nsyns'][29999] == 2)
+
+ #try:
+ # os.remove('tmp_nodes.h5')
+ # os.remove('tmp_node_types.csv')
+ # os.remove('tmp_edges.h5')
+ # os.remove('tmp_edge_types.csv')
+ #except:
+ # pass
+
+
+def test_save_weights():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, cell_type='Scnna1', ei='e')
+ net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, cell_type='PV1', ei='i')
+ net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, tags=np.linspace(0, 100, 100), cell_type='PV2', ei='i')
+    cm = net.add_edges(source={'ei': 'i'}, target={'ei': 'e'}, connection_rule=lambda s, t: 3,
+                  p1='e2i', p2='e2i') # 200*100 connections, 3 syns each = 60000 edges
+    cm.add_properties(names=['segment', 'distance'], rule=lambda s, t: [1, 0.5], dtypes=[int, float])
+
+ net.add_edges(source=net.nodes(cell_type='Scnna1'), target=net.nodes(cell_type='PV1'),
+                  connection_rule=lambda s, t: 2, p1='s2p') # 100*100*2 = 20000 edges
+
+ net.build()
+ net_dir = tempfile.mkdtemp()
+ net.save_nodes('tmp_nodes.h5', 'tmp_node_types.csv', output_dir=net_dir)
+ net.save_edges('tmp_edges.h5', 'tmp_edge_types.csv', output_dir=net_dir)
+
+ edges_h5 = h5py.File('{}/tmp_edges.h5'.format(net_dir), 'r')
+ assert(net.nedges == 80000)
+ assert(len(edges_h5['/edges/NET1_to_NET1/0/distance']) == 60000)
+ assert(len(edges_h5['/edges/NET1_to_NET1/0/segment']) == 60000)
+ assert(len(edges_h5['/edges/NET1_to_NET1/1/nsyns']) == 10000)
+ assert(edges_h5['/edges/NET1_to_NET1/0/distance'][0] == 0.5)
+ assert(edges_h5['/edges/NET1_to_NET1/0/segment'][0] == 1)
+ assert(edges_h5['/edges/NET1_to_NET1/1/nsyns'][0] == 2)
+
+ #try:
+ # os.remove('tmp_nodes.h5')
+ # os.remove('tmp_node_types.csv')
+ # os.remove('tmp_edges.h5')
+ # os.remove('tmp_edge_types.csv')
+ #except:
+ # pass
+
+
+def test_save_multinetwork():
+ net1 = NetworkBuilder('NET1')
+ net1.add_nodes(N=100, position=[(0.0, 1.0, -1.0)] * 100, cell_type='Scnna1', ei='e')
+ net1.add_edges(source={'ei': 'e'}, target={'ei': 'e'}, connection_rule=5, ctype_1='n1_rec')
+ net1.build()
+
+ net2 = NetworkBuilder('NET2')
+ net2.add_nodes(N=10, position=[(0.0, 1.0, -1.0)] * 10, cell_type='PV1', ei='i')
+ net2.add_edges(connection_rule=10, ctype_1='n2_rec')
+ net2.add_edges(source=net1.nodes(), target={'ei': 'i'}, connection_rule=1, ctype_2='n1_n2')
+ net2.add_edges(target=net1.nodes(cell_type='Scnna1'), source={'cell_type': 'PV1'}, connection_rule=2,
+ ctype_2='n2_n1')
+ net2.build()
+
+ net_dir = tempfile.mkdtemp()
+ net1.save_edges(output_dir=net_dir)
+ net2.save_edges(output_dir=net_dir)
+
+ n1_n1_fname = '{}/{}_{}'.format(net_dir, 'NET1', 'NET1')
+ edges_h5 = h5py.File(n1_n1_fname + '_edges.h5', 'r')
+ assert(len(edges_h5['/edges/NET1_to_NET1/target_node_id']) == 100*100)
+ assert(len(edges_h5['/edges/NET1_to_NET1/0/nsyns']) == 100*100)
+ assert(edges_h5['/edges/NET1_to_NET1/0/nsyns'][0] == 5)
+ edge_types_csv = pd.read_csv(n1_n1_fname + '_edge_types.csv', sep=' ')
+ assert(len(edge_types_csv) == 1)
+ assert('ctype_2' not in edge_types_csv.columns.values)
+ assert(edge_types_csv['ctype_1'].iloc[0] == 'n1_rec')
+
+ n1_n2_fname = '{}/{}_{}'.format(net_dir, 'NET1', 'NET2')
+ edges_h5 = h5py.File(n1_n2_fname + '_edges.h5', 'r')
+ assert(len(edges_h5['/edges/NET1_to_NET2/target_node_id']) == 100*10)
+ assert(len(edges_h5['/edges/NET1_to_NET2/0/nsyns']) == 100*10)
+ assert(edges_h5['/edges/NET1_to_NET2/0/nsyns'][0] == 1)
+ edge_types_csv = pd.read_csv(n1_n2_fname + '_edge_types.csv', sep=' ')
+ assert(len(edge_types_csv) == 1)
+ assert('ctype_1' not in edge_types_csv.columns.values)
+ assert(edge_types_csv['ctype_2'].iloc[0] == 'n1_n2')
+
+ n2_n1_fname = '{}/{}_{}'.format(net_dir, 'NET2', 'NET1')
+ edges_h5 = h5py.File(n2_n1_fname + '_edges.h5', 'r')
+ assert(len(edges_h5['/edges/NET2_to_NET1/target_node_id']) == 100*10)
+ assert(len(edges_h5['/edges/NET2_to_NET1/0/nsyns']) == 100*10)
+ assert(edges_h5['/edges/NET2_to_NET1/0/nsyns'][0] == 2)
+ edge_types_csv = pd.read_csv(n2_n1_fname + '_edge_types.csv', sep=' ')
+ assert(len(edge_types_csv) == 1)
+ assert('ctype_1' not in edge_types_csv.columns.values)
+ assert(edge_types_csv['ctype_2'].iloc[0] == 'n2_n1')
+
+ n2_n2_fname = '{}/{}_{}'.format(net_dir, 'NET2', 'NET2')
+ edges_h5 = h5py.File(n2_n2_fname + '_edges.h5', 'r')
+ assert(len(edges_h5['/edges/NET2_to_NET2/target_node_id']) == 10*10)
+ assert(len(edges_h5['/edges/NET2_to_NET2/0/nsyns']) == 10*10)
+ assert(edges_h5['/edges/NET2_to_NET2/0/nsyns'][0] == 10)
+ edge_types_csv = pd.read_csv(n2_n2_fname + '_edge_types.csv', sep=' ')
+ assert(len(edge_types_csv) == 1)
+ assert('ctype_2' not in edge_types_csv.columns.values)
+ assert(edge_types_csv['ctype_1'].iloc[0] == 'n2_rec')
+
+
+def test_save_multinetwork_1():
+ net1 = NetworkBuilder('NET1')
+ net1.add_nodes(N=100, position=[(0.0, 1.0, -1.0)] * 100, cell_type='Scnna1', ei='e')
+ net1.add_edges(source={'ei': 'e'}, target={'ei': 'e'}, connection_rule=5, ctype_1='n1_rec')
+ net1.build()
+
+ net2 = NetworkBuilder('NET2')
+ net2.add_nodes(N=10, position=[(0.0, 1.0, -1.0)] * 10, cell_type='PV1', ei='i')
+ net2.add_edges(connection_rule=10, ctype_1='n2_rec')
+ net2.add_edges(source=net1.nodes(), target={'ei': 'i'}, connection_rule=1, ctype_2='n1_n2')
+ net2.add_edges(target=net1.nodes(cell_type='Scnna1'), source={'cell_type': 'PV1'}, connection_rule=2,
+ ctype_2='n2_n1')
+ net2.build()
+ net_dir = tempfile.mkdtemp()
+ net2.save_edges(edges_file_name='NET2_NET1_edges.h5', edge_types_file_name='NET2_NET1_edge_types.csv',
+ output_dir=net_dir, src_network='NET2')
+
+ n1_n2_fname = '{}/{}_{}'.format(net_dir, 'NET2', 'NET1')
+ edges_h5 = h5py.File(n1_n2_fname + '_edges.h5', 'r')
+ assert(len(edges_h5['/edges/NET2_to_NET1/target_node_id']) == 100*10)
+ assert(len(edges_h5['/edges/NET2_to_NET1/0/nsyns']) == 100*10)
+ assert(edges_h5['/edges/NET2_to_NET1/0/nsyns'][0] == 2)
+ edge_types_csv = pd.read_csv(n1_n2_fname + '_edge_types.csv', sep=' ')
+ assert(len(edge_types_csv) == 1)
+ assert('ctype_1' not in edge_types_csv.columns.values)
+ assert(edge_types_csv['ctype_2'].iloc[0] == 'n2_n1')
+
+
+if __name__ == '__main__':
+ test_save_weights()
+ # test_save_multinetwork_1()
diff --git a/bmtk-vb/bmtk/tests/builder/test_edge_iterator.py b/bmtk-vb/bmtk/tests/builder/test_edge_iterator.py
new file mode 100644
index 0000000..8d968fc
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/builder/test_edge_iterator.py
@@ -0,0 +1,72 @@
+import pytest
+
+from bmtk.builder import NetworkBuilder
+
+def test_itr_basic():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, cell_type='Scnna1', ei='e')
+ net.add_nodes(N=100, position=[(0.0, 1.0, -1.0)]*100, cell_type='PV1', ei='i')
+ net.add_edges(source={'ei': 'e'}, target={'ei': 'i'}, connection_rule=5, syn_type='e2i')
+ net.add_edges(source={'cell_type': 'PV1'}, target={'cell_type': 'Scnna1'}, connection_rule=5, syn_type='i2e')
+ net.build()
+
+ edges = net.edges()
+ assert(len(edges) == 100*100*2)
+ assert(edges[0]['nsyns'] == 5)
+
+
+def test_itr_advanced_search():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=1, cell_type='Scnna1', ei='e')
+ net.add_nodes(N=50, cell_type='PV1', ei='i')
+ net.add_nodes(N=100, cell_type='PV2', ei='i')
+ net.add_edges(source={'ei': 'e'}, target={'ei': 'i'}, connection_rule=5, syn_type='e2i', nm='A')
+ net.add_edges(source={'cell_type': 'PV1'}, target={'cell_type': 'PV2'}, connection_rule=5, syn_type='i2i', nm='B')
+ net.add_edges(source={'cell_type': 'PV2'}, target={'ei': 'i'}, connection_rule=5, syn_type='i2i', nm='C')
+ net.build()
+
+ edges = net.edges(target_nodes=net.nodes(cell_type='Scnna1'))
+ assert(len(edges) == 0)
+
+ edges = net.edges(source_nodes={'ei': 'e'}, target_nodes={'ei': 'i'})
+ assert(len(edges) == 50 + 100)
+
+ edges = net.edges(source_nodes=[n.node_id for n in net.nodes(ei='e')])
+ assert(len(edges) == 50 + 100)
+
+ edges = net.edges(source_nodes={'ei': 'i'})
+ assert(len(edges) == 100 * 100 * 2)
+ for e in edges:
+ assert(e['syn_type'] == 'i2i')
+
+ edges = net.edges(syn_type='i2i')
+    assert(len(edges) == 100 * 100 * 2)
+ for e in edges:
+ assert(e['nm'] != 'A')
+
+ edges = net.edges(syn_type='i2i', nm='C')
+ assert(len(edges) == 100 * 150)
+
+
+def test_mulitnet_iterator():
+ net1 = NetworkBuilder('NET1')
+ net1.add_nodes(N=50, cell_type='Rorb', ei='e')
+ net1.build()
+
+ net2 = NetworkBuilder('NET2')
+ net2.add_nodes(N=100, cell_type='Scnna1', ei='e')
+ net2.add_nodes(N=100, cell_type='PV1', ei='i')
+ net2.add_edges(source={'ei': 'e'}, target={'ei': 'i'}, connection_rule=5, syn_type='e2i', net_type='rec')
+ net2.add_edges(source=net1.nodes(), target={'ei': 'e'}, connection_rule=1, syn_type='e2e', net_type='fwd')
+ net2.build()
+
+ assert(len(net2.edges()) == 50*100 + 100*100)
+ assert(len(net2.edges(source_network='NET2', target_network='NET1')) == 0)
+ assert(len(net2.edges(source_network='NET1', target_network='NET2')) == 50*100)
+ assert(len(net2.edges(target_network='NET2', net_type='rec')) == 100*100)
+
+ edges = net2.edges(source_network='NET1')
+ assert(len(edges) == 50*100)
+ for e in edges:
+ assert(e['net_type'] == 'fwd')
+
diff --git a/bmtk-vb/bmtk/tests/builder/test_id_generator.py b/bmtk-vb/bmtk/tests/builder/test_id_generator.py
new file mode 100644
index 0000000..ba9dc5e
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/builder/test_id_generator.py
@@ -0,0 +1,36 @@
+import pytest
+
+from bmtk.builder.id_generator import IDGenerator
+
+def test_generator():
+ generator = IDGenerator()
+ assert(generator.next() == 0)
+ assert(generator.next() == 1)
+ assert(generator.next() == 2)
+
+
+def test_generator_initval():
+ generator = IDGenerator(101)
+ assert(generator.next() == 101)
+ assert(generator.next() == 102)
+ assert(generator.next() == 103)
+
+
+def test_contains():
+ generator = IDGenerator(init_val=10)
+ gids = [generator.next() for _ in range(10)]
+ assert(len(gids) == 10)
+ assert(10 in generator)
+ assert(19 in generator)
+ assert(20 not in generator)
+
+
+def test_remove():
+ generator = IDGenerator(init_val=101)
+ assert(generator.next() == 101)
+ generator.remove_id(102)
+ generator.remove_id(104)
+ generator.remove_id(106)
+ assert(generator.next() == 103)
+ assert(generator.next() == 105)
+ assert(generator.next() == 107)
diff --git a/bmtk-vb/bmtk/tests/builder/test_iterator.py b/bmtk-vb/bmtk/tests/builder/test_iterator.py
new file mode 100644
index 0000000..82692d5
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/builder/test_iterator.py
@@ -0,0 +1,134 @@
+import pytest
+import itertools
+
+from bmtk.builder import connector, iterator
+from bmtk.builder import NetworkBuilder
+from bmtk.builder.node import Node
+
+
+@pytest.fixture
+def network():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=100, x=range(100), ei='i')
+ net.add_nodes(N=50, x=range(50), y='y', ei='e')
+ return net
+
+
+def test_one2one_fnc():
+ def connector_fnc(s, t):
+ assert(s['ei'] == 'i')
+ assert(t['ei'] == 'e')
+ return '100'
+
+ net = network()
+ conr = connector.create(connector_fnc)
+ itr = iterator.create('one_to_one', conr)
+ count = 0
+ for v in itr(net.nodes(ei='i'), net.nodes(ei='e'), conr):
+ src_id, trg_id, val = v
+ assert(src_id < 100)
+ assert(trg_id >= 100)
+ assert(val == '100')
+ count += 1
+ assert(count == 100*50)
+
+
+def test_one2all_fnc():
+ def connector_fnc(s, ts):
+ assert(isinstance(s, Node))
+ assert(s['ei'] == 'i')
+ assert(len(ts) == 50)
+ return [100]*50
+
+ net = network()
+ conr = connector.create(connector_fnc)
+ itr = iterator.create('one_to_all', conr)
+ count = 0
+ for v in itr(net.nodes(ei='i'), net.nodes(ei='e'), conr):
+ src_id, trg_id, val = v
+ assert(src_id < 100)
+ assert(trg_id >= 100)
+ assert(val == 100)
+ count += 1
+ assert(count == 5000)
+
+
+def test_all2one_fnc():
+ def connector_fnc(ss, t):
+ assert(isinstance(t, Node))
+ assert(t['ei'] == 'e')
+ assert(len(ss) == 100)
+ return [100]*100
+
+ net = network()
+ conr = connector.create(connector_fnc)
+ itr = iterator.create('all_to_one', conr)
+ count = 0
+ for v in itr(net.nodes(ei='i'), net.nodes(ei='e'), conr):
+ src_id, trg_id, val = v
+ assert(src_id < 100)
+ assert(trg_id >= 100)
+ assert(val == 100)
+ count += 1
+ assert(count == 5000)
+
+
+def test_literal():
+ net = network()
+ conr = connector.create(100)
+ itr = iterator.create('one_to_one', conr)
+ count = 0
+ for v in itr(net.nodes(ei='i'), net.nodes(ei='e'), conr):
+ src_id, trg_id, val = v
+ assert(src_id < 100)
+ assert(trg_id >= 100)
+ assert(val == 100)
+ count += 1
+
+ assert(count == 5000)
+
+
+def test_dict():
+ net = network()
+ conr = connector.create({'nsyn': 10, 'target': 'axon'})
+ itr = iterator.create('one_to_one', conr)
+ count = 0
+ for v in itr(net.nodes(ei='i'), net.nodes(ei='e'), conr):
+ src_id, trg_id, val = v
+ assert(src_id < 100)
+ assert(trg_id >= 100)
+ assert(val['nsyn'] == 10)
+ assert(val['target'] == 'axon')
+ count += 1
+
+ assert (count == 5000)
+
+
+def test_one2one_list():
+ net = network()
+ vals = [s.node_id*t.node_id for s,t in itertools.product(net.nodes(ei='i'), net.nodes(ei='e'))]
+ conr = connector.create(vals)
+ itr = iterator.create('one_to_one', conr)
+ for v in itr(net.nodes(ei='i'), net.nodes(ei='e'), conr):
+ src_id, trg_id, val = v
+ assert(src_id*trg_id == val)
+
+
+def test_one2all_list():
+ net = network()
+ vals = [v.node_id for v in net.nodes(ei='e')]
+ conr = connector.create(vals)
+ itr = iterator.create('one_to_all', conr)
+ for v in itr(net.nodes(ei='i'), net.nodes(ei='e'), conr):
+ src_id, trg_id, val = v
+ assert(trg_id == val)
+
+
+def test_all2one_list():
+ net = network()
+ vals = [v.node_id for v in net.nodes(ei='i')]
+ conr = connector.create(vals)
+ itr = iterator.create('all_to_one', conr)
+ for v in itr(net.nodes(ei='i'), net.nodes(ei='e'), conr):
+ src_id, trg_id, val = v
+ assert(src_id == val)
diff --git a/bmtk-vb/bmtk/tests/builder/test_node_pool.py b/bmtk-vb/bmtk/tests/builder/test_node_pool.py
new file mode 100644
index 0000000..65cd6b1
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/builder/test_node_pool.py
@@ -0,0 +1,81 @@
+import pytest
+
+from bmtk.builder import NetworkBuilder
+
+
+def test_single_node():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(prop1='prop1', prop2='prop2', param1=['param1'])
+ nodes = list(net.nodes())
+ assert(len(nodes) == 1)
+ assert(nodes[0]['param1'] == 'param1')
+ assert(nodes[0]['prop1'] == 'prop1')
+ assert(nodes[0]['prop2'] == 'prop2')
+
+
+def test_node_set():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=100, prop1='prop1', param1=range(100))
+ node_pool = net.nodes()
+ assert(node_pool.filter_str == '*')
+
+ nodes = list(node_pool)
+ assert(len(nodes) == 100)
+ assert(nodes[0]['prop1'] == 'prop1')
+ assert(nodes[0]['param1'] == 0)
+ assert(nodes[99]['prop1'] == 'prop1')
+ assert(nodes[99]['param1'] == 99)
+ assert(nodes[0]['node_type_id'] == nodes[99]['node_type_id'])
+ assert(nodes[0]['node_id'] != nodes[99]['node_id'])
+
+
+def test_node_sets():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=100, prop_n='prop1', pool1='p1', sp='sp', param1=range(100))
+ net.add_nodes(N=100, prop_n='prop2', pool2='p2', sp='sp', param1=range(100))
+ net.add_nodes(N=100, prop_n='prop3', pool3='p3', sp='sp', param1=range(100))
+ node_pool_1 = net.nodes(prop_n='prop1')
+ assert(len(node_pool_1) == 100)
+ assert(node_pool_1.filter_str == "prop_n=='prop1'")
+ for n in node_pool_1:
+ assert('pool1' in n and n['prop_n'] == 'prop1')
+
+ node_pool_2 = net.nodes(sp='sp')
+ assert(node_pool_2.filter_str == "sp=='sp'")
+ assert(len(node_pool_2) == 300)
+ for n in node_pool_2:
+ assert(n['sp'] == 'sp')
+
+ node_pool_3 = net.nodes(param1=10)
+ assert(len(node_pool_3) == 3)
+ assert(node_pool_3.filter_str == "param1=='10'")
+ nodes = list(node_pool_3)
+ assert(nodes[0]['node_id'] == 10)
+ assert(nodes[1]['node_id'] == 110)
+ assert(nodes[2]['node_id'] == 210)
+ assert(nodes[0]['node_type_id'] != nodes[1]['node_type_id'] != nodes[2]['node_type_id'])
+
+
+def test_multi_search():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=10, prop_n='prop1', sp='sp1', param1=range(0, 10))
+ net.add_nodes(N=10, prop_n='prop1', sp='sp2', param1=range(5, 15))
+ net.add_nodes(N=20, prop_n='prop2', sp='sp2', param1=range(20))
+ node_pool = net.nodes(prop_n='prop1', param1=5)
+ assert(len(node_pool) == 2)
+ nodes = list(node_pool)
+ assert(nodes[0]['node_id'] == 5)
+ assert(nodes[1]['node_id'] == 10)
+
+
+def test_failed_search():
+ net = NetworkBuilder('NET1')
+ net.add_nodes(N=100, p1='p1', q1=range(100))
+ node_pool = net.nodes(p1='p2')
+ assert(len(node_pool) == 0)
+
+ node_pool = net.nodes(q2=10)
+ assert(len(node_pool) == 0)
+
+
+# test_failed_search()  # leftover manual run; pytest collects these tests automatically
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/tests/builder/test_node_set.py b/bmtk-vb/bmtk/tests/builder/test_node_set.py
new file mode 100644
index 0000000..34b7afa
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/builder/test_node_set.py
@@ -0,0 +1,52 @@
+import pytest
+from bmtk.builder.node_set import NodeSet
+from bmtk.builder.node import Node
+from bmtk.builder.id_generator import IDGenerator
+
+def test_node_set():
+ generator = IDGenerator()
+ node_set = NodeSet(N=100,
+ node_params={'p1': range(100), 'p2': range(0, 1000, 100)},
+ node_type_properties={'prop1': 'prop1', 'node_type_id': 1})
+ print(node_set.N)
+ print(node_set.node_type_id)
+ print(node_set.params_keys)
+
+ nodes = node_set.build(generator)
+    assert(len(nodes) == 100)
+    assert(nodes[1]['p1'] == 1)
+    assert(nodes[1]['p2'] == 100)
+    assert(nodes[1]['prop1'] == 'prop1')
+    assert(nodes[1]['node_type_id'] == 1)
+
+
+def test_set_hash():
+ node_set1 = NodeSet(N=100,
+ node_params={'param1': range(100)},
+ node_type_properties={'prop1': 'prop1', 'node_type_id': 1})
+ node_set2 = NodeSet(N=100,
+ node_params = {'p1': range(100)},
+ node_type_properties={'prop1': 'prop2', 'node_type_id': 2})
+ node_set3 = NodeSet(N=10,
+ node_params={'p1': ['hello']*10},
+ node_type_properties={'prop1': 'prop3', 'node_type_id': 3})
+
+ assert(node_set1.params_hash != node_set2.params_hash)
+ assert(node_set2.params_hash == node_set3.params_hash)
+
+
+def test_node():
+ node_set1 = NodeSet(N=100,
+ node_params={'param1': range(100)},
+ node_type_properties={'prop1': 'prop1', 'node_type_id': 1})
+ nodes = node_set1.build(IDGenerator())
+ node_1 = nodes[0]
+ assert(node_1.node_id == 0)
+ assert(node_1['node_id'] == 0)
+ assert(node_1.node_type_id == 1)
+ assert(node_1['node_type_id'] == 1)
+ assert('prop1' in node_1.node_type_properties)
+ assert('param1' in node_1.params)
+ assert('node_id' in node_1.params)
+ assert('param1' in node_set1.params_keys)
+ assert(node_1.params_hash == node_set1.params_hash)
diff --git a/bmtk-vb/bmtk/tests/simulator/bionet/bionet_virtual_files.py b/bmtk-vb/bmtk/tests/simulator/bionet/bionet_virtual_files.py
new file mode 100644
index 0000000..9444329
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/bionet/bionet_virtual_files.py
@@ -0,0 +1,172 @@
+import numpy as np
+
+from bmtk.utils.io import tabular_network as tn
+
+
+class NodeRow(tn.NodeRow):
+ @property
+ def with_dynamics_params(self):
+ return False
+
+
+class EdgeRow(tn.EdgeRow):
+ @property
+ def with_dynamics_params(self):
+ return False
+
+
+class NodesFile(tn.NodesFile):
+ def __init__(self, N):
+ self._network_name = 'test_bionet'
+ self._version = None
+ self._iter_index = 0
+ self._nrows = 0
+ self._node_types_table = None
+
+ self._N = N
+ self._rot_delta = 360.0/float(N)
+ self._node_types_table = {
+ 101: {
+ 'pop_name': 'bio_exc', 'node_type_id': 101, 'model_type': 'biophysical',
+ 'morphology': 'be_morphology.swc',
+ 'dynamics_params': 'be_dynamics.json',
+ 'ei': 'e'
+ },
+ 102: {
+ 'pop_name': 'point_exc', 'node_type_id': 102, 'model_type': 'point_IntFire1',
+ 'dynamics_params': 'pe_dynamics.json',
+ 'ei': 'e'
+ },
+ 103: {
+ 'pop_name': 'bio_inh', 'node_type_id': 103, 'model_type': 'biophysical',
+ 'morphology': 'bi_morphology.swc',
+ 'dynamics_params': 'bi_dynamics.json',
+ 'ei': 'i'
+ },
+ 104: {
+ 'pop_name': 'point_inh', 'node_type_id': 104, 'model_type': 'point_IntFire1',
+            'dynamics_params': 'pi_dynamics.json',
+ 'ei': 'i'
+ }
+        }
+
+ @property
+ def name(self):
+ """name of network containing these nodes"""
+ return self._network_name
+
+ @property
+ def version(self):
+ return self._version
+
+ @property
+ def gids(self):
+ raise NotImplementedError()
+
+ @property
+ def node_types_table(self):
+ return self._node_types_table
+
+ def load(self, nodes_file, node_types_file):
+ raise NotImplementedError()
+
+ def get_node(self, gid, cache=False):
+ return self[gid]
+
+ def __len__(self):
+ return self._N
+
+ def __iter__(self):
+ self._iter_index = 0
+ return self
+
+    def next(self):
+        if self._iter_index >= len(self):
+            raise StopIteration
+
+        node_row = self[self._iter_index]
+        self._iter_index += 1
+        return node_row
+
+    # Python 3 iterator protocol expects __next__
+    __next__ = next
+
+ def __getitem__(self, gid):
+ node_props = {'positions': np.random.rand(3), 'rotation': self._rot_delta*gid, 'weight': 0.0001*gid}
+ return NodeRow(gid, node_props, self.__get_node_type_props(gid))
+
+ def __get_node_type_props(self, gid):
+ if gid <= self._N/4:
+ return self._node_types_table[101]
+ elif gid <= self._N/2:
+ return self._node_types_table[102]
+ elif gid <= self._N*3/4:
+ return self._node_types_table[103]
+ else:
+ return self._node_types_table[104]
+
+
+class EdgesFile(tn.EdgesFile):
+ def __init__(self, target_nodes, source_nodes):
+ self._target_nodes = target_nodes
+ self._source_nodes = source_nodes
+ self._edge_type_props = [
+ {
+ 'node_type_id': 1,
+ 'target_query': 'model_type="biophysical"', 'source_query': 'ei="e"',
+ 'syn_weight': .10,
+ 'syn_targets': ['dend', 'apical'],
+ 'dynamics_params': 'biophys_exc.json'
+ },
+ {
+ 'node_type_id': 2,
+ 'target_query': 'model_type="point_IntFire1"', 'source_query': 'ei="e"',
+ 'syn_weight': .20,
+ 'dynamics_params': 'point_exc.json'
+ },
+ {
+ 'node_type_id': 3,
+ 'target_query': 'model_type="biophysical"', 'source_query': 'ei="i"',
+ 'syn_weight': -.10,
+ 'syn_targets': ['soma', 'dend'],
+ 'dynamics_params': 'biophys_inh.json'
+ },
+ {
+ 'node_type_id': 4,
+ 'target_query': 'model_type="point_IntFire1"', 'source_query': 'ei="i"',
+ 'syn_weight': -.20,
+ 'dynamics_params': 'point_inh.json'
+ }
+ ]
+
+ @property
+ def source_network(self):
+ """Name of network containing the source gids"""
+ return self._source_nodes.name
+
+ @property
+ def target_network(self):
+ """Name of network containing the target gids"""
+ return self._target_nodes.name
+
+ def load(self, edges_file, edge_types_file):
+ raise NotImplementedError()
+
+ def edges_itr(self, target_gid):
+ trg_node = self._target_nodes[target_gid]
+ for src_node in self._source_nodes:
+ edge_props = {'syn_weight': trg_node['weight']}
+ yield EdgeRow(trg_node.gid, src_node.gid, edge_props, self.__get_edge_type_prop(src_node, trg_node))
+
+ def __len__(self):
+ return len(self._source_nodes)*len(self._target_nodes)
+
+ def __get_edge_type_prop(self, source_node, target_node):
+ indx = 0 if source_node['ei'] == 'e' else 2
+ indx += 0 if target_node['model_type'] == 'biophysical' else 1
+ return self._edge_type_props[indx]
diff --git a/bmtk-vb/bmtk/tests/simulator/bionet/set_cell_params.py b/bmtk-vb/bmtk/tests/simulator/bionet/set_cell_params.py
new file mode 100644
index 0000000..2a244b0
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/bionet/set_cell_params.py
@@ -0,0 +1,108 @@
+import json
+from neuron import h
+
+
+def IntFire1(cell_prop):
+ """Set parameters for the IntFire1 cell models."""
+ params_file = cell_prop['params_file']
+
+    with open(params_file) as json_file:
+        params = json.load(json_file)
+
+ hobj = h.IntFire1()
+ hobj.tau = params['tau'] * 1000.0 # Convert from seconds to ms.
+ hobj.refrac = params['refrac'] * 1000.0 # Convert from seconds to ms.
+
+ return hobj
+
+
+def Biophys1(cell_prop):
+    """
+    Set parameters for cells from the Allen Cell Types database.
+
+    Before any parameters are set, the reconstructed axon is replaced with a stub
+    (see fix_axon below).
+    """
+ morphology_file_name = str(cell_prop['morphology'])
+ params_file_name = str(cell_prop['params_file'])
+
+ hobj = h.Biophys1(morphology_file_name)
+ fix_axon(hobj)
+ set_params_peri(hobj, params_file_name)
+
+ return hobj
+
+
+def set_params_peri(hobj, params_file_name):
+ """Set biophysical parameters for the cell
+
+ Parameters
+ ----------
+ hobj: instance of a Biophysical template
+ NEURON's cell object
+ params_file_name: string
+ name of json file containing biophysical parameters for cell's model which determine spiking behavior
+ """
+
+ with open(params_file_name) as biophys_params_file:
+ biophys_params = json.load(biophys_params_file)
+
+ passive = biophys_params['passive'][0]
+ conditions = biophys_params['conditions'][0]
+ genome = biophys_params['genome']
+
+ # Set passive properties
+ cm_dict = dict([(c['section'], c['cm']) for c in passive['cm']])
+ for sec in hobj.all:
+ sec.Ra = passive['ra']
+ sec.cm = cm_dict[sec.name().split(".")[1][:4]]
+ sec.insert('pas')
+
+ for seg in sec:
+ seg.pas.e = passive["e_pas"]
+
+ # Insert channels and set parameters
+
+ for p in genome:
+ sections = [s for s in hobj.all if s.name().split(".")[1][:4] == p["section"]]
+
+ for sec in sections:
+ if p["mechanism"] != "":
+ sec.insert(p["mechanism"])
+ setattr(sec, p["name"], p["value"])
+
+ # Set reversal potentials
+ for erev in conditions['erev']:
+ sections = [s for s in hobj.all if s.name().split(".")[1][:4] == erev["section"]]
+ for sec in sections:
+ sec.ena = erev["ena"]
+ sec.ek = erev["ek"]
+
+
+def fix_axon(hobj):
+ '''
+ Replace reconstructed axon with a stub
+
+ Parameters
+ ----------
+ hobj: instance of a Biophysical template
+ NEURON's cell object
+ '''
+
+ for sec in hobj.axon:
+ h.delete_section(sec=sec)
+
+ h.execute('create axon[2]', hobj)
+
+ for sec in hobj.axon:
+ sec.L = 30
+ sec.diam = 1
+ hobj.axonal.append(sec=sec)
+        hobj.all.append(sec=sec)
+
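+    # Attach the stub: axon[0] connects to the midpoint of the soma and
+    # axon[1] to the distal end of axon[0]; each stub section is 30 um long
+    # and 1 um in diameter.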
+ hobj.axon[0].connect(hobj.soma[0], 0.5, 0)
+ hobj.axon[1].connect(hobj.axon[0], 1, 0)
+
+ h.define_shape()
+
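+
+# Sketch of the parameter-file layout consumed by set_params_peri (field names
+# inferred from the lookups above; real Allen Cell Types files carry more
+# entries and additional mechanisms):
+#
+#   {
+#     "passive": [{"ra": 100.0, "e_pas": -85.0,
+#                  "cm": [{"section": "soma", "cm": 1.0}]}],
+#     "conditions": [{"erev": [{"section": "soma", "ena": 53.0, "ek": -107.0}]}],
+#     "genome": [{"section": "soma", "mechanism": "NaTs",
+#                 "name": "gbar_NaTs", "value": 0.7}]
+#   }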
diff --git a/bmtk-vb/bmtk/tests/simulator/bionet/set_syn_params.py b/bmtk-vb/bmtk/tests/simulator/bionet/set_syn_params.py
new file mode 100644
index 0000000..b2b4732
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/bionet/set_syn_params.py
@@ -0,0 +1,31 @@
+from neuron import h
+
+
+def exp2syn(syn_params, xs, secs):
+    '''
+    Create a list of Exp2Syn synapses
+
+    Parameters
+    ----------
+    syn_params: dict
+        parameters of the synapse (erev, tau1, tau2)
+    xs: list of float
+        normalized distances along the target sections
+    secs: list of hoc sections
+        target sections, one per entry in xs
+
+    Returns
+    -------
+    syns: list of Exp2Syn synapse objects
+    '''
+ syns = []
+
+ for x, sec in zip(xs, secs):
+ syn = h.Exp2Syn(x, sec=sec)
+ syn.e = syn_params['erev']
+ syn.tau1 = syn_params['tau1']
+ syn.tau2 = syn_params['tau2']
+ syns.append(syn)
+ return syns
\ No newline at end of file
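+
+# Usage sketch (hypothetical parameter values; the sections would come from an
+# instantiated NEURON cell object):
+#
+#   syn_params = {'erev': 0.0, 'tau1': 1.0, 'tau2': 3.0}
+#   syns = exp2syn(syn_params, xs=[0.5, 0.9], secs=[cell.dend[0], cell.dend[1]])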
diff --git a/bmtk-vb/bmtk/tests/simulator/bionet/set_weights.py b/bmtk-vb/bmtk/tests/simulator/bionet/set_weights.py
new file mode 100644
index 0000000..cdaa454
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/bionet/set_weights.py
@@ -0,0 +1,19 @@
+import math
+
+
+def gaussianLL(tar_prop, src_prop, con_prop):
+    """Gaussian fall-off of synaptic weight with difference in tuning angle."""
+    src_tuning = src_prop['tuning_angle']
+    tar_tuning = tar_prop['tuning_angle']
+
+    w0 = con_prop["weight_max"]
+    sigma = con_prop["weight_sigma"]
+
+    # Fold the raw angle difference into [0, 90]: orientations 180 degrees
+    # apart are treated as equivalent, so only the acute difference matters.
+    delta_tuning = abs(abs(abs(180.0 - abs(float(tar_tuning) - float(src_tuning)) % 360.0) - 90.0) - 90.0)
+    weight = w0 * math.exp(-(delta_tuning / sigma) ** 2)
+
+    return weight
+
+
+def wmax(tar_prop, src_prop, con_prop):
+    w0 = con_prop["weight_max"]
+    return w0
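+
+
+# Usage sketch (illustrative values only):
+#
+#   src = {'tuning_angle': 30.0}
+#   tar = {'tuning_angle': 120.0}
+#   con = {'weight_max': 1.0, 'weight_sigma': 50.0}
+#   gaussianLL(tar, src, con)  # delta_tuning = 90 -> exp(-(90/50)**2) ~= 0.039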
diff --git a/bmtk-vb/bmtk/tests/simulator/bionet/test_biograph.py b/bmtk-vb/bmtk/tests/simulator/bionet/test_biograph.py
new file mode 100644
index 0000000..68ee18f
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/bionet/test_biograph.py
@@ -0,0 +1,70 @@
+import pytest
+import os
+import json
+
+import bionet_virtual_files as bvf
+from bmtk.simulator import bionet
+
+
+@pytest.mark.skip()
+def test_add_nodes():
+ nodes = bvf.NodesFile(N=100)
+
+ net = bionet.BioNetwork()
+ net.add_component('morphologies_dir', '.')
+ net.add_component('biophysical_neuron_models_dir', '.')
+ net.add_component('point_neuron_models_dir', '.')
+ net.add_nodes(nodes)
+
+ assert(net.networks == [nodes.name])
+ assert(net.get_internal_nodes() == net.get_nodes(nodes.name))
+ for bionode in net.get_internal_nodes():
+ node_id = bionode.node_id
+ orig_node = nodes[node_id]
+ assert(node_id == orig_node.gid)
+ assert(len(bionode.positions) == 3)
+ assert(bionode['ei'] == orig_node['ei'])
+ assert(bionode['model_type'] == orig_node['model_type'])
+ assert(bionode['rotation'] == orig_node['rotation'])
+ assert(os.path.basename(bionode.model_params) == orig_node['dynamics_params'])
+
+
+@pytest.mark.skip()
+def test_add_edges():
+ nodes = bvf.NodesFile(N=100)
+ edges = bvf.EdgesFile(nodes, nodes)
+
+ net = bionet.BioNetwork()
+ net.add_component('morphologies_dir', '.')
+ net.add_component('biophysical_neuron_models_dir', '.')
+ net.add_component('point_neuron_models_dir', '.')
+ net.add_component('synaptic_models_dir', '.')
+
+ with open('biophys_exc.json', 'w') as fp:
+ json.dump({}, fp)
+
+ with open('biophys_inh.json', 'w') as fp:
+ json.dump({}, fp)
+
+ with open('point_exc.json', 'w') as fp:
+ json.dump({}, fp)
+
+ with open('point_inh.json', 'w') as fp:
+ json.dump({}, fp)
+
+ net.add_nodes(nodes)
+ net.add_edges(edges)
+
+ count = 0
+ for trg_node in net.get_internal_nodes():
+ for e in net.edges_iterator(trg_node.node_id, nodes.name):
+ _, src_node, edge = e
+ assert(edge['syn_weight'] == trg_node['weight'])
+ count += 1
+ assert(count == 10000)
+
+
+if __name__ == '__main__':
+ test_add_nodes()
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/tests/simulator/bionet/test_nrn.py b/bmtk-vb/bmtk/tests/simulator/bionet/test_nrn.py
new file mode 100644
index 0000000..a65c333
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/bionet/test_nrn.py
@@ -0,0 +1,148 @@
+import pytest
+import types  # used by the introspection checks in test_load_py_modules
+
+from bmtk.simulator.bionet.pyfunction_cache import *
+
+
+def test_weight():
+ def wmax(v1, v2):
+ return max(v1, v2)
+
+ def wmin(v1, v2):
+ return min(v1, v2)
+
+ add_weight_function(wmax)
+ add_weight_function(wmin, 'minimum')
+
+ assert('wmax' in py_modules.synaptic_weights)
+ assert('minimum' in py_modules.synaptic_weights)
+ assert('wmin' not in py_modules.synaptic_weights)
+ wmax_fnc = py_modules.synaptic_weight('wmax')
+ assert(wmax_fnc(1, 2) == 2)
+
+ wmin_fnc = py_modules.synaptic_weight('minimum')
+ assert(wmin_fnc(1, 2) == 1)
+ py_modules.clear()
+
+
+def test_weight_decorator():
+ @synaptic_weight
+ def wmax(v1, v2):
+ return max(v1, v2)
+
+ @synaptic_weight(name='minimum')
+ def wmin(v1, v2):
+ return min(v1, v2)
+
+ assert('wmax' in py_modules.synaptic_weights)
+ assert('minimum' in py_modules.synaptic_weights)
+ assert('wmin' not in py_modules.synaptic_weights)
+ wmax_fnc = py_modules.synaptic_weight('wmax')
+ assert(wmax_fnc(1, 2) == 2)
+
+ wmin_fnc = py_modules.synaptic_weight('minimum')
+ assert(wmin_fnc(1, 2) == 1)
+ py_modules.clear()
+
+
+def test_synapse_model():
+ def syn1():
+ return 'Syn1'
+
+ def syn2(p1, p2):
+ return p1, p2
+
+ add_synapse_model(syn1)
+ add_synapse_model(syn2, 'synapse_2')
+
+ assert('syn1' in py_modules.synapse_models)
+ assert('synapse_2' in py_modules.synapse_models)
+ assert('syn2' not in py_modules.synapse_models)
+
+ syn_fnc = py_modules.synapse_model('syn1')
+ assert(syn_fnc() == 'Syn1')
+
+ syn_fnc = py_modules.synapse_model('synapse_2')
+ assert(syn_fnc(1, 2) == (1, 2))
+ py_modules.clear()
+
+
+def test_synapse_model_decorator():
+ @synapse_model
+ def syn1():
+ return 'Syn1'
+
+ @synapse_model(name='synapse_2')
+ def syn2(p1, p2):
+ return p1, p2
+
+ assert('syn1' in py_modules.synapse_models)
+ assert('synapse_2' in py_modules.synapse_models)
+ assert('syn2' not in py_modules.synapse_models)
+
+ syn_fnc = py_modules.synapse_model('syn1')
+ assert(syn_fnc() == 'Syn1')
+
+ syn_fnc = py_modules.synapse_model('synapse_2')
+ assert(syn_fnc(1, 2) == (1, 2))
+ py_modules.clear()
+
+
+@pytest.mark.skip()
+def test_cell_model():
+ def hoc1():
+ return "hoc"
+
+ def hoc2(p1):
+ return p1
+
+ add_cell_model(hoc1)
+ add_cell_model(hoc2, name='hoc_function')
+
+ assert('hoc1' in py_modules.cell_models)
+ assert('hoc_function' in py_modules.cell_models)
+ assert('hoc2' not in py_modules.cell_models)
+
+ hoc_fnc = py_modules.cell_model('hoc1')
+ assert(hoc_fnc() == 'hoc')
+
+ hoc_fnc = py_modules.cell_model('hoc_function')
+ assert(hoc_fnc(1.0) == 1.0)
+
+
+@pytest.mark.skip()
+def test_cell_model_decorator():
+ @cell_model
+ def hoc1():
+ return "hoc"
+
+ @cell_model(name='hoc_function')
+ def hoc2(p1):
+ return p1
+
+ assert('hoc1' in py_modules.cell_models)
+ assert('hoc_function' in py_modules.cell_models)
+ assert('hoc2' not in py_modules.cell_models)
+
+ hoc_fnc = py_modules.cell_model('hoc1')
+ assert(hoc_fnc() == 'hoc')
+
+ hoc_fnc = py_modules.cell_model('hoc_function')
+ assert(hoc_fnc(1.0) == 1.0)
+
+
+@pytest.mark.skip()
+def test_load_py_modules():
+ import set_weights
+ import set_syn_params
+ import set_cell_params
+
+ load_py_modules(cell_models=set_cell_params, syn_models=set_syn_params, syn_weights=set_weights)
+    assert(all(n in py_modules.cell_models for n in ['Biophys1', 'IntFire1']))
+    assert(isinstance(py_modules.cell_model('Biophys1'), types.FunctionType))
+    assert(isinstance(py_modules.cell_model('IntFire1'), types.FunctionType))
+
+    assert(all(n in py_modules.synapse_models for n in ['exp2syn']))
+    assert(isinstance(py_modules.synapse_model('exp2syn'), types.FunctionType))
+
+    assert(all(n in py_modules.synaptic_weights for n in ['wmax', 'gaussianLL']))
+    assert(isinstance(py_modules.synaptic_weight('wmax'), types.FunctionType))
+    assert(isinstance(py_modules.synaptic_weight('gaussianLL'), types.FunctionType))
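+
+
+# Note: the decorator tests above exercise both registration forms -- bare
+# (@synaptic_weight registers the function under its own name) and
+# parameterized (@synaptic_weight(name=...) registers it under the alias).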
diff --git a/bmtk-vb/bmtk/tests/simulator/pointnet/pointnet_virtual_files.py b/bmtk-vb/bmtk/tests/simulator/pointnet/pointnet_virtual_files.py
new file mode 100644
index 0000000..46e3c26
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/pointnet/pointnet_virtual_files.py
@@ -0,0 +1,158 @@
+import numpy as np
+
+from bmtk.utils.io import tabular_network as tn
+
+
+class NodeRow(tn.NodeRow):
+ @property
+ def with_dynamics_params(self):
+ return False
+
+
+class NodesFile(tn.NodesFile):
+ def __init__(self, N):
+        self._network_name = 'test_pointnet'
+ self._version = None
+ self._iter_index = 0
+ self._nrows = 0
+ self._node_types_table = None
+
+ self._N = N
+ self._rot_delta = 360.0/float(N)
+ self._node_types_table = {
+ 101: {
+ 'pop_name': 'Rorb', 'node_type_id': 101, 'model_type': 'iaf_psc_alpha',
+ 'dynamics_params': 'iaf_dynamics.json',
+ 'ei': 'e'
+ },
+
+ 102: {
+ 'pop_name': 'PV1', 'node_type_id': 102, 'model_type': 'izhikevich',
+ 'dynamics_params': 'iz_dynamics.json',
+ 'ei': 'i'
+ }
+ }
+
+ @property
+ def name(self):
+ """name of network containing these nodes"""
+ return self._network_name
+
+ @property
+ def version(self):
+ return self._version
+
+ @property
+ def gids(self):
+ raise NotImplementedError()
+
+ @property
+ def node_types_table(self):
+ return self._node_types_table
+
+ def load(self, nodes_file, node_types_file):
+ raise NotImplementedError()
+
+ def get_node(self, gid, cache=False):
+ return self[gid]
+
+ def __len__(self):
+ return self._N
+
+ def __iter__(self):
+ self._iter_index = 0
+ return self
+
+    def next(self):
+        if self._iter_index >= len(self):
+            raise StopIteration
+
+        node_row = self[self._iter_index]
+        self._iter_index += 1
+        return node_row
+
+    # Python 3 iterator protocol expects __next__
+    __next__ = next
+
+ def __getitem__(self, gid):
+ node_props = {'positions': np.random.rand(3), 'rotation': self._rot_delta*gid, 'weight': 0.0001*gid}
+ return NodeRow(gid, node_props, self.__get_node_type_props(gid))
+
+ def __get_node_type_props(self, gid):
+ if gid <= self._N/2:
+ return self._node_types_table[101]
+ else:
+ return self._node_types_table[102]
+
+
+class EdgeRow(tn.EdgeRow):
+ @property
+ def with_dynamics_params(self):
+ return False
+
+
+class EdgesFile(tn.EdgesFile):
+ def __init__(self, target_nodes, source_nodes):
+ self._target_nodes = target_nodes
+ self._source_nodes = source_nodes
+ self._edge_type_props = [
+ {
+ 'node_type_id': 1,
+ 'target_query': 'model_type="iaf_psc_alpha"', 'source_query': 'ei="e"',
+ 'syn_weight': .10,
+ 'delay': 2.0,
+ 'dynamics_params': 'iaf_exc.json'
+ },
+ {
+ 'node_type_id': 2,
+ 'target_query': 'model_type="iaf_psc_alpha"', 'source_query': 'ei="i"',
+ 'syn_weight': -.10,
+ 'delay': 2.0,
+ 'dynamics_params': 'iaf_inh.json'
+ },
+ {
+ 'node_type_id': 3,
+ 'target_query': 'model_type="izhikevich"', 'source_query': 'ei="e"',
+ 'syn_weight': .20,
+ 'delay': 2.0,
+ 'dynamics_params': 'izh_exc.json'
+ },
+ {
+ 'node_type_id': 4,
+ 'target_query': 'model_type="izhikevich"', 'source_query': 'ei="i"',
+ 'syn_weight': -.20,
+ 'delay': 2.0,
+ 'dynamics_params': 'izh_inh.json'
+ }
+ ]
+
+ @property
+ def source_network(self):
+ """Name of network containing the source gids"""
+ return self._source_nodes.name
+
+ @property
+ def target_network(self):
+ """Name of network containing the target gids"""
+ return self._target_nodes.name
+
+ def load(self, edges_file, edge_types_file):
+ raise NotImplementedError()
+
+ def edges_itr(self, target_gid):
+ trg_node = self._target_nodes[target_gid]
+ for src_node in self._source_nodes:
+ edge_props = {'syn_weight': trg_node['weight']}
+ yield EdgeRow(trg_node.gid, src_node.gid, edge_props, self.__get_edge_type_prop(src_node, trg_node))
+
+ def __len__(self):
+ return len(self._source_nodes)*len(self._target_nodes)
+
+ def __get_edge_type_prop(self, source_node, target_node):
+ indx = 0 if source_node['model_type'] == 'iaf_psc_alpha' else 2
+ indx += 0 if target_node['ei'] == 'e' else 1
+ return self._edge_type_props[indx]
diff --git a/bmtk-vb/bmtk/tests/simulator/pointnet/test_pointgraph.py b/bmtk-vb/bmtk/tests/simulator/pointnet/test_pointgraph.py
new file mode 100644
index 0000000..a1c18c9
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/pointnet/test_pointgraph.py
@@ -0,0 +1,77 @@
+import pytest
+import os
+import json
+
+from pointnet_virtual_files import NodesFile, EdgesFile
+from bmtk.simulator import pointnet
+
+
+@pytest.mark.skip()
+def test_add_nodes():
+ nodes = NodesFile(N=100)
+
+ net = pointnet.PointNetwork()
+ if not os.path.exists('tmp/'):
+ os.mkdir('tmp/')
+ net.add_component('models_dir', '.')
+ with open('iaf_dynamics.json', 'w') as fp:
+ json.dump({}, fp)
+
+ with open('iz_dynamics.json', 'w') as fp:
+ json.dump({}, fp)
+
+ net.add_nodes(nodes)
+ assert(net.networks == [nodes.name])
+ assert(net.get_internal_nodes() == net.get_nodes(nodes.name))
+ count = 0
+ for pointnode in net.get_internal_nodes():
+ node_id = pointnode.node_id
+ orig_node = nodes[node_id]
+ assert(node_id == orig_node.gid)
+ assert(pointnode['ei'] == orig_node['ei'])
+ assert(pointnode['model_type'] == orig_node['model_type'])
+ assert(pointnode['rotation'] == orig_node['rotation'])
+ assert(pointnode.model_params == {})
+ count += 1
+ assert(count == 100)
+
+
+@pytest.mark.skip()
+def test_add_edges():
+ nodes = NodesFile(N=100)
+ edges = EdgesFile(nodes, nodes)
+
+ net = pointnet.PointNetwork()
+ net.add_component('models_dir', '.')
+ net.add_component('synaptic_models_dir', '.')
+
+ with open('iaf_dynamics.json', 'w') as fp:
+ json.dump({}, fp)
+
+ with open('iz_dynamics.json', 'w') as fp:
+ json.dump({}, fp)
+
+ with open('iaf_exc.json', 'w') as fp:
+ json.dump({}, fp)
+
+ with open('iaf_inh.json', 'w') as fp:
+ json.dump({}, fp)
+
+ with open('izh_exc.json', 'w') as fp:
+ json.dump({}, fp)
+
+ with open('izh_inh.json', 'w') as fp:
+ json.dump({}, fp)
+
+ net.add_nodes(nodes)
+ net.add_edges(edges)
+
+ count = 0
+ for trg_node in net.get_internal_nodes():
+ for e in net.edges_iterator(trg_node.node_id, nodes.name):
+ _, src_node, edge = e
+ assert(edge['syn_weight'] == trg_node['weight'])
+ count += 1
+ assert(count == 10000)
+
diff --git a/bmtk-vb/bmtk/tests/simulator/popnet/popnet_virtual_files.py b/bmtk-vb/bmtk/tests/simulator/popnet/popnet_virtual_files.py
new file mode 100644
index 0000000..eb95216
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/popnet/popnet_virtual_files.py
@@ -0,0 +1,159 @@
+import numpy as np
+
+from bmtk.utils.io import tabular_network as tn
+
+
+class NodeRow(tn.NodeRow):
+ @property
+ def with_dynamics_params(self):
+ return False
+
+
+class NodesFile(tn.NodesFile):
+ def __init__(self, N):
+        self._network_name = 'test_popnet'
+ self._version = None
+ self._iter_index = 0
+ self._nrows = 0
+ self._node_types_table = None
+
+ self._N = N
+ self._rot_delta = 360.0/float(N)
+ self._node_types_table = {
+ 101: {
+ 'pop_name': 'internal_exc', 'node_type_id': 101, 'model_type': 'internal',
+ 'dynamics_params': 'exc_dynamics.json',
+ 'ei': 'e'
+ },
+
+ 102: {
+ 'pop_name': 'internal_inh', 'node_type_id': 102, 'model_type': 'internal',
+ 'dynamics_params': 'inh_dynamics.json',
+ 'ei': 'i'
+ },
+ 103: {
+ 'pop_name': 'external_exc', 'node_type_id': 103, 'model_type': 'external',
+ 'dynamics_params': 'external_dynamics.json',
+            'ei': 'e'
+ }
+ }
+
+ @property
+ def name(self):
+ """name of network containing these nodes"""
+ return self._network_name
+
+ @property
+ def version(self):
+ return self._version
+
+ @property
+ def gids(self):
+ raise NotImplementedError()
+
+ @property
+ def node_types_table(self):
+ return self._node_types_table
+
+ def load(self, nodes_file, node_types_file):
+ raise NotImplementedError()
+
+ def get_node(self, gid, cache=False):
+ return self[gid]
+
+ def __len__(self):
+ return self._N
+
+ def __iter__(self):
+ self._iter_index = 0
+ return self
+
+    def next(self):
+        if self._iter_index >= len(self):
+            raise StopIteration
+
+        node_row = self[self._iter_index]
+        self._iter_index += 1
+        return node_row
+
+    # Python 3 iterator protocol expects __next__
+    __next__ = next
+
+ def __getitem__(self, gid):
+ node_props = {'positions': np.random.rand(3), 'rotation': self._rot_delta*gid, 'weight': 0.0001*gid}
+ return NodeRow(gid, node_props, self.__get_node_type_props(gid))
+
+ def __get_node_type_props(self, gid):
+ if gid <= self._N/3:
+ return self._node_types_table[101]
+ elif gid <= self._N*2/3:
+ return self._node_types_table[102]
+ else:
+ return self._node_types_table[103]
+
+
+class EdgeRow(tn.EdgeRow):
+ @property
+ def with_dynamics_params(self):
+ return False
+
+
+class EdgesFile(tn.EdgesFile):
+ def __init__(self, target_nodes, source_nodes):
+ self._target_nodes = target_nodes
+ self._source_nodes = source_nodes
+ self._edge_type_props = [
+ {
+ 'node_type_id': 1,
+ 'target_query': 'model_type="iaf_psc_alpha"', 'source_query': 'ei="e"',
+ 'syn_weight': .10,
+ 'delay': 2.0,
+ 'dynamics_params': 'iaf_exc.json'
+ },
+ {
+ 'node_type_id': 2,
+ 'target_query': 'model_type="iaf_psc_alpha"', 'source_query': 'ei="i"',
+ 'syn_weight': -.10,
+ 'delay': 2.0,
+ 'dynamics_params': 'iaf_inh.json'
+ },
+ {
+ 'node_type_id': 3,
+ 'target_query': 'model_type="izhikevich"', 'source_query': 'ei="e"',
+ 'syn_weight': .20,
+ 'delay': 2.0,
+ 'dynamics_params': 'izh_exc.json'
+ },
+ {
+ 'node_type_id': 4,
+ 'target_query': 'model_type="izhikevich"', 'source_query': 'ei="i"',
+ 'syn_weight': -.20,
+ 'delay': 2.0,
+ 'dynamics_params': 'izh_inh.json'
+ }
+ ]
+
+ @property
+ def source_network(self):
+ """Name of network containing the source gids"""
+ return self._source_nodes.name
+
+ @property
+ def target_network(self):
+ """Name of network containing the target gids"""
+ return self._target_nodes.name
+
+ def load(self, edges_file, edge_types_file):
+ raise NotImplementedError()
+
+ def edges_itr(self, target_gid):
+ trg_node = self._target_nodes[target_gid]
+ for src_node in self._source_nodes:
+ edge_props = {'syn_weight': trg_node['weight']}
+ yield EdgeRow(trg_node.gid, src_node.gid, edge_props, self.__get_edge_type_prop(src_node, trg_node))
+
+
+ def __len__(self):
+ return len(self._source_nodes)*len(self._target_nodes)
+
+ def __get_edge_type_prop(self, source_node, target_node):
+ indx = 0 if source_node['model_type'] == 'iaf_psc_alpha' else 2
+ indx += 0 if target_node['ei'] == 'e' else 1
+ return self._edge_type_props[indx]
diff --git a/bmtk-vb/bmtk/tests/simulator/popnet/test_popgraph.py b/bmtk-vb/bmtk/tests/simulator/popnet/test_popgraph.py
new file mode 100644
index 0000000..318422c
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/popnet/test_popgraph.py
@@ -0,0 +1,39 @@
+import pytest
+import os
+import json
+
+import popnet_virtual_files as pvf
+from bmtk.simulator import popnet
+
+
+@pytest.mark.skip()
+def test_add_nodes():
+ nodes = pvf.NodesFile(N=100)
+
+ net = popnet.PopNetwork()
+ net.add_component('models_dir', '.')
+ with open('exc_dynamics.json', 'w') as fp:
+ json.dump({'tau_m': 0.1}, fp)
+
+ with open('inh_dynamics.json', 'w') as fp:
+ json.dump({'tau_m': 0.2}, fp)
+
+ net.add_nodes(nodes)
+ assert(net.networks == [nodes.name])
+ assert(len(net.get_internal_nodes()) == 2)
+ assert(len(net.get_populations(nodes.name)) == 3)
+ assert(net.get_populations(nodes.name))
+
+ pop_e = net.get_population(nodes.name, 101)
+    assert(pop_e['ei'] == 'e')
+    assert(pop_e.is_internal)
+    assert(pop_e.pop_id == 101)
+    assert(pop_e.tau_m == 0.1)
+
+    pop_i = net.get_population(nodes.name, 102)
+    assert(pop_i['ei'] == 'i')
+    assert(pop_i.is_internal)
+    assert(pop_i.tau_m == 0.2)
+
+
diff --git a/bmtk-vb/bmtk/tests/simulator/utils/files/circuit_config.json b/bmtk-vb/bmtk/tests/simulator/utils/files/circuit_config.json
new file mode 100755
index 0000000..3da4ff1
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/utils/files/circuit_config.json
@@ -0,0 +1,43 @@
+{
+ "target_simulator":"NEURON",
+
+ "components": {
+ "morphologies": "$COMPONENT_DIR/morphologies",
+ "synaptic_models": "$COMPONENT_DIR/synapse_dynamics",
+ "mechanisms":"$COMPONENT_DIR/mechanisms",
+ "biophysical_neuron_models": "$COMPONENT_DIR/biophysical_neuron_dynamics",
+ "point_neuron_models": "$COMPONENT_DIR/point_neuron_dynamics",
+ "templates": "$COMPONENT_DIR/hoc_templates"
+
+ },
+
+ "networks": {
+ "node_files": [
+ {
+ "nodes": "$NETWORK_DIR/V1/v1_nodes.h5",
+ "node_types": "$NETWORK_DIR/V1/v1_node_types.csv"
+ },
+ {
+ "nodes": "$NETWORK_DIR/LGN/lgn_nodes.h5",
+ "node_types": "$NETWORK_DIR/LGN/lgn_node_types.csv"
+ }
+ ],
+
+ "edge_files": [
+ {
+ "edges": "$NETWORK_DIR/V1/v1_edges.h5",
+ "edge_types": "$NETWORK_DIR/V1/v1_edge_types.csv"
+ },
+ {
+ "edges": "$NETWORK_DIR/LGN/lgn_v1_edges.h5",
+ "edge_types": "$NETWORK_DIR/LGN/lgn_v1_edge_types.csv"
+ }
+ ]
+ },
+
+ "manifest": {
+ "$BASE_DIR": "${configdir}",
+ "$NETWORK_DIR": "$BASE_DIR/networks",
+ "$COMPONENT_DIR": "$BASE_DIR/components"
+ }
+}
diff --git a/bmtk-vb/bmtk/tests/simulator/utils/files/config.json b/bmtk-vb/bmtk/tests/simulator/utils/files/config.json
new file mode 100644
index 0000000..27539c1
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/utils/files/config.json
@@ -0,0 +1,9 @@
+{
+ "manifest": {
+ "$BASE_DIR": "${configdir}"
+ },
+
+ "simulation": "${BASE_DIR}/simulator_config.json",
+ "network": "$BASE_DIR/circuit_config.json"
+
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/tests/simulator/utils/files/simulator_config.json b/bmtk-vb/bmtk/tests/simulator/utils/files/simulator_config.json
new file mode 100644
index 0000000..1606d3d
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/utils/files/simulator_config.json
@@ -0,0 +1,50 @@
+{
+ "run": {
+ "tstop": 3000.0,
+ "dt": 0.025,
+ "dL": 20,
+ "overwrite_output_dir": true,
+ "spike_threshold": -15,
+ "save_state":false,
+ "start_from_state": false,
+ "nsteps_block":5000,
+ "save_cell_vars": ["v", "cai"],
+ "calc_ecp": false,
+ "connect_internal": true,
+ "connect_external": {"lgn": true, "tw": true}
+ },
+
+ "conditions": {
+ "celsius": 34.0,
+ "v_init": -80
+ },
+
+ "groups": {
+ "save_vars": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+ },
+
+ "input": [
+ {
+ "type": "external_spikes",
+ "format": "nwb",
+ "file": "lgn_spike_trains.txt",
+ "network": "lgn",
+ "trial": "trial_0"
+ }
+ ],
+
+ "output": {
+ "log": "$OUTPUT_DIR/log.txt",
+ "spikes_ascii": "$OUTPUT_DIR/spikes.txt",
+ "spikes_h5": "$OUTPUT_DIR/spikes.h5",
+ "cell_vars_dir": "$OUTPUT_DIR/cellvars",
+ "extra_cell_vars": "$OUTPUT_DIR/extra_cell_vars.h5",
+ "ecp_file": "$OUTPUT_DIR/ecp.h5",
+ "state_dir": "$OUTPUT_DIR/state",
+ "output_dir": "$OUTPUT_DIR"
+ },
+
+ "manifest": {
+ "$OUTPUT_DIR": "./output"
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/tests/simulator/utils/test_config.py b/bmtk-vb/bmtk/tests/simulator/utils/test_config.py
new file mode 100644
index 0000000..414666d
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/utils/test_config.py
@@ -0,0 +1,148 @@
+import os
+import pytest
+
+import bmtk.simulator.utils.config as cfg
+
+
+def config_path(rel_path):
+ c_path = os.path.dirname(os.path.realpath(__file__))
+ return os.path.join(c_path, rel_path)
+
+
+def test_load_parent_config():
+ """Test a parent config file can pull in children configs"""
+ cfg_full_path = config_path('files/config.json')
+ config = cfg.from_json(cfg_full_path)
+ assert(config['config_path'] == cfg_full_path)
+ assert('components' in config)
+ assert('networks' in config)
+ assert('run' in config)
+
+
+def test_load_network_config():
+ cfg_full_path = config_path('files/circuit_config.json')
+ config = cfg.from_json(cfg_full_path)
+ manifest = config['manifest']
+ assert(config['config_path'] == cfg_full_path)
+ assert(config['components']['morphologies'] == os.path.join(manifest['$COMPONENT_DIR'], 'morphologies'))
+ assert(config['networks']['node_files'][0]['nodes'] == os.path.join(manifest['$NETWORK_DIR'], 'V1/v1_nodes.h5'))
+
+
+def test_load_simulator_config():
+ cfg_full_path = config_path('files/simulator_config.json')
+ config = cfg.from_json(cfg_full_path)
+ manifest = config['manifest']
+ assert('run' in config)
+ assert(config['output']['log'] == os.path.join(manifest['$OUTPUT_DIR'], 'log.txt'))
+
+
+def test_build_manifest1():
+ """Test simple manifest"""
+ config_file = {'manifest': {
+ '$BASE_DIR': '/base',
+ '$TMP_DIR': '$BASE_DIR/tmp',
+ '$SHARE_DIR': '${TMP_DIR}_1/share'
+ }}
+
+ manifest = cfg.__build_manifest(config_file)
+ assert(manifest['$BASE_DIR'] == '/base')
+ assert(manifest['$TMP_DIR'] == '/base/tmp')
+ assert(manifest['$SHARE_DIR'] == '/base/tmp_1/share')
+
+
+def test_build_manifest2():
+ config_file = {'manifest': {
+ '$DIR_DATA': 'data',
+ '$DIR_MAT': 'mat',
+ '$APPS': '/${DIR_DATA}/$DIR_MAT/apps'
+ }}
+
+ manifest = cfg.__build_manifest(config_file)
+ assert(manifest['$APPS'] == '/data/mat/apps')
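+
+# Note: both the $NAME and ${NAME} spellings are substituted (see '$APPS'
+# above), and a manifest entry may reference variables defined earlier in the
+# same manifest (see '$SHARE_DIR' in test_build_manifest1).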
+
+
+def test_build_manifest_fail1():
+ """Test exception occurs when variable is missing"""
+ config_file = {'manifest': {
+ '$BASE': '/base',
+ '$TMP': '$VAR/Smat',
+ }}
+ with pytest.raises(Exception):
+ cfg.__build_manifest(config_file)
+
+
+def test_build_manifest_fail2():
+ """Test recursive definition"""
+ config_file = {'manifest': {
+ '$BASE': '$TMP/share',
+ '$TMP': '$BASE/share',
+ }}
+ with pytest.raises(Exception):
+ cfg.__build_manifest(config_file)
+
+
+def test_resolve_var_str():
+ """Check that a variable can be resolved in a string"""
+ config_file = {
+ 'manifest': {
+ '$BASE': 'path'
+ },
+ 's1': '$BASE/test',
+ 'i1': 9
+ }
+ conf = cfg.from_dict(config_file)
+ assert(conf['s1'] == 'path/test')
+ assert(conf['i1'] == 9)
+
+
+def test_resolve_var_list():
+ """Check variables can be resolved in list"""
+ config_file = {
+ 'manifest': {
+ '$p1': 'a',
+ '$p2': 'b'
+ },
+ 'l1': ['$p1/test', '${p2}/test', 9]
+ }
+ conf = cfg.from_dict(config_file)
+ assert(conf['l1'][0] == 'a/test')
+ assert(conf['l1'][1] == 'b/test')
+ assert(conf['l1'][2] == 9)
+
+
+def test_resolve_var_dict():
+ """Check variables can be resolved in dictionary"""
+ config_file = {
+ 'manifest': {
+ '$v1': 'a',
+ '$v2': 'c'
+ },
+ 'd1': {
+ 'k1': '$v1',
+ 'k2': 'B',
+ 'k3': ['${v2}'],
+ 'k4': 4
+ }
+ }
+ conf = cfg.from_dict(config_file)
+ assert(conf['d1']['k1'] == 'a')
+ assert(conf['d1']['k2'] == 'B')
+ assert(conf['d1']['k3'] == ['c'])
+ assert(conf['d1']['k4'] == 4)
+
+
+def test_time_vars():
+ config_file = {
+ 'd1': {
+ 'k1': 'k1_${date}',
+ 'k2': 'k2/$time',
+ 'k3': ['${datetime}'],
+ 'k4': 4
+ }
+ }
+
+    conf = cfg.from_dict(config_file)
+    # The date/time substitutions depend on the wall clock, so only the
+    # untouched constant entry is checked here.
+    assert(conf['d1']['k4'] == 4)
diff --git a/bmtk-vb/bmtk/tests/simulator/utils/test_nwb.py b/bmtk-vb/bmtk/tests/simulator/utils/test_nwb.py
new file mode 100644
index 0000000..c92f058
--- /dev/null
+++ b/bmtk-vb/bmtk/tests/simulator/utils/test_nwb.py
@@ -0,0 +1,341 @@
+import pytest
+import numpy as np
+from bmtk.simulator.utils import nwb
+import os
+import h5py
+
+
+def test_create_blank_file():
+ nwb.create_blank_file()
+ f = nwb.create_blank_file(close=False)
+ file_name = f.filename
+ f.close()
+
+ nwb.create_blank_file(file_name, force=True)
+ os.remove(file_name)
+
+
+def test_create_blank_file_force():
+ temp_file_name = nwb.get_temp_file_name()
+ nwb.create_blank_file(temp_file_name, force=True)
+    exception_caught = False
+    try:
+        nwb.create_blank_file(temp_file_name)
+    except IOError:
+        exception_caught = True
+    assert exception_caught
+ os.remove(temp_file_name)
+
+
+def test_different_scales():
+ y_values = 10*np.ones(10)
+ f = nwb.create_blank_file(close=False)
+ scale = nwb.DtScale(.1, 'time', 'second')
+ data = nwb.FiringRate(y_values, scale=scale)
+ data.add_to_stimulus(f)
+
+ spike_train = nwb.FiringRate.get_stimulus(f, 0)
+ y_values_new = spike_train.data[:]
+ np.testing.assert_almost_equal(y_values_new, y_values)
+ f.close()
+
+
+def test_set_data_file_handle():
+ f = nwb.create_blank_file(close=False)
+ s0 = nwb._set_scale(f, '0', np.arange(3), 'time', 'second', "Scale")
+ s1 = nwb._set_scale(f, '1', np.arange(4), 'time', 'second', "Scale")
+ s2 = nwb._set_scale(f, '2', np.arange(5), 'time', 'second', "Scale")
+ nwb._set_data(f, '1D', np.zeros(3), s0, 'firing_rate', 'hertz')
+ nwb._set_data(f, '2D', np.zeros((3, 4)), (s0, s1), 'firing_rate', 'hertz')
+ nwb._set_data(f, '3D', np.zeros((3, 4, 5)), (s0, s1, s2), 'firing_rate', 'hertz')
+
+ file_name = f.filename
+ f.close()
+ os.remove(file_name)
+
+
+def test_set_data_force():
+ f = nwb.create_blank_file(close=False)
+ s0 = nwb._set_scale(f, '0', np.arange(3), 'time', 'second', "Scale")
+ nwb._set_data(f, 'test_force', np.zeros(3), s0, 'firing_rate', 'hertz')
+ nwb._set_data(f, 'test_force', np.zeros(3), s0, 'firing_rate', 'hertz', force=True)
+
+ file_name = f.filename
+ f.close()
+ os.remove(file_name)
+
+
+def test_get_data():
+ s0_tuple = '0', np.arange(3), 'distance', 'pixel', "Scale"
+ s1_tuple = '1', np.arange(4), 'distance', 'pixel', "Scale"
+ data, dimension, unit = np.ones((3, 4)), 'brightness', 'intensity'
+
+ f = nwb.create_blank_file(close=False)
+ s0 = nwb._set_scale(f, *s0_tuple)
+ s1 = nwb._set_scale(f, *s1_tuple)
+ scales = (s0, s1)
+ nwb._set_data(f, 'test', data, scales, dimension, unit)
+ data_new, scales_new, dimension_new, unit_new, metadata = nwb._get_data(f['test'])
+ np.testing.assert_almost_equal(data, data_new)
+ assert len(metadata) == 0
+ assert dimension == dimension_new
+ assert unit == unit_new
+ for scale_tuple, scale_new in zip((s0_tuple, s1_tuple), scales_new):
+ np.testing.assert_almost_equal(scale_tuple[1], scale_new[:])
+ assert scale_tuple[2] == scale_new.attrs['dimension']
+ assert scale_tuple[3] == scale_new.attrs['unit']
+
+ file_name = f.filename
+ f.close()
+ os.remove(file_name)
+
+
+def test_metadata():
+ f = nwb.create_blank_file(close=False)
+ s0 = nwb._set_scale(f, '0', np.arange(3), 'time', 'second', "Scale")
+ nwb._set_data(f, 'test_metadata', np.zeros(3), s0, 'firing_rate', 'hertz', metadata={'name':'foo'})
+ _, _, _, _, metadata = nwb._get_data(f['test_metadata'])
+ assert metadata['name'] == 'foo'
+ file_name = f.filename
+ f.close()
+ os.remove(file_name)
+
+
+def test_add_shared_scale():
+ f = nwb.create_blank_file(close=False, force=True)
+ t_values = np.arange(10)
+ shared_scale = nwb.Scale(t_values, 'time', 'second')
+ data_0 = nwb.FiringRate(10*np.ones(10), scale=shared_scale)
+ data_0.add_to_stimulus(f)
+ data_1 = nwb.FiringRate(20*np.ones(10), scale=shared_scale)
+ data_1.add_to_stimulus(f)
+
+ round_trip_0 = nwb.FiringRate.get_stimulus(f, 0)
+ assert data_0 == round_trip_0
+
+ round_trip_1 = nwb.FiringRate.get_stimulus(f, 1)
+ assert data_1 == round_trip_1
+
+ rt0, rt1 = nwb.FiringRate.get_stimulus(f)
+ assert data_0 == rt0
+ assert data_1 == rt1
+
+ file_name = f.filename
+ f.close()
+ os.remove(file_name)
+
+
+def test_firing_rate():
+ t_values = np.arange(10)
+ y_values = 10*np.ones(10)
+
+ f = nwb.create_blank_file(close=False, force=True)
+
+ scale = nwb.Scale(t_values, 'time', 'second')
+ data = nwb.FiringRate(y_values, scale=scale)
+ data.add_to_stimulus(f)
+ data.add_to_acquisition(f)
+ data.add_to_processing(f, 'step_0')
+ data.add_to_analysis(f, 'step_0')
+
+ round_trip = nwb.FiringRate.get_stimulus(f, 0)
+ assert round_trip == data
+
+ file_name = f.filename
+ f.close()
+ os.remove(file_name)
+
+
+def test_spike_train():
+ t_values = np.arange(5)*.1
+ y_values = np.array([0, 1, 2, 2, 1])
+
+ f = nwb.create_blank_file(close=False, force=True)
+ scale = nwb.Scale(t_values, 'time', 'second')
+ data = nwb.SpikeTrain(y_values, scale=scale)
+ data.add_to_stimulus(f)
+ data.add_to_acquisition(f)
+ data.add_to_processing(f, 'step_0')
+ data.add_to_analysis(f, 'step_0')
+
+ round_trip = nwb.SpikeTrain.get_stimulus(f, 0)
+ assert round_trip == data
+
+ file_name = f.filename
+ f.close()
+ os.remove(file_name)
+
+
+def test_grayscale_movie():
+ t_values = np.arange(20)*.1
+ row_values = np.arange(5)
+ col_values = np.arange(10)
+ data_values = np.empty((20, 5, 10))
+
+ f = nwb.create_blank_file(close=False, force=True)
+ t_scale = nwb.Scale(t_values, 'time', 'second')
+ row_scale = nwb.Scale(row_values, 'distance', 'pixel')
+ col_scale = nwb.Scale(col_values, 'distance', 'pixel')
+
+ data = nwb.GrayScaleMovie(data_values, scale=(t_scale, row_scale, col_scale), metadata={'foo': 5})
+ data.add_to_stimulus(f)
+ data.add_to_acquisition(f)
+ data.add_to_processing(f, 'step_0')
+ data.add_to_analysis(f, 'step_0')
+
+ round_trip = nwb.GrayScaleMovie.get_stimulus(f, 0)
+ np.testing.assert_almost_equal(round_trip.data[:], data.data[:], 12)
+
+ round_trip = nwb.GrayScaleMovie.get_acquisition(f, 0)
+ np.testing.assert_almost_equal(round_trip.data[:], data.data[:], 12)
+
+ round_trip = nwb.GrayScaleMovie.get_processing(f, 'step_0', 0)
+ np.testing.assert_almost_equal(round_trip.data[:], data.data[:], 12)
+
+ round_trip = nwb.GrayScaleMovie.get_analysis(f, 'step_0', 0)
+ np.testing.assert_almost_equal(round_trip.data[:], data.data[:], 12)
+ f.close()
+
+
+def test_processing():
+ t_values = np.arange(10)
+ y_values = 10*np.ones(10)
+
+ f = nwb.create_blank_file(close=False)
+
+ scale = nwb.Scale(t_values, 'time', 'second')
+ data = nwb.FiringRate(y_values, scale=scale)
+ data.add_to_processing(f, 'step_0')
+
+ scale = nwb.Scale(t_values, 'time', 'second')
+ data = nwb.FiringRate(y_values, scale=scale)
+ data.add_to_processing(f, 'step_0')
+
+ scale = nwb.Scale(t_values, 'time', 'second')
+ data = nwb.FiringRate(y_values, scale=scale)
+ data.add_to_processing(f, 'step_1')
+
+ file_name = f.filename
+ f.close()
+ os.remove(file_name)
+
+
+def test_analysis():
+ t_values = np.arange(10)
+ y_values = 10*np.ones(10)
+
+ f = nwb.create_blank_file(close=False)
+
+ scale = nwb.Scale(t_values, 'time', 'second')
+ data = nwb.FiringRate(y_values, scale=scale)
+ data.add_to_analysis(f, 'step_0')
+
+ scale = nwb.Scale(t_values, 'time', 'second')
+ data = nwb.FiringRate(y_values, scale=scale)
+ data.add_to_analysis(f, 'step_0')
+
+ scale = nwb.Scale(t_values, 'time', 'second')
+ data = nwb.FiringRate(y_values, scale=scale)
+ data.add_to_analysis(f, 'step_1')
+
+ file_name = f.filename
+ f.close()
+ os.remove(file_name)
+
+
+def test_writable():
+ y_values = 10*np.ones(10)
+ scale = nwb.DtScale(.1, 'time', 'second')
+ data = nwb.FiringRate(y_values, scale=scale)
+
+ f = nwb.create_blank_file(close=True)
+ try:
+ data.add_to_stimulus(f)
+ except TypeError as e:
+ assert str(e).replace('\'', '') == "NoneType object has no attribute __getitem__"
+
+ f = nwb.create_blank_file(close=False)
+ f.close()
+ try:
+ data.add_to_stimulus(f)
+ except Exception as e:
+ assert str(e) == 'File not valid: '
+
+
+@pytest.mark.skip(reason='Ability to add zero-length datasets was removed in newer versions of h5py')
+def test_nullscale():
+ y_values = np.array([.1, .5, .51])
+
+ f = nwb.create_blank_file(force=True)
+ data = nwb.SpikeTrain(y_values, unit='second')
+ data.add_to_stimulus(f)
+
+ spike_train = nwb.SpikeTrain.get_stimulus(f)
+ y_values_new = spike_train.data[:]
+ np.testing.assert_almost_equal(y_values, y_values_new)
+ assert isinstance(spike_train.scales[0], nwb.NullScale)
+ f.close()
+
+
+def test_timeseries():
+ y_values = np.array([.1, .2, .1])
+ f = nwb.create_blank_file()
+ scale = nwb.DtScale(.1, 'time', 'second')
+ nwb.TimeSeries(y_values, scale=scale, dimension='voltage', unit='volt').add_to_acquisition(f)
+
+ data = nwb.TimeSeries.get_acquisition(f)
+ assert data.scales[0].dt == .1
+ assert data.scales[0].unit == 'second'
+ np.testing.assert_almost_equal(data.data[:], y_values)
+ assert data.unit == 'volt'
+
+ file_name = f.filename
+ f.close()
+ os.remove(file_name)
+
+
+def test_external_link():
+ data_original = np.zeros(10)
+ f = nwb.create_blank_file(force=True)
+ scale = nwb.Scale(np.zeros(10), 'time', 'second')
+ nwb.TimeSeries(data_original, scale=scale, dimension='voltage', unit='volt',
+ metadata={'foo': 1}).add_to_acquisition(f)
+ temp_file_name = f.filename
+ f.close()
+
+ f = h5py.File(temp_file_name, 'r')
+ f2 = nwb.create_blank_file(force=True)
+ data = nwb.TimeSeries.get_acquisition(f, 0)
+ data.add_to_acquisition(f2)
+ f.close()
+ temp_file_name_2 = f2.filename
+ f2.close()
+
+    f = h5py.File(temp_file_name_2, 'r')
+ data = nwb.TimeSeries.get_acquisition(f, 0)
+ np.testing.assert_almost_equal(data.data, data_original)
+ assert data.data.file.filename == temp_file_name
+
+ f.close()
+ os.remove(temp_file_name)
+ os.remove(temp_file_name_2)
+
+
+if __name__ == "__main__":
+ test_create_blank_file() # pragma: no cover
+ test_create_blank_file_force() # pragma: no cover
+ test_set_data_file_handle() # pragma: no cover
+ test_set_data_force() # pragma: no cover
+ test_get_data() # pragma: no cover
+ test_metadata() # pragma: no cover
+ test_add_shared_scale() # pragma: no cover
+ test_firing_rate() # pragma: no cover
+ test_processing() # pragma: no cover
+ test_analysis() # pragma: no cover
+ test_spike_train() # pragma: no cover
+ test_grayscale_movie() # pragma: no cover
+# test_get_stimulus() # pragma: no cover
+ test_different_scales()
+ test_writable()
+ #test_nullscale()
+ test_timeseries()
+ test_external_link()
diff --git a/bmtk-vb/bmtk/utils/__init__.py b/bmtk-vb/bmtk/utils/__init__.py
new file mode 100644
index 0000000..1c9c088
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/__init__.py
@@ -0,0 +1,24 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import logging
+
+
diff --git a/bmtk-vb/bmtk/utils/__init__.pyc b/bmtk-vb/bmtk/utils/__init__.pyc
new file mode 100644
index 0000000..ccd0172
Binary files /dev/null and b/bmtk-vb/bmtk/utils/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/utils/__pycache__/__init__.cpython-36.pyc b/bmtk-vb/bmtk/utils/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000..90d6bc1
Binary files /dev/null and b/bmtk-vb/bmtk/utils/__pycache__/__init__.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/utils/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..aeb1c8a
Binary files /dev/null and b/bmtk-vb/bmtk/utils/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/cell_vars/__init__.py b/bmtk-vb/bmtk/utils/cell_vars/__init__.py
new file mode 100644
index 0000000..021345f
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/cell_vars/__init__.py
@@ -0,0 +1,6 @@
+from .var_reader import CellVarsFile
+
+
+
+
+
diff --git a/bmtk-vb/bmtk/utils/cell_vars/__init__.pyc b/bmtk-vb/bmtk/utils/cell_vars/__init__.pyc
new file mode 100644
index 0000000..0eba0b8
Binary files /dev/null and b/bmtk-vb/bmtk/utils/cell_vars/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/utils/cell_vars/var_reader.py b/bmtk-vb/bmtk/utils/cell_vars/var_reader.py
new file mode 100644
index 0000000..21da36d
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/cell_vars/var_reader.py
@@ -0,0 +1,134 @@
+import h5py
+import numpy as np
+
+
+class CellVarsFile(object):
+ VAR_UNKNOWN = 'Unknown'
+ UNITS_UNKNOWN = 'NA'
+
+ def __init__(self, filename, mode='r', **params):
+ self._h5_handle = h5py.File(filename, 'r')
+ self._h5_root = self._h5_handle[params['h5_root']] if 'h5_root' in params else self._h5_handle['/']
+ self._var_data = {}
+ self._var_units = {}
+
+ self._mapping = None
+
+        # Look for variable and mapping groups
+ for var_name in self._h5_root.keys():
+ hf_grp = self._h5_root[var_name]
+
+ if var_name == 'data':
+ # According to the sonata format the /data table should be located at the root
+ var_name = self._h5_root['data'].attrs.get('variable_name', CellVarsFile.VAR_UNKNOWN)
+ self._var_data[var_name] = self._h5_root['data']
+ self._var_units[var_name] = self._find_units(self._h5_root['data'])
+
+ if not isinstance(hf_grp, h5py.Group):
+ continue
+
+ if var_name == 'mapping':
+ # Check for /mapping group
+ self._mapping = hf_grp
+ else:
+                # bmtk allows multiple variables in the same file (not SONATA
+                # compliant, though arguably it should be), with each variable's
+                # table stored under its own /<var_name>/data group.
+ if 'data' not in hf_grp:
+ print('Warning: could not find "data" dataset in {}. Skipping!'.format(var_name))
+ else:
+ self._var_data[var_name] = hf_grp['data']
+ self._var_units[var_name] = self._find_units(hf_grp['data'])
+
+ # create map between gids and tables
+ self._gid2data_table = {}
+ if self._mapping is None:
+ raise Exception('could not find /mapping group')
+ else:
+ gids_ds = self._mapping['gids']
+ index_pointer_ds = self._mapping['index_pointer']
+ for indx, gid in enumerate(gids_ds):
+ self._gid2data_table[gid] = (index_pointer_ds[indx], index_pointer_ds[indx+1]) # slice(index_pointer_ds[indx], index_pointer_ds[indx+1])
+
+ time_ds = self._mapping['time']
+ self._t_start = time_ds[0]
+ self._t_stop = time_ds[1]
+ self._dt = time_ds[2]
+ self._n_steps = int((self._t_stop - self._t_start) / self._dt)
+
+ @property
+ def variables(self):
+ return list(self._var_data.keys())
+
+ @property
+ def gids(self):
+ return list(self._gid2data_table.keys())
+
+ @property
+ def t_start(self):
+ return self._t_start
+
+ @property
+ def t_stop(self):
+ return self._t_stop
+
+ @property
+ def dt(self):
+ return self._dt
+
+ @property
+ def time_trace(self):
+ return np.linspace(self.t_start, self.t_stop, num=self._n_steps, endpoint=True)
+
+ @property
+ def h5(self):
+ return self._h5_root
+
+ def _find_units(self, data_set):
+ return data_set.attrs.get('units', CellVarsFile.UNITS_UNKNOWN)
+
+ def units(self, var_name=VAR_UNKNOWN):
+ return self._var_units[var_name]
+
+ def n_compartments(self, gid):
+ bounds = self._gid2data_table[gid]
+ return bounds[1] - bounds[0]
+
+ def compartment_ids(self, gid):
+ bounds = self._gid2data_table[gid]
+ return self._mapping['element_id'][bounds[0]:bounds[1]]
+
+ def compartment_positions(self, gid):
+ bounds = self._gid2data_table[gid]
+ return self._mapping['element_pos'][bounds[0]:bounds[1]]
+
+ def data(self, gid, var_name=VAR_UNKNOWN,time_window=None, compartments='origin'):
+ if var_name not in self.variables:
+ raise Exception('Unknown variable {}'.format(var_name))
+
+ if time_window is None:
+ time_slice = slice(0, self._n_steps)
+ else:
+ if len(time_window) != 2:
+                raise Exception('Invalid time_window, expecting tuple (begin, end).')
+
+ window_beg = max(int((time_window[0] - self.t_start)/self.dt), 0)
+            window_end = min(int((time_window[1] - self.t_start)/self.dt), self._n_steps)
+ time_slice = slice(window_beg, window_end)
+
+ multi_compartments = True
+ if compartments == 'origin' or self.n_compartments(gid) == 1:
+ # Return the first (and possibly only) compartment for said gid
+ gid_slice = self._gid2data_table[gid][0]
+ multi_compartments = False
+ elif compartments == 'all':
+ # Return all compartments
+ gid_slice = slice(self._gid2data_table[gid][0], self._gid2data_table[gid][1])
+ else:
+ # return all compartments with corresponding element id
+            # A single compartment id gets wrapped in a list; anything iterable
+            # is used as-is.
+            compartment_list = [compartments] if isinstance(compartments, int) else list(compartments)
+ begin = self._gid2data_table[gid][0]
+ end = self._gid2data_table[gid][1]
+            gid_slice = [i for i in range(begin, end) if self._mapping['element_id'][i] in compartment_list]
+
+ data = np.array(self._var_data[var_name][time_slice, gid_slice])
+ return data.T if multi_compartments else data
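+
+
+# Usage sketch (the file path and gid are placeholders for the output of a
+# simulation that recorded compartment variables):
+#
+#   cvf = CellVarsFile('output/cellvars/v_report.h5')
+#   print(cvf.variables, cvf.t_start, cvf.t_stop, cvf.dt)
+#   v_trace = cvf.data(gid=0, var_name='v', time_window=(0.0, 100.0))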
diff --git a/bmtk-vb/bmtk/utils/cell_vars/var_reader.pyc b/bmtk-vb/bmtk/utils/cell_vars/var_reader.pyc
new file mode 100644
index 0000000..a307d73
Binary files /dev/null and b/bmtk-vb/bmtk/utils/cell_vars/var_reader.pyc differ
diff --git a/bmtk-vb/bmtk/utils/converters/__init__.py b/bmtk-vb/bmtk/utils/converters/__init__.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/converters/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/bmtk/utils/converters/hoc_converter.py b/bmtk-vb/bmtk/utils/converters/hoc_converter.py
new file mode 100644
index 0000000..c500945
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/converters/hoc_converter.py
@@ -0,0 +1,299 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import json
+import os.path
+import re
+from collections import defaultdict
+from itertools import groupby
+from lxml import etree
+import bluepyopt.ephys as ephys
+from tqdm import tqdm
+import utils
+
+XML_NS = '{http://www.neuroml.org/schema/neuroml2}'
+MECHANISMS = [
+ 'channelDensity', 'channelDensityNernst', 'specificCapacitance', 'species',
+ 'resistivity', 'concentrationModel'
+]
+
+LOCATION_MAP = {
+ 'apic': 'apical',
+ 'soma': 'somatic',
+ 'dend': 'basal',
+ 'axon': 'axonal',
+ 'all': 'all'
+}
+
+
+def map_location_name(name):
+ return LOCATION_MAP[name]
+
+
+def load_json(json_path):
+    with open(json_path) as json_file:
+        params = json.load(json_file)
+
+ scalar = ephys.parameterscalers.NrnSegmentLinearScaler()
+ mechanisms = {}
+ sections_lookup = {'soma': 'somatic', 'dend': 'basal', 'axon': 'axonal', 'apic': 'apical'}
+ def getNrnSeclist(loc_name):
+ return ephys.locations.NrnSeclistLocation(loc_name, seclist_name=loc_name)
+
+ parameters = []
+ for d in params['genome']:
+ section = sections_lookup[d['section']]
+ value = d['value']
+ name = d['name']
+ mech = 'pas' if name == 'g_pass' else d['mechanism']
+ mech_name = 'CaDynamics' if mech == 'CaDynamics' else '{}.{}'.format(name, d['section'])
+ p_name = '{}_{}'.format(name, section) if name == 'g_pass' else name
+
+ if mech_name not in mechanisms:
+ nrn_mech = ephys.mechanisms.NrnMODMechanism(name=mech_name, mod_path=None, suffix=mech,
+ locations=[getNrnSeclist(section)])
+ mechanisms[mech_name] = nrn_mech
+
+ parameters.append(ephys.parameters.NrnSectionParameter(name=p_name, param_name=name, value_scaler=scalar,
+ value=value, locations=[getNrnSeclist(section)]))
+
+ parameters.append(ephys.parameters.NrnSectionParameter(name='erev_na', param_name='ena', value_scaler=scalar,
+ value=params['conditions'][0]['erev'][0]['ena'],
+ locations=[getNrnSeclist('somatic')]))
+ parameters.append(ephys.parameters.NrnSectionParameter(name='erev_k', param_name='ek', value_scaler=scalar,
+ value=params['conditions'][0]['erev'][0]['ek'],
+ locations=[getNrnSeclist('somatic')]))
+ parameters.append(ephys.parameters.NrnSectionParameter(name='erev_pas', param_name='e_pas', value_scaler=scalar,
+ value=params['conditions'][0]['v_init'],
+ locations=[getNrnSeclist('somatic'), getNrnSeclist('axonal'),
+ getNrnSeclist('basal'), getNrnSeclist('apical')]))
+
+ parameters.append(ephys.parameters.NrnSectionParameter(name='erev_Ih', param_name='ehcn', value_scaler=scalar,
+ value=-45.0,
+ locations=[getNrnSeclist('somatic')]))
+
+ parameters.append(ephys.parameters.NrnSectionParameter(name='res_all', param_name='Ra', value_scaler=scalar,
+ value=params['passive'][0]['ra'],
+ locations=[getNrnSeclist('somatic')]))
+ for sec in params['passive'][0]['cm']:
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(name='{}_cap'.format(sec['section']), param_name='cm',
+ value_scaler=scalar,
+ value=sec['cm'],
+ locations=[getNrnSeclist(sec['section'])]))
+
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(name='ca', param_name='depth_CaDynamics', value_scaler=scalar,
+ value=0.1, locations=[getNrnSeclist('somatic')]))
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(name='ca', param_name='minCai_CaDynamics', value_scaler=scalar,
+ value=0.0001, locations=[getNrnSeclist('somatic')]))
+
+ return mechanisms.values(), parameters
+
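+# For reference, load_json() assumes an Allen-Institute-style fit file shaped
+# roughly like the abridged, hypothetical example below (exact key sets can
+# vary between releases):
+#
+# {
+#   "genome": [{"section": "soma", "name": "gbar_NaTs", "value": 0.7, "mechanism": "NaTs"}],
+#   "conditions": [{"v_init": -80.0, "erev": [{"ena": 53.0, "ek": -107.0}]}],
+#   "passive": [{"ra": 100.0, "cm": [{"section": "soma", "cm": 1.0}]}]
+# }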
+
+def load_neuroml(neuroml_path):
+ root = etree.parse(neuroml_path).getroot()
+ biophysics = defaultdict(list)
+ for mechanism in MECHANISMS:
+ xml_mechanisms = root.findall('.//' + XML_NS + mechanism)
+ for xml_mechanism in xml_mechanisms:
+ biophysics[mechanism].append(xml_mechanism.attrib)
+
+ return biophysics
+
+
+def define_mechanisms(biophysics):
+ def keyfn(x):
+ return x['segmentGroup']
+
+ channels = biophysics['channelDensity'] + biophysics[
+ 'channelDensityNernst']
+ segment_groups = [(k, list(g))
+ for k, g in groupby(
+ sorted(
+ channels, key=keyfn), keyfn)]
+ mechanisms = []
+ for sectionlist, channels in segment_groups:
+ loc_name = map_location_name(sectionlist)
+ seclist_loc = ephys.locations.NrnSeclistLocation(
+ loc_name, seclist_name=loc_name)
+ for channel in channels:
+ # print 'mechanisms.append(ephys.mechanisms.NrnMODMechanism(name={}.{}, mod_path=None, suffix={}, locations=[{}]))'.format(channel['ionChannel'], loc_name, channel['ionChannel'], seclist_loc)
+ mechanisms.append(
+ ephys.mechanisms.NrnMODMechanism(
+ name='%s.%s' % (channel['ionChannel'], loc_name),
+ mod_path=None,
+ suffix=channel['ionChannel'],
+ locations=[seclist_loc], ))
+ for elem in biophysics['species']:
+ section = map_location_name(elem['segmentGroup'])
+ section_loc = ephys.locations.NrnSeclistLocation(
+ section, seclist_name=section)
+ # print 'mechanisms.append(ephys.mechanisms.NrnMODMechanism(name={}, mod_path=None, suffix={}, location=[{}]))'.format(elem['concentrationModel'], elem['concentrationModel'], section_loc)
+ mechanisms.append(
+ ephys.mechanisms.NrnMODMechanism(
+ name=elem['concentrationModel'],
+ mod_path=None,
+ suffix=elem['concentrationModel'],
+ locations=[section_loc]))
+
+ return mechanisms
+
+
+def define_parameters(biophysics):
+    ''' for the time being all AIBS distributions are uniform '''
+ parameters = []
+
+ def keyfn(x):
+ return x['ionChannel']
+
+ NUMERIC_CONST_PATTERN = r'''[-+]? (?: (?: \d* \. \d+ ) | (?: \d+ \.? ) )(?: [Ee] [+-]? \d+ ) ?'''
+ rx = re.compile(NUMERIC_CONST_PATTERN, re.VERBOSE)
+
+ def get_cond_density(density_string):
+ m = re.match(rx, density_string)
+ return float(m.group())
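+    # e.g. get_cond_density('0.00043 S_per_cm2') -> 0.00043; trailing units are discarded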
+
+ scaler = ephys.parameterscalers.NrnSegmentLinearScaler()
+ MAP_EREV = {
+ 'Im': 'ek',
+ 'Ih': 'ehcn', # I am not sure of that one
+ 'Nap': 'ena',
+ 'K_P': 'ek',
+ 'K_T': 'ek',
+ 'SK': 'ek',
+ 'SKv3_1': 'ek',
+ 'NaTs': 'ena',
+ 'Kv3_1': 'ek',
+ 'NaV': 'ena',
+ 'Kd': 'ek',
+ 'Kv2like': 'ek',
+ 'Im_v2': 'ek',
+ 'pas': 'e_pas'
+ }
+ for mech_type in ['channelDensity', 'channelDensityNernst']:
+ mechanisms = biophysics[mech_type]
+ for mech in mechanisms:
+ section_list = map_location_name(mech['segmentGroup'])
+ seclist_loc = ephys.locations.NrnSeclistLocation(
+ section_list, seclist_name=section_list)
+
+ def map_name(name):
+ ''' this name has to match the name in the mod file '''
+                reg_name = re.compile(r'gbar\_(?P<channel>[\w]+)')
+ m = re.match(reg_name, name)
+ if m:
+ channel = m.group('channel')
+ return 'gbar' + '_' + channel
+ if name[:len('g_pas')] == 'g_pas':
+ ''' special case '''
+ return 'g_pas'
+ assert False, "name %s" % name
+
+ param_name = map_name(mech['id'])
+ # print 'parameters.append(ephys.parameters.NrnSectionParameter(name={}, param_name={}, value_scalar={}, value={}, locations=[{}]))'.format(mech['id'], param_name, scaler, get_cond_density(mech['condDensity']), seclist_loc)
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(
+ name=mech['id'],
+ param_name=param_name,
+ value_scaler=scaler,
+ value=get_cond_density(mech['condDensity']),
+ locations=[seclist_loc]))
+ if mech_type != 'channelDensityNernst':
+ # print 'parameters.append(ephys.parameters.NrnSectionParameter(name={}, param_name={}, value_scalar={}, value={}, locations=[{}]))'.format('erev' + mech['id'], MAP_EREV[mech['ionChannel']], scaler, get_cond_density(mech['erev']), seclist_loc)
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(
+ name='erev' + mech['id'],
+ param_name=MAP_EREV[mech['ionChannel']],
+ value_scaler=scaler,
+ value=get_cond_density(mech['erev']),
+ locations=[seclist_loc]))
+
+ # print ''
+ PARAM_NAME = {'specificCapacitance': 'cm', 'resistivity': 'Ra'}
+ for b_type in ['specificCapacitance', 'resistivity']:
+ for elem in biophysics[b_type]:
+ section = map_location_name(elem['segmentGroup'])
+ section_loc = ephys.locations.NrnSeclistLocation(
+ section, seclist_name=section)
+
+ # print 'parameters.append(ephys.parameters.NrnSectionParameter(name={}, param_name={}, value_scalar={}, value={}, locations=[{}]))'.format(elem['id'], PARAM_NAME[b_type], scaler, get_cond_density(elem['value']), seclist_loc)
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(
+ name=elem['id'],
+ param_name=PARAM_NAME[b_type],
+ value_scaler=scaler,
+ value=get_cond_density(elem['value']),
+ locations=[section_loc]))
+ concentrationModel = biophysics['concentrationModel'][0]
+
+ # print ''
+ for elem in biophysics['species']:
+ section = map_location_name(elem['segmentGroup'])
+ section_loc = ephys.locations.NrnSeclistLocation(
+ section, seclist_name=section)
+ for attribute in ['gamma', 'decay', 'depth', 'minCai']:
+ # print 'parameters.append(ephys.parameters.NrnSectionParameter(name={}, param_name={}, value_scalar={}, value={}, locations=[{}]))'.format(elem['id'], attribute + '_' + elem['concentrationModel'], scaler, get_cond_density(concentrationModel[attribute]), seclist_loc)
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(
+ name=elem['id'],
+ param_name=attribute + '_' + elem['concentrationModel'],
+ value_scaler=scaler,
+ value=get_cond_density(concentrationModel[attribute]),
+ locations=[section_loc]))
+
+ return parameters
+
+
+def create_hoc(neuroml_path, neuroml, morphologies, incr, output_dir):
+ if neuroml_path.endswith('json'):
+ mechanisms, parameters = load_json(neuroml_path)
+
+ else:
+ biophysics = load_neuroml(neuroml_path)
+ mechanisms = define_mechanisms(biophysics)
+ parameters = define_parameters(biophysics)
+
+ for morphology in morphologies:
+ ccell_name = utils.name_ccell(neuroml, morphology)
+ hoc = ephys.create_hoc.create_hoc(
+ mechs=mechanisms,
+ parameters=parameters,
+ template_name='ccell' + str(incr),
+ template_filename='cell_template_compatible.jinja2',
+ template_dir='.',
+ morphology=morphology + '.swc', )
+ with open(os.path.join(output_dir, ccell_name + '.hoc'), 'w') as f:
+ f.write(hoc)
+
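+# Hypothetical usage sketch (paths and names are illustrative only):
+#
+#   create_hoc('472363762_fit.json', 'Biophys1', ['Scnn1a_morph'], 0, 'hoc_out')
+#
+# writes one hoc_out/<ccell_name>.hoc per morphology, with ccell_name taken
+# from utils.name_ccell(neuroml, morphology).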
+
+def convert_to_hoc(config, cells, output_dir):
+ to_convert = cells[['dynamics_params', 'morphology', 'neuroml']]
+ to_convert = to_convert.drop_duplicates()
+ neuroml_config_path = config['components']['biophysical_neuron_models_dir']
+ incr = 0
+ for name, g in tqdm(to_convert.groupby('dynamics_params'), 'creating hoc files'):
+ neuroml_path = os.path.join(neuroml_config_path, name)
+ create_hoc(neuroml_path,
+ list(g['neuroml'])[0],
+ set(g['morphology']), incr, output_dir)
+ incr += 1
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/converters/sonata/__init__.py b/bmtk-vb/bmtk/utils/converters/sonata/__init__.py
new file mode 100644
index 0000000..c473e5d
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/converters/sonata/__init__.py
@@ -0,0 +1,2 @@
+from edge_converters import convert_edges
+from node_converters import convert_nodes
diff --git a/bmtk-vb/bmtk/utils/converters/sonata/edge_converters.py b/bmtk-vb/bmtk/utils/converters/sonata/edge_converters.py
new file mode 100644
index 0000000..335d4f5
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/converters/sonata/edge_converters.py
@@ -0,0 +1,278 @@
+import os
+from functools import partial
+
+import numpy as np
+import pandas as pd
+import h5py
+
+column_renames = {
+ 'params_file': 'dynamics_params',
+ 'level_of_detail': 'model_type',
+ 'morphology': 'morphology',
+ 'x_soma': 'x',
+ 'y_soma': 'y',
+ 'z_soma': 'z',
+ 'weight_max': 'syn_weight',
+ 'set_params_function': 'model_template'
+}
+
+
+def convert_edges(edges_file, edge_types_file, **params):
+ is_flat_h5 = False
+ is_new_h5 = False
+ try:
+ h5file = h5py.File(edges_file, 'r')
+ if 'edges' in h5file:
+ is_new_h5 = True
+ elif 'num_syns' in h5file:
+ is_flat_h5 = True
+ except Exception as e:
+ pass
+
+ if is_flat_h5:
+ update_aibs_edges(edges_file, edge_types_file, **params)
+ return
+ elif is_new_h5:
+ update_h5_edges(edges_file, edge_types_file, **params)
+ return
+
+    # Fall back to the oldest, csv-based format; any parsing error simply
+    # propagates to the caller
+    edges_csv2h5(edges_file, **params)
+
+
+def update_edge_types_file(edge_types_file, src_network=None, trg_network=None, output_dir='network'):
+ edge_types_csv = pd.read_csv(edge_types_file, sep=' ')
+
+ # rename required columns
+ edge_types_csv = edge_types_csv.rename(index=str, columns=column_renames)
+
+ edge_types_output_fn = os.path.join(output_dir, '{}_{}_edge_types.csv'.format(src_network, trg_network))
+ edge_types_csv.to_csv(edge_types_output_fn, sep=' ', index=False, na_rep='NONE')
+
+
+def update_h5_edges(edges_file, edge_types_file, src_network=None, population_name=None, trg_network=None,
+ output_dir='network'):
+ population_name = population_name if population_name is not None else '{}_to_{}'.format(src_network, trg_network)
+ input_h5 = h5py.File(edges_file, 'r')
+
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
+
+ edges_output_fn = os.path.join(output_dir, '{}_{}_edges.h5'.format(src_network, trg_network))
+ with h5py.File(edges_output_fn, 'w') as h5:
+ edges_path = '/edges/{}'.format(population_name)
+ h5.copy(input_h5['/edges'], edges_path)
+ grp = h5[edges_path]
+ grp.move('source_gid', 'source_node_id')
+ grp.move('target_gid', 'target_node_id')
+ grp.move('edge_group', 'edge_group_id')
+
+ if 'network' in grp['source_node_id'].attrs:
+ del grp['source_node_id'].attrs['network']
+ grp['source_node_id'].attrs['node_population'] = src_network
+
+ if 'network' in grp['target_node_id'].attrs:
+ del grp['target_node_id'].attrs['network']
+ grp['target_node_id'].attrs['node_population'] = trg_network
+
+ create_index(input_h5['edges/target_gid'], grp, index_type=INDEX_TARGET)
+ create_index(input_h5['edges/source_gid'], grp, index_type=INDEX_SOURCE)
+
+ update_edge_types_file(edge_types_file, src_network, trg_network, output_dir)
+
+
+def update_aibs_edges(edges_file, edge_types_file, trg_network, src_network, population_name=None, output_dir='output'):
+ population_name = population_name if population_name is not None else '{}_to_{}'.format(src_network, trg_network)
+
+ edges_h5 = h5py.File(edges_file, 'r')
+ src_gids = edges_h5['/src_gids']
+ n_edges = len(src_gids)
+ trg_gids = np.zeros(n_edges, dtype=np.uint64)
+ start = edges_h5['/edge_ptr'][0]
+ for trg_gid, end in enumerate(edges_h5['/edge_ptr'][1:]):
+ trg_gids[start:end] = [trg_gid]*(end-start)
+ start = end
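+    # edge_ptr is a CSR-style offset array: e.g. edge_ptr == [0, 2, 5] expands
+    # to trg_gids == [0, 0, 1, 1, 1]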
+
+ edges_output_fn = os.path.join(output_dir, '{}_{}_edges.h5'.format(src_network, trg_network))
+ if not os.path.exists(output_dir):
+ os.mkdir(output_dir)
+
+ with h5py.File(edges_output_fn, 'w') as hf:
+ grp = hf.create_group('/edges/{}'.format(population_name))
+
+ grp.create_dataset('target_node_id', data=trg_gids, dtype='uint64')
+ grp['target_node_id'].attrs['node_population'] = trg_network
+ grp.create_dataset('source_node_id', data=edges_h5['src_gids'], dtype='uint64')
+ grp['source_node_id'].attrs['node_population'] = src_network
+ grp.create_dataset('edge_group_id', data=np.zeros(n_edges), dtype='uint32')
+ grp.create_dataset('edge_group_index', data=np.arange(0, n_edges))
+ grp.create_dataset('edge_type_id', data=edges_h5['edge_types'])
+ grp.create_dataset('0/nsyns', data=edges_h5['num_syns'], dtype='uint32')
+ grp.create_group('0/dynamics_params')
+
+ create_index(trg_gids, grp, index_type=INDEX_TARGET)
+ create_index(src_gids, grp, index_type=INDEX_SOURCE)
+
+ update_edge_types_file(edge_types_file, src_network, trg_network, output_dir)
+
+
+def edges_csv2h5(edges_file, edge_types_file, src_network, src_nodes, src_node_types, trg_network, trg_nodes,
+ trg_node_types, output_dir='network', src_label='location', trg_label='pop_name'):
+ """Used to convert oldest (isee engine) edges files
+
+ :param edges_file:
+ :param edge_types_file:
+ :param src_network:
+ :param src_nodes:
+ :param src_node_types:
+ :param trg_network:
+ :param trg_nodes:
+ :param trg_node_types:
+ :param output_dir:
+ :param src_label:
+ :param trg_label:
+ """
+ column_renames = {
+ 'target_model_id': 'node_type_id',
+ 'weight': 'weight_max',
+ 'weight_function': 'weight_func',
+ }
+
+ columns_order = ['edge_type_id', 'target_query', 'source_query']
+
+ edges_h5 = h5py.File(edges_file, 'r')
+ edge_types_df = pd.read_csv(edge_types_file, sep=' ')
+ n_edges = len(edges_h5['src_gids'])
+ n_targets = len(edges_h5['indptr']) - 1
+
+ # rename specified columns in edge-types
+ edge_types_df = edge_types_df.rename(columns=column_renames)
+
+ # Add a "target_query" and "source_query" columns from target_label and source_label
+ def query_col(row, labels, search_col):
+ return '&'.join("{}=='{}'".format(l, row[search_col]) for l in labels)
+ trg_query_fnc = partial(query_col, labels=['node_type_id', trg_label], search_col='target_label')
+ src_query_fnc = partial(query_col, labels=[src_label], search_col='source_label')
+
+ edge_types_df['target_query'] = edge_types_df.apply(trg_query_fnc, axis=1)
+ edge_types_df['source_query'] = edge_types_df.apply(src_query_fnc, axis=1)
+
+ # Add an edge_type_id column
+ edge_types_df['edge_type_id'] = np.arange(100, 100 + len(edge_types_df.index), dtype='uint32')
+
+ nodes_tmp = pd.read_csv(src_nodes, sep=' ', index_col=['id'])
+ node_types_tmp = pd.read_csv(src_node_types, sep=' ')
+ src_nodes_df = pd.merge(nodes_tmp, node_types_tmp, on='model_id')
+
+ nodes_tmp = pd.read_csv(trg_nodes, sep=' ', index_col=['id'])
+ node_types_tmp = pd.read_csv(trg_node_types, sep=' ')
+ trg_nodes_df = pd.merge(nodes_tmp, node_types_tmp, on='model_id')
+
+ # For assigning edge types to each edge. For a given src --> trg pair we need to lookup source_label and
+ # target_label values of the nodes, then use it to find the corresponding edge_types row.
+ print('Processing edge_type_id dataset')
+ edge_types_ids = np.zeros(n_edges, dtype='uint32')
+ edge_types_df = edge_types_df.set_index(['node_type_id', 'target_label', 'source_label'])
+    ten_percent = max(int(n_targets*.1), 1) # for keeping track of progress; avoids modulo-by-zero when n_targets < 10
+ index = 0 # keeping track of row index
+ for trg_gid in xrange(n_targets):
+ # for the target find value node_type_id and target_label
+ nodes_row = trg_nodes_df.loc[trg_gid]
+ model_id = nodes_row['model_id']
+ trg_label_val = nodes_row[trg_label]
+
+ # iterate through all the sources
+ idx_begin = edges_h5['indptr'][trg_gid]
+ idx_end = edges_h5['indptr'][trg_gid+1]
+ for src_gid in edges_h5['src_gids'][idx_begin:idx_end]:
+ # find each source_label, use value to find edge_type_id
+ # TODO: may be faster to filter by model_id, trg_label_val before iterating through the sources
+ src_label_val = src_nodes_df.loc[src_gid][src_label]
+ edge_type_id = edge_types_df.loc[model_id, trg_label_val, src_label_val]['edge_type_id']
+ edge_types_ids[index] = edge_type_id
+ index += 1
+
+ if trg_gid % ten_percent == 0 and trg_gid != 0:
+ print(' processed {} out of {} targets'.format(trg_gid, n_targets))
+
+ # Create the target_gid table
+ print('Creating target_gid dataset')
+ trg_gids = np.zeros(n_edges)
+ for trg_gid in xrange(n_targets):
+ idx_begin = edges_h5['indptr'][trg_gid]
+ idx_end = edges_h5['indptr'][trg_gid+1]
+ trg_gids[idx_begin:idx_end] = [trg_gid]*(idx_end - idx_begin)
+
+ # Save edges.h5
+ edges_output_fn = '{}/{}_{}_edges.h5'.format(output_dir, src_network, trg_network)
+ print('Saving edges to {}.'.format(edges_output_fn))
+ with h5py.File(edges_output_fn, 'w') as hf:
+ hf.create_dataset('edges/target_gid', data=trg_gids, dtype='uint64')
+ hf['edges/target_gid'].attrs['node_population'] = trg_network
+ hf.create_dataset('edges/source_gid', data=edges_h5['src_gids'], dtype='uint64')
+        hf['edges/source_gid'].attrs['node_population'] = src_network
+ hf.create_dataset('edges/index_pointer', data=edges_h5['indptr'])
+ hf.create_dataset('edges/edge_group', data=np.zeros(n_edges), dtype='uint32')
+ hf.create_dataset('edges/edge_group_index', data=np.arange(0, n_edges))
+ hf.create_dataset('edges/edge_type_id', data=edge_types_ids)
+
+ hf.create_dataset('edges/0/nsyns', data=edges_h5['nsyns'], dtype='uint32')
+
+ # Save edge_types.csv
+ update_edge_types_file(edge_types_file, src_network, trg_network, output_dir)
+ '''
+ edges_types_output_fn = '{}/{}_{}_edge_types.csv'.format(output_dir, src_network, trg_network)
+ print('Saving edge-types to {}'.format(edges_types_output_fn))
+ edge_types_df = edge_types_df[edge_types_df['edge_type_id'].isin(np.unique(edge_types_ids))]
+ # reorder columns
+ reorderd_cols = columns_order + [cn for cn in edge_types_df.columns.tolist() if cn not in columns_order]
+ edge_types_df = edge_types_df[reorderd_cols]
+ edge_types_df.to_csv(edges_types_output_fn, sep=' ', index=False, na_rep='NONE')
+ '''
+
+
+INDEX_TARGET = 0
+INDEX_SOURCE = 1
+
+
+def create_index(node_ids_ds, output_grp, index_type=INDEX_TARGET):
+ if index_type == INDEX_TARGET:
+ edge_nodes = np.array(node_ids_ds, dtype=np.int64)
+ output_grp = output_grp.create_group('indicies/target_to_source')
+ elif index_type == INDEX_SOURCE:
+ edge_nodes = np.array(node_ids_ds, dtype=np.int64)
+ output_grp = output_grp.create_group('indicies/source_to_target')
+
+ edge_nodes = np.append(edge_nodes, [-1])
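+    # the -1 sentinel can never equal a real node id, so the final
+    # (begin_index, end_index) range is flushed on the last loop iteration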
+ n_targets = np.max(edge_nodes)
+ ranges_list = [[] for _ in xrange(n_targets + 1)]
+
+ n_ranges = 0
+ begin_index = 0
+ cur_trg = edge_nodes[begin_index]
+ for end_index, trg_gid in enumerate(edge_nodes):
+ if cur_trg != trg_gid:
+ ranges_list[cur_trg].append((begin_index, end_index))
+ cur_trg = int(trg_gid)
+ begin_index = end_index
+ n_ranges += 1
+
+ node_id_to_range = np.zeros((n_targets+1, 2))
+ range_to_edge_id = np.zeros((n_ranges, 2))
+ range_index = 0
+ for node_index, trg_ranges in enumerate(ranges_list):
+ if len(trg_ranges) > 0:
+ node_id_to_range[node_index, 0] = range_index
+ for r in trg_ranges:
+ range_to_edge_id[range_index, :] = r
+ range_index += 1
+ node_id_to_range[node_index, 1] = range_index
+
+ output_grp.create_dataset('range_to_edge_id', data=range_to_edge_id, dtype='uint64')
+ output_grp.create_dataset('node_id_to_range', data=node_id_to_range, dtype='uint64')
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/converters/sonata/node_converters.py b/bmtk-vb/bmtk/utils/converters/sonata/node_converters.py
new file mode 100644
index 0000000..befab51
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/converters/sonata/node_converters.py
@@ -0,0 +1,399 @@
+import os
+
+import h5py
+import pandas as pd
+import numpy as np
+
+
+def convert_nodes(nodes_file, node_types_file, **params):
+ is_h5 = False
+ try:
+ h5file = h5py.File(nodes_file, 'r')
+ is_h5 = True
+ except Exception as e:
+ pass
+
+ if is_h5:
+ update_h5_nodes(nodes_file, node_types_file, **params)
+ return
+
+ update_csv_nodes(nodes_file, node_types_file, **params)
+
+
+# columns which need to be renamed, key is original name and value is the updated name
+column_renames = {
+ 'id': 'node_id',
+ 'model_id': 'node_type_id',
+ 'electrophysiology': 'dynamics_params',
+ 'level_of_detail': 'model_type',
+ 'morphology': 'morphology',
+ 'params_file': 'dynamics_params',
+ 'x_soma': 'x',
+ 'y_soma': 'y',
+ 'z_soma': 'z'
+}
+
+
+def update_h5_nodes(nodes_file, node_types_file, network_name, output_dir='output',
+ column_order=('node_type_id', 'model_type', 'model_template', 'model_processing', 'dynamics_params',
+ 'morphology')):
+ # open nodes and node-types into a single table
+ input_h5 = h5py.File(nodes_file, 'r')
+
+ output_name = '{}_nodes.h5'.format(network_name)
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
+ nodes_output_fn = os.path.join(output_dir, output_name)
+
+ # save nodes hdf5
+ with h5py.File(nodes_output_fn, 'w') as h5:
+ #h5.copy()
+ #grp = h5.create_group('/nodes/{}'.format(network_name))
+ #input_grp = input_h5['/nodes/']
+ nodes_path = '/nodes/{}'.format(network_name)
+ h5.copy(input_h5['/nodes/'], nodes_path)
+ grp = h5[nodes_path]
+ grp.move('node_gid', 'node_id')
+ grp.move('node_group', 'node_group_id')
+
+ node_types_csv = pd.read_csv(node_types_file, sep=' ')
+
+ node_types_csv = node_types_csv.rename(index=str, columns=column_renames)
+
+ # Change values for model type
+ model_type_map = {
+ 'biophysical': 'biophysical',
+ 'point_IntFire1': 'point_process',
+ 'intfire': 'point_process',
+ 'virtual': 'virtual',
+ 'iaf_psc_alpha': 'nest:iaf_psc_alpha',
+ 'filter': 'virtual'
+ }
+ node_types_csv['model_type'] = node_types_csv.apply(lambda row: model_type_map[row['model_type']], axis=1)
+
+ # Add model_template column
+ def model_template(row):
+ model_type = row['model_type']
+ if model_type == 'biophysical':
+ return 'ctdb:Biophys1.hoc'
+ elif model_type == 'point_process':
+ return 'nrn:IntFire1'
+ else:
+ return 'NONE'
+ node_types_csv['model_template'] = node_types_csv.apply(model_template, axis=1)
+
+ # Add model_processing column
+ def model_processing(row):
+ model_type = row['model_type']
+ if model_type == 'biophysical':
+ return 'aibs_perisomatic'
+ else:
+ return 'NONE'
+ node_types_csv['model_processing'] = node_types_csv.apply(model_processing, axis=1)
+
+ # Reorder columns
+ orig_columns = node_types_csv.columns
+ col_order = [cn for cn in column_order if cn in orig_columns]
+ col_order += [cn for cn in node_types_csv.columns if cn not in column_order]
+ node_types_csv = node_types_csv[col_order]
+
+ # Save node-types csv
+ node_types_output_fn = os.path.join(output_dir, '{}_node_types.csv'.format(network_name))
+ node_types_csv.to_csv(node_types_output_fn, sep=' ', index=False, na_rep='NONE')
+ # open nodes and node-types into a single table
+
+ '''
+ print('loading csv files')
+ nodes_tmp = pd.read_csv(nodes_file, sep=' ')
+ node_types_tmp = pd.read_csv(node_types_file, sep=' ')
+ nodes_df = pd.merge(nodes_tmp, node_types_tmp, on='node_type_id')
+ n_nodes = len(nodes_df.index)
+
+ # rename required columns
+ nodes_df = nodes_df.rename(index=str, columns=column_renames)
+
+ # Old versions of node_type_id may be set to strings/floats, convert to integers
+ dtype_ntid = nodes_df['node_type_id'].dtype
+ if dtype_ntid == 'object':
+ # if string, move model_id to pop_name and create an integer node_type_id column
+ if 'pop_name' in nodes_df.columns:
+ nodes_df = nodes_df.drop('pop_name', axis=1)
+ nodes_df = nodes_df.rename(index=str, columns={'node_type_id': 'pop_name'})
+ ntid_map = {pop_name: indx for indx, pop_name in enumerate(nodes_df['pop_name'].unique())}
+ nodes_df['node_type_id'] = nodes_df.apply(lambda row: ntid_map[row['pop_name']], axis=1)
+
+ elif dtype_ntid == 'float64':
+ nodes_df['node_type_id'] = nodes_df['node_type_id'].astype('uint64')
+
+ # divide columns up into nodes and node-types columns, and for nodes determine which columns are valid for every
+ # node-type. The rules are
+    # 1. If all values are the same for a node-type-id, the column belongs in the node_types csv. If there's any
+    #    intra node-type heterogeneity then the column belongs in the nodes h5.
+ # 2. For nodes h5 columns, a column belongs to a node-type-id if it contains at least one non-null value
+ print('parsing input')
+ opt_columns = [n for n in nodes_df.columns if n not in ['node_id', 'node_type_id']]
+ heterogeneous_cols = {cn: False for cn in opt_columns}
+ nonnull_cols = {} # for each node-type, a list of columns that contains at least one non-null value
+ for node_type_id, nt_group in nodes_df.groupby(['node_type_id']):
+ nonnull_cols[node_type_id] = set(nt_group.columns[nt_group.isnull().any() == False].tolist())
+ for col_name in opt_columns:
+ heterogeneous_cols[col_name] |= len(nt_group[col_name].unique()) > 1
+
+ nodes_columns = set(cn for cn, val in heterogeneous_cols.items() if val)
+ nodes_types_columns = [cn for cn, val in heterogeneous_cols.items() if not val]
+
+    # Check for nodes columns that have non-numeric values; these will require some special processing to save to hdf5
+ string_nodes_columns = set()
+ for col_name in nodes_columns:
+ if nodes_df[col_name].dtype == 'object':
+ string_nodes_columns.add(col_name)
+ if len(string_nodes_columns) > 0:
+ print('Warning: column(s) {} have non-numeric values that vary within a node-type and will be stored in h5 format'.format(list(string_nodes_columns)))
+
+    # Divide the nodes columns into groups and create necessary lookup tables. If two node-types share the same
+ # non-null columns then they belong to the same group
+ grp_idx2cols = {} # group-id --> group-columns
+ grp_cols2idx = {} # group-columns --> group-id
+ grp_id2idx = {} # node-type-id --> group-id
+ group_index = -1
+ for nt_id, cols in nonnull_cols.items():
+ group_columns = sorted(list(nodes_columns & cols))
+ col_key = tuple(group_columns)
+ if col_key in grp_cols2idx:
+ grp_id2idx[nt_id] = grp_cols2idx[col_key]
+ else:
+ group_index += 1
+ grp_cols2idx[col_key] = group_index
+ grp_idx2cols[group_index] = group_columns
+ grp_id2idx[nt_id] = group_index
+
+    # merge x, y, z columns, if they exist, into a 'positions' dataset
+ grp_pos_cols = {}
+ for grp_idx, cols in grp_idx2cols.items():
+ pos_list = []
+ for coord in ['x', 'y', 'z']:
+ if coord in cols:
+ pos_list += coord
+ grp_idx2cols[grp_idx].remove(coord)
+ if len(pos_list) > 0:
+ grp_pos_cols[grp_idx] = pos_list
+
+ # Create the node_group and node_group_index columns
+ nodes_df['__bmtk_node_group'] = nodes_df.apply(lambda row: grp_id2idx[row['node_type_id']], axis=1)
+ nodes_df['__bmtk_node_group_index'] = [0]*n_nodes
+ for grpid in grp_idx2cols.keys():
+ group_size = len(nodes_df[nodes_df['__bmtk_node_group'] == grpid])
+ nodes_df.loc[nodes_df['__bmtk_node_group'] == grpid, '__bmtk_node_group_index'] = range(group_size)
+
+ # Save nodes.h5 file
+ nodes_output_fn = os.path.join(output_dir, '{}_nodes.h5'.format(network_name))
+ node_types_output_fn = os.path.join(output_dir, '{}_node_types.csv'.format(network_name))
+ if not os.path.exists(output_dir):
+ os.mkdir(output_dir)
+
+ print('Creating {}'.format(nodes_output_fn))
+ with h5py.File(nodes_output_fn, 'w') as hf:
+ hf.create_dataset('nodes/node_gid', data=nodes_df['node_id'], dtype='uint64')
+ hf['nodes/node_gid'].attrs['network'] = network_name
+ hf.create_dataset('nodes/node_type_id', data=nodes_df['node_type_id'], dtype='uint64')
+ hf.create_dataset('nodes/node_group', data=nodes_df['__bmtk_node_group'], dtype='uint32')
+ hf.create_dataset('nodes/node_group_index', data=nodes_df['__bmtk_node_group_index'], dtype='uint64')
+
+ for grpid, cols in grp_idx2cols.items():
+ group_slice = nodes_df[nodes_df['__bmtk_node_group'] == grpid]
+ for col_name in cols:
+ dataset_name = 'nodes/{}/{}'.format(grpid, col_name)
+ if col_name in string_nodes_columns:
+ # for columns with non-numeric values
+ dt = h5py.special_dtype(vlen=bytes)
+ hf.create_dataset(dataset_name, data=group_slice[col_name], dtype=dt)
+ else:
+ hf.create_dataset(dataset_name, data=group_slice[col_name])
+
+ # special case for positions
+ if grpid in grp_pos_cols:
+ hf.create_dataset('nodes/{}/positions'.format(grpid),
+ data=group_slice.as_matrix(columns=grp_pos_cols[grpid]))
+
+ # Save the node_types.csv file
+ print('Creating {}'.format(node_types_output_fn))
+ node_types_table = nodes_df[['node_type_id'] + nodes_types_columns]
+ node_types_table = node_types_table.drop_duplicates()
+ if len(sort_order) > 0:
+ node_types_table = node_types_table.sort_values(by=sort_order)
+
+ node_types_table.to_csv(node_types_output_fn, sep=' ', index=False) # , na_rep='NONE')
+ '''
+
+
+def update_csv_nodes(nodes_file, node_types_file, network_name, output_dir='network',
+ column_order=('node_type_id', 'model_type', 'model_template', 'model_processing',
+ 'dynamics_params', 'morphology')):
+ # open nodes and node-types into a single table
+ print('loading csv files')
+ nodes_tmp = pd.read_csv(nodes_file, sep=' ')
+ node_types_tmp = pd.read_csv(node_types_file, sep=' ')
+ if 'model_id' in nodes_tmp:
+ nodes_df = pd.merge(nodes_tmp, node_types_tmp, on='model_id')
+ elif 'node_type_id' in nodes_tmp:
+ nodes_df = pd.merge(nodes_tmp, node_types_tmp, on='node_type_id')
+ else:
+ raise Exception('Could not find column to merge nodes and node_types')
+
+ n_nodes = len(nodes_df.index)
+
+ # rename required columns
+ nodes_df = nodes_df.rename(index=str, columns=column_renames)
+
+ # Old versions of node_type_id may be set to strings/floats, convert to integers
+ dtype_ntid = nodes_df['node_type_id'].dtype
+ if dtype_ntid == 'object':
+ # if string, move model_id to pop_name and create an integer node_type_id column
+ if 'pop_name' in nodes_df:
+ nodes_df = nodes_df.drop('pop_name', axis=1)
+
+ nodes_df = nodes_df.rename(index=str, columns={'node_type_id': 'pop_name'})
+
+ ntid_map = {pop_name: indx for indx, pop_name in enumerate(nodes_df['pop_name'].unique())}
+ nodes_df['node_type_id'] = nodes_df.apply(lambda row: ntid_map[row['pop_name']], axis=1)
+
+ elif dtype_ntid == 'float64':
+ nodes_df['node_type_id'] = nodes_df['node_type_id'].astype('uint64')
+
+ # divide columns up into nodes and node-types columns, and for nodes determine which columns are valid for every
+ # node-type. The rules are
+    # 1. If all values are the same for a node-type-id, the column belongs in the node_types csv. If there's any
+    #    intra node-type heterogeneity then the column belongs in the nodes h5.
+ # 2. For nodes h5 columns, a column belongs to a node-type-id if it contains at least one non-null value
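+    # Example: if every node of type 100 shares morphology 'm1.swc', that column is
+    # homogeneous and lands in the node-types csv; if rotation_angle differs between
+    # those nodes, the column is heterogeneous and lands in the nodes h5.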
+ print('parsing input')
+ opt_columns = [n for n in nodes_df.columns if n not in ['node_id', 'node_type_id']]
+ heterogeneous_cols = {cn: False for cn in opt_columns}
+ nonnull_cols = {} # for each node-type, a list of columns that contains at least one non-null value
+ for node_type_id, nt_group in nodes_df.groupby(['node_type_id']):
+ nonnull_cols[node_type_id] = set(nt_group.columns[nt_group.isnull().any() == False].tolist())
+ for col_name in opt_columns:
+ heterogeneous_cols[col_name] |= len(nt_group[col_name].unique()) > 1
+
+ nodes_columns = set(cn for cn, val in heterogeneous_cols.items() if val)
+ nodes_types_columns = [cn for cn, val in heterogeneous_cols.items() if not val]
+
+    # Check for nodes columns that have non-numeric values; these will require some special processing to save to hdf5
+ string_nodes_columns = set()
+ for col_name in nodes_columns:
+ if nodes_df[col_name].dtype == 'object':
+ string_nodes_columns.add(col_name)
+ if len(string_nodes_columns) > 0:
+ print('Warning: column(s) {} have non-numeric values that vary within a node-type and will be stored in h5 format'.format(list(string_nodes_columns)))
+
+    # Divide the nodes columns into groups and create necessary lookup tables. If two node-types share the same
+ # non-null columns then they belong to the same group
+ grp_idx2cols = {} # group-id --> group-columns
+ grp_cols2idx = {} # group-columns --> group-id
+ grp_id2idx = {} # node-type-id --> group-id
+ group_index = -1
+ for nt_id, cols in nonnull_cols.items():
+ group_columns = sorted(list(nodes_columns & cols))
+ col_key = tuple(group_columns)
+ if col_key in grp_cols2idx:
+ grp_id2idx[nt_id] = grp_cols2idx[col_key]
+ else:
+ group_index += 1
+ grp_cols2idx[col_key] = group_index
+ grp_idx2cols[group_index] = group_columns
+ grp_id2idx[nt_id] = group_index
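+    # e.g. two node-types whose heterogeneous, non-null columns are both
+    # ('x', 'y') share one group id; a type with ('x', 'y', 'tuning_angle')
+    # gets a new group id of its own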
+
+    # merge x, y, z columns, if they exist, into a 'positions' dataset
+ grp_pos_cols = {}
+ for grp_idx, cols in grp_idx2cols.items():
+ pos_list = []
+ for coord in ['x', 'y', 'z']:
+ if coord in cols:
+ pos_list += coord
+ grp_idx2cols[grp_idx].remove(coord)
+ if len(pos_list) > 0:
+ grp_pos_cols[grp_idx] = pos_list
+
+ # Create the node_group and node_group_index columns
+ nodes_df['__bmtk_node_group'] = nodes_df.apply(lambda row: grp_id2idx[row['node_type_id']], axis=1)
+ nodes_df['__bmtk_node_group_index'] = [0]*n_nodes
+ for grpid in grp_idx2cols.keys():
+ group_size = len(nodes_df[nodes_df['__bmtk_node_group'] == grpid])
+ nodes_df.loc[nodes_df['__bmtk_node_group'] == grpid, '__bmtk_node_group_index'] = range(group_size)
+
+ # Save nodes.h5 file
+ nodes_output_fn = os.path.join(output_dir, '{}_nodes.h5'.format(network_name))
+ node_types_output_fn = os.path.join(output_dir, '{}_node_types.csv'.format(network_name))
+ if not os.path.exists(output_dir):
+ os.mkdir(output_dir)
+
+ print('Creating {}'.format(nodes_output_fn))
+ with h5py.File(nodes_output_fn, 'w') as hf:
+ grp = hf.create_group('/nodes/{}'.format(network_name))
+ grp.create_dataset('node_id', data=nodes_df['node_id'], dtype='uint64')
+ grp.create_dataset('node_type_id', data=nodes_df['node_type_id'], dtype='uint64')
+ grp.create_dataset('node_group_id', data=nodes_df['__bmtk_node_group'], dtype='uint32')
+ grp.create_dataset('node_group_index', data=nodes_df['__bmtk_node_group_index'], dtype='uint64')
+
+ for grpid, cols in grp_idx2cols.items():
+ group_slice = nodes_df[nodes_df['__bmtk_node_group'] == grpid]
+ for col_name in cols:
+ dataset_name = '{}/{}'.format(grpid, col_name)
+ if col_name in string_nodes_columns:
+ # for columns with non-numeric values
+ dt = h5py.special_dtype(vlen=bytes)
+ grp.create_dataset(dataset_name, data=group_slice[col_name], dtype=dt)
+ else:
+ grp.create_dataset(dataset_name, data=group_slice[col_name])
+
+ # special case for positions
+ if grpid in grp_pos_cols:
+ grp.create_dataset('{}/positions'.format(grpid),
+ data=group_slice.as_matrix(columns=grp_pos_cols[grpid]))
+
+ # Create empty dynamics_params
+ grp.create_group('{}/dynamics_params'.format(grpid))
+
+ # Save the node_types.csv file
+ print('Creating {}'.format(node_types_output_fn))
+ node_types_table = nodes_df[['node_type_id'] + nodes_types_columns]
+ node_types_table = node_types_table.drop_duplicates()
+
+ # Change values for model type
+ model_type_map = {
+ 'biophysical': 'biophysical',
+ 'point_IntFire1': 'point_process',
+ 'virtual': 'virtual',
+ 'intfire': 'point_process',
+ 'filter': 'virtual'
+ }
+ node_types_table['model_type'] = node_types_table.apply(lambda row: model_type_map[row['model_type']], axis=1)
+ if 'set_params_function' in node_types_table:
+ node_types_table = node_types_table.drop('set_params_function', axis=1)
+
+ # Add model_template column
+ def model_template(row):
+ model_type = row['model_type']
+ if model_type == 'biophysical':
+ return 'ctdb:Biophys1.hoc'
+ elif model_type == 'point_process':
+ return 'nrn:IntFire1'
+ else:
+ return 'NONE'
+ node_types_table['model_template'] = node_types_table.apply(model_template, axis=1)
+
+ # Add model_processing column
+ def model_processing(row):
+ model_type = row['model_type']
+ if model_type == 'biophysical':
+ return 'aibs_perisomatic'
+ else:
+ return 'NONE'
+ node_types_table['model_processing'] = node_types_table.apply(model_processing, axis=1)
+
+ # Reorder columns
+ orig_columns = node_types_table.columns
+ col_order = [cn for cn in column_order if cn in orig_columns]
+ col_order += [cn for cn in node_types_table.columns if cn not in column_order]
+ node_types_table = node_types_table[col_order]
+
+ node_types_table.to_csv(node_types_output_fn, sep=' ', index=False, na_rep='NONE')
diff --git a/bmtk-vb/bmtk/utils/io/Final_Sp2019_20190512.docx b/bmtk-vb/bmtk/utils/io/Final_Sp2019_20190512.docx
new file mode 100644
index 0000000..a65f351
Binary files /dev/null and b/bmtk-vb/bmtk/utils/io/Final_Sp2019_20190512.docx differ
diff --git a/bmtk-vb/bmtk/utils/io/__init__.py b/bmtk-vb/bmtk/utils/io/__init__.py
new file mode 100644
index 0000000..aaccbcd
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/io/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+#from tabular_network_v1 import TabularNetwork
+#from tabular_network_v0 import TabularNetwork as TabularNetwork_AI
+
+def log_warning(message):
+ pass
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/io/__init__.pyc b/bmtk-vb/bmtk/utils/io/__init__.pyc
new file mode 100644
index 0000000..34e44ce
Binary files /dev/null and b/bmtk-vb/bmtk/utils/io/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/utils/io/__pycache__/__init__.cpython-36.pyc b/bmtk-vb/bmtk/utils/io/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000..27ece15
Binary files /dev/null and b/bmtk-vb/bmtk/utils/io/__pycache__/__init__.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/io/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/utils/io/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..a618ec9
Binary files /dev/null and b/bmtk-vb/bmtk/utils/io/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/io/__pycache__/cell_vars.cpython-36.pyc b/bmtk-vb/bmtk/utils/io/__pycache__/cell_vars.cpython-36.pyc
new file mode 100644
index 0000000..a29d147
Binary files /dev/null and b/bmtk-vb/bmtk/utils/io/__pycache__/cell_vars.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/io/__pycache__/cell_vars.cpython-37.pyc b/bmtk-vb/bmtk/utils/io/__pycache__/cell_vars.cpython-37.pyc
new file mode 100644
index 0000000..6a0e1fa
Binary files /dev/null and b/bmtk-vb/bmtk/utils/io/__pycache__/cell_vars.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/io/__pycache__/spike_trains.cpython-37.pyc b/bmtk-vb/bmtk/utils/io/__pycache__/spike_trains.cpython-37.pyc
new file mode 100644
index 0000000..d550496
Binary files /dev/null and b/bmtk-vb/bmtk/utils/io/__pycache__/spike_trains.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/io/_test_cell_vars.py~ b/bmtk-vb/bmtk/utils/io/_test_cell_vars.py~
new file mode 100644
index 0000000..ac01c7a
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/io/_test_cell_vars.py~
@@ -0,0 +1,3 @@
+from bmtk.builder.networks import NetworkBuilder
+
+# in cortical-column (add to PYTHONPATH)
diff --git a/bmtk-vb/bmtk/utils/io/cell_vars.py b/bmtk-vb/bmtk/utils/io/cell_vars.py
new file mode 100644
index 0000000..36fe8cf
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/io/cell_vars.py
@@ -0,0 +1,361 @@
+import os
+import h5py
+import numpy as np
+
+from bmtk.utils import io
+from bmtk.utils.sonata.utils import add_hdf5_magic, add_hdf5_version
+
+
+try:
+ from mpi4py import MPI
+ comm = MPI.COMM_WORLD
+ rank = comm.Get_rank()
+ nhosts = comm.Get_size()
+
+except Exception as exc:
+ pass
+
+
+class CellVarRecorder(object):
+ """Used to save cell membrane variables (V, Ca2+, etc) to the described hdf5 format.
+
+    For parallel simulations this class will write to a separate tmp file on each rank, then use the merge method to
+    combine the results. This is less efficient, but doesn't require the user to install mpi4py and build h5py in
+    parallel mode. For better performance use the CellVarRecorderParallel class instead.
+ """
+ _io = io
+
+ class DataTable(object):
+ """A small struct to keep track of different */data (and buffer) tables"""
+ def __init__(self, var_name):
+ self.var_name = var_name
+            # If buffering data, buffer_block will be an in-memory array that is written to data_block when
+            # filled. If not buffering, buffer_block is an hdf5 dataset and data_block is ignored
+ self.data_block = None
+ self.buffer_block = None
+
+ def __init__(self, file_name, tmp_dir, variables, buffer_data=True, mpi_rank=0, mpi_size=1):
+ self._file_name = file_name
+ self._h5_handle = None
+ self._tmp_dir = tmp_dir
+ self._variables = variables if isinstance(variables, list) else [variables]
+ self._n_vars = len(self._variables) # Used later to keep track if more than one var is saved to the same file.
+
+ self._mpi_rank = mpi_rank
+ self._mpi_size = mpi_size
+ self._tmp_files = []
+ self._saved_file = file_name
+
+ if mpi_size > 1 and not isinstance(self, CellVarRecorderParallel):
+ self._io.log_warning('Was unable to run h5py in parallel (mpi) mode.' +
+ ' Saving of membrane variable(s) may slow down.')
+ tmp_fname = os.path.basename(file_name) # make sure file names don't clash if there are multiple reports
+ self._tmp_files = [os.path.join(tmp_dir, '__bmtk_tmp_cellvars_{}_{}'.format(r, tmp_fname))
+ for r in range(self._mpi_size)]
+ self._file_name = self._tmp_files[self._mpi_rank]
+
+ self._mapping_gids = [] # list of gids in the order they appear in the data
+ self._gid_map = {} # table for looking up the gid offsets
+
+ self._mapping_element_ids = [] # sections
+ self._mapping_element_pos = [] # segments
+ self._mapping_index = [0] # index_pointer
+
+ self._buffer_data = buffer_data
+ self._data_blocks = {var_name: self.DataTable(var_name) for var_name in self._variables}
+ self._last_save_indx = 0 # for buffering, used to keep track of last timestep data was saved to disk
+
+ self._buffer_block_size = 0
+ self._total_steps = 0
+
+ # Keep track of gids across the different ranks
+ self._n_gids_all = 0
+ self._n_gids_local = 0
+ self._gids_beg = 0
+ self._gids_end = 0
+
+ # Keep track of segment counts across the different ranks
+ self._n_segments_all = 0
+ self._n_segments_local = 0
+ self._seg_offset_beg = 0
+ self._seg_offset_end = 0
+
+ self._tstart = 0.0
+ self._tstop = 0.0
+ self._dt = 0.01
+ self._is_initialized = False
+
+ @property
+ def tstart(self):
+ return self._tstart
+
+ @tstart.setter
+ def tstart(self, time_ms):
+ self._tstart = time_ms
+
+ @property
+ def tstop(self):
+ return self._tstop
+
+ @tstop.setter
+ def tstop(self, time_ms):
+ self._tstop = time_ms
+
+ @property
+ def dt(self):
+ return self._dt
+
+ @dt.setter
+ def dt(self, time_ms):
+ self._dt = time_ms
+
+ @property
+ def is_initialized(self):
+ return self._is_initialized
+
+ def _calc_offset(self):
+ self._n_segments_all = self._n_segments_local
+ self._seg_offset_beg = 0
+ self._seg_offset_end = self._n_segments_local
+
+ self._n_gids_all = self._n_gids_local
+ self._gids_beg = 0
+ self._gids_end = self._n_gids_local
+
+ def _create_h5_file(self):
+ self._h5_handle = h5py.File(self._file_name, 'w')
+ add_hdf5_version(self._h5_handle)
+ add_hdf5_magic(self._h5_handle)
+
+ def add_cell(self, gid, sec_list, seg_list):
+ assert(len(sec_list) == len(seg_list))
+ # TODO: Check the same gid isn't added twice
+ n_segs = len(seg_list)
+ self._gid_map[gid] = (self._n_segments_local, self._n_segments_local + n_segs)
+ self._mapping_gids.append(gid)
+ self._mapping_element_ids.extend(sec_list)
+ self._mapping_element_pos.extend(seg_list)
+ self._mapping_index.append(self._mapping_index[-1] + n_segs)
+ self._n_segments_local += n_segs
+ self._n_gids_local += 1
+
+ def _create_big_dataset(self, where, name, shape, dtype):
+ """
+        Create and return a dataset whose fill values are not written at creation time
+ """
+ spaceid = h5py.h5s.create_simple(shape)
+ plist = h5py.h5p.create(h5py.h5p.DATASET_CREATE)
+ plist.set_fill_time(h5py.h5d.FILL_TIME_NEVER)
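+        # FILL_TIME_NEVER skips writing the default fill value at allocation time,
+        # so creating a large dataset doesn't touch every chunk up front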
+ if shape[0] < 500 or shape[1] < 512:
+ chunkshape = shape
+ else:
+ chunkshape = (shape[0]/500, shape[1]/512) # TODO: don't use fixed values?
+ plist.set_chunk(chunkshape)
+ datasetid = h5py.h5d.create(where.id,name,h5py.h5t.NATIVE_FLOAT, spaceid, plist)
+ return h5py.Dataset(datasetid)
+
+ def initialize(self, n_steps, buffer_size=0):
+ self._calc_offset()
+ self._create_h5_file()
+
+ var_grp = self._h5_handle.create_group('/mapping')
+ var_grp.create_dataset('gids', shape=(self._n_gids_all,), dtype=np.uint)
+ var_grp.create_dataset('element_id', shape=(self._n_segments_all,), dtype=np.uint)
+ var_grp.create_dataset('element_pos', shape=(self._n_segments_all,), dtype=np.float)
+ var_grp.create_dataset('index_pointer', shape=(self._n_gids_all+1,), dtype=np.uint64)
+ var_grp.create_dataset('time', data=[self.tstart, self.tstop, self.dt])
+
+ var_grp['gids'][self._gids_beg:self._gids_end] = self._mapping_gids
+ var_grp['element_id'][self._seg_offset_beg:self._seg_offset_end] = self._mapping_element_ids
+ var_grp['element_pos'][self._seg_offset_beg:self._seg_offset_end] = self._mapping_element_pos
+ var_grp['index_pointer'][self._gids_beg:(self._gids_end+1)] = self._mapping_index
+
+ self._total_steps = n_steps
+ self._buffer_block_size = buffer_size
+ if not self._buffer_data:
+ # If data is not being buffered and instead written to the main block, we have to add a rank offset
+ # to the gid offset
+ for gid, gid_offset in self._gid_map.items():
+ self._gid_map[gid] = (gid_offset[0] + self._seg_offset_beg, gid_offset[1] + self._seg_offset_beg)
+
+ for var_name, data_tables in self._data_blocks.items():
+            # If users are trying to save multiple variables in the same file, put each data table in its own
+            # /{var} group (not sonata compliant). Otherwise the data table is located at the root
+ data_grp = self._h5_handle if self._n_vars == 1 else self._h5_handle.create_group('/{}'.format(var_name))
+ if self._buffer_data:
+ # Set up in-memory block to buffer recorded variables before writing to the dataset
+ data_tables.buffer_block = np.zeros((buffer_size, self._n_segments_local), dtype=np.float)
+ # data_tables.data_block = data_grp.create_dataset('data', shape=(n_steps, self._n_segments_all),
+ # dtype=np.float, chunks=True)
+ data_tables.data_block = self._create_big_dataset(data_grp, b'data', (n_steps, self._n_segments_all), np.float)
+ data_tables.data_block.attrs['variable_name'] = var_name
+ else:
+ # Since we are not buffering data, we just write directly to the on-disk dataset
+ data_tables.buffer_block = data_grp.create_dataset('data', shape=(n_steps, self._n_segments_all),
+ dtype=np.float, chunks=True)
+ data_tables.buffer_block.attrs['variable_name'] = var_name
+
+ self._is_initialized = True
+
+ def record_cell(self, gid, var_name, seg_vals, tstep):
+ """Record cell parameters.
+
+ :param gid: gid of cell.
+ :param var_name: name of variable being recorded.
+ :param seg_vals: list of all segment values
+ :param tstep: time step
+ """
+ gid_beg, gid_end = self._gid_map[gid]
+ buffer_block = self._data_blocks[var_name].buffer_block
+ update_index = (tstep - self._last_save_indx)
+ buffer_block[update_index, gid_beg:gid_end] = seg_vals
+
+ def record_cell_block(self, gid, var_name, seg_vals):
+ """Save cell parameters one block at a time
+
+ :param gid: gid of cell.
+ :param var_name: name of variable being recorded.
+ :param seg_vals: A vector/matrix of values being recorded
+ """
+ gid_beg, gid_end = self._gid_map[gid]
+ buffer_block = self._data_blocks[var_name].buffer_block
+ if gid_end - gid_beg == 1:
+ buffer_block[:, gid_beg] = seg_vals
+ else:
+ buffer_block[:, gid_beg:gid_end] = seg_vals
+
+ def flush(self):
+ """Move data from memory to dataset"""
+ if self._buffer_data:
+ blk_beg = self._last_save_indx
+ blk_end = blk_beg + self._buffer_block_size
+            if blk_end > self._total_steps:
+                # Need to handle the case that simulation doesn't end on a block step
+                blk_end = self._total_steps
+ seg_beg, seg_end = self._seg_offset_beg, self._seg_offset_end
+
+ block_size = blk_end - blk_beg
+ self._last_save_indx += block_size
+
+ for _, data_table in self._data_blocks.items():
+ dat, buf = data_table.data_block, data_table.buffer_block
+ dat[blk_beg:blk_end, seg_beg:seg_end] = buf[:block_size, :]
+
+ def close(self):
+ self._h5_handle.close()
+
+ def merge(self):
+ if self._mpi_size > 1 and self._mpi_rank == 0:
+ h5final = h5py.File(self._saved_file, 'w')
+ tmp_h5_handles = [h5py.File(name, 'r') for name in self._tmp_files]
+
+ # Find the gid and segment offsets for each temp h5 file
+ gid_ranges = [] # list of (gid-beg, gid-end)
+ gid_offset = 0
+ total_gid_count = 0 # total number of gids across all ranks
+
+ seg_ranges = []
+ seg_offset = 0
+ total_seg_count = 0 # total number of segments across all ranks
+ time_ds = None
+ for h5_tmp in tmp_h5_handles:
+ seg_count = len(h5_tmp['/mapping/element_pos'])
+ seg_ranges.append((seg_offset, seg_offset+seg_count))
+ seg_offset += seg_count
+ total_seg_count += seg_count
+
+ gid_count = len(h5_tmp['mapping/gids'])
+ gid_ranges.append((gid_offset, gid_offset+gid_count))
+ gid_offset += gid_count
+ total_gid_count += gid_count
+
+ time_ds = h5_tmp['mapping/time']
+
+ mapping_grp = h5final.create_group('mapping')
+ if time_ds:
+ mapping_grp.create_dataset('time', data=time_ds)
+ element_id_ds = mapping_grp.create_dataset('element_id', shape=(total_seg_count,), dtype=np.uint)
+ el_pos_ds = mapping_grp.create_dataset('element_pos', shape=(total_seg_count,), dtype=np.float)
+ gids_ds = mapping_grp.create_dataset('gids', shape=(total_gid_count,), dtype=np.uint)
+ index_pointer_ds = mapping_grp.create_dataset('index_pointer', shape=(total_gid_count+1,), dtype=np.uint)
+
+ # combine the /mapping datasets
+ for i, h5_tmp in enumerate(tmp_h5_handles):
+ tmp_mapping_grp = h5_tmp['mapping']
+ beg, end = seg_ranges[i]
+ element_id_ds[beg:end] = tmp_mapping_grp['element_id']
+ el_pos_ds[beg:end] = tmp_mapping_grp['element_pos']
+
+ # shift the index pointer values
+ index_pointer = np.array(tmp_mapping_grp['index_pointer'])
+ update_index = beg + index_pointer
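+                # e.g. a temp file whose segments start at merged offset 10 maps
+                # its local index_pointer [0, 2, 5] to [10, 12, 15]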
+
+ beg, end = gid_ranges[i]
+ gids_ds[beg:end] = tmp_mapping_grp['gids']
+ index_pointer_ds[beg:(end+1)] = update_index
+
+ # combine the /var/data datasets
+ for var_name in self._variables:
+ data_name = '/data' if self._n_vars == 1 else '/{}/data'.format(var_name)
+ # data_name = '/{}/data'.format(var_name)
+ var_data = h5final.create_dataset(data_name, shape=(self._total_steps, total_seg_count), dtype=np.float)
+ var_data.attrs['variable_name'] = var_name
+ for i, h5_tmp in enumerate(tmp_h5_handles):
+ beg, end = seg_ranges[i]
+ var_data[:, beg:end] = h5_tmp[data_name]
+
+ for tmp_file in self._tmp_files:
+ os.remove(tmp_file)
+
+
+class CellVarRecorderParallel(CellVarRecorder):
+ """
+    Unlike the parent class, this takes advantage of parallel h5py to write to the results file across different ranks.
+
+ """
+ def __init__(self, file_name, tmp_dir, variables, buffer_data=True, mpi_rank=0, mpi_size=1):
+ super(CellVarRecorderParallel, self).__init__(
+ file_name, tmp_dir, variables, buffer_data=buffer_data,
+ mpi_rank=mpi_rank, mpi_size=mpi_size
+ )
+
+ def _calc_offset(self):
+        # iterate through the ranks, letting rank r determine its offset from rank r-1
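+        # e.g. with per-rank segment counts [3, 5, 2], ranks 0..2 end up with
+        # (seg_offset_beg, seg_offset_end) = (0, 3), (3, 8), (8, 10), and the
+        # broadcast below gives n_segments_all == 10 on every rank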
+ for r in range(comm.Get_size()):
+ if rank == r:
+ if rank > 0:
+ # get num of segments and gids from prev. rank and calculate offsets
+ offsets = np.empty(2, dtype=np.uint)
+ comm.Recv([offsets, MPI.UNSIGNED_INT], source=(r-1))
+ self._seg_offset_beg = offsets[0]
+ self._gids_beg = offsets[1]
+
+                # for some reason, np.uint64 + int = np.float64, so we need to cast to int
+ self._seg_offset_end = int(self._seg_offset_beg) \
+ + int(self._n_segments_local)
+ self._gids_end = int(self._gids_beg) + int(self._n_gids_local)
+
+ if rank < (nhosts - 1):
+ # pass the next rank its offset
+ offsets = np.array([self._seg_offset_end, self._gids_end], dtype=np.uint)
+ comm.Send([offsets, MPI.UNSIGNED_INT], dest=(rank+1))
+
+ comm.Barrier()
+
+ # broadcast the total num of gids/segments from the final rank to all the others
+ if rank == (nhosts - 1):
+ total_counts = np.array([self._seg_offset_end, self._gids_end], dtype=np.uint)
+ else:
+ total_counts = np.empty(2, dtype=np.uint)
+
+ comm.Bcast(total_counts, root=(nhosts-1))
+ self._n_segments_all = total_counts[0]
+ self._n_gids_all = total_counts[1]
+
+ def _create_h5_file(self):
+ self._h5_handle = h5py.File(self._file_name, 'w', driver='mpio', comm=MPI.COMM_WORLD)
+ add_hdf5_version(self._h5_handle)
+ add_hdf5_magic(self._h5_handle)
+
+ def merge(self):
+ pass
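+
+# Minimal usage sketch (hypothetical gids/values; serial run, so the base
+# CellVarRecorder with default mpi_rank=0, mpi_size=1 applies):
+#
+#   recorder = CellVarRecorder('cell_vars.h5', '/tmp', 'v')
+#   recorder.add_cell(gid=0, sec_list=[0, 1], seg_list=[0.5, 0.5])
+#   recorder.initialize(n_steps=1000, buffer_size=100)
+#   for step in range(1000):
+#       recorder.record_cell(0, 'v', seg_vals=[-70.0, -65.0], tstep=step)
+#       if (step + 1) % 100 == 0:
+#           recorder.flush()
+#   recorder.close()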
diff --git a/bmtk-vb/bmtk/utils/io/cell_vars.pyc b/bmtk-vb/bmtk/utils/io/cell_vars.pyc
new file mode 100644
index 0000000..6b5bdfb
Binary files /dev/null and b/bmtk-vb/bmtk/utils/io/cell_vars.pyc differ
diff --git a/bmtk-vb/bmtk/utils/io/firing_rates.py b/bmtk-vb/bmtk/utils/io/firing_rates.py
new file mode 100644
index 0000000..827cc21
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/io/firing_rates.py
@@ -0,0 +1,35 @@
+import pandas as pd
+import csv
+
+class RatesInput(object):
+ def __init__(self, params):
+ self._rates_df = pd.read_csv(params['rates'], sep=' ')
+ self._node_population = params['node_set']
+ self._rates_dict = {int(row['gid']): row['firing_rate'] for _, row in self._rates_df.iterrows()}
+
+ @property
+ def populations(self):
+ return [self._node_population]
+
+ def get_rate(self, gid):
+ return self._rates_dict[gid]
+
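+
+# The rates file is assumed to be space-separated with 'gid' and 'firing_rate'
+# columns (inferred from the reader above), e.g.:
+#
+#   gid firing_rate
+#   0 15.0
+#   1 8.5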
+
+class RatesWriter(object):
+ def __init__(self, file_name):
+ self._file_name = file_name
+ self._fhandle = open(file_name, 'a')
+ self._csv_writer = csv.writer(self._fhandle, delimiter=' ')
+
+ def add_rates(self, gid, times, rates):
+ for t, r in zip(times, rates):
+ self._csv_writer.writerow([gid, t, r])
+ self._fhandle.flush()
+
+ def to_csv(self, file_name):
+ pass
+
+ def to_h5(self, file_name):
+ raise NotImplementedError
+
+
diff --git a/bmtk-vb/bmtk/utils/io/orig_cell_vars.py b/bmtk-vb/bmtk/utils/io/orig_cell_vars.py
new file mode 100644
index 0000000..b84b293
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/io/orig_cell_vars.py
@@ -0,0 +1,356 @@
+import os
+import h5py
+import numpy as np
+from pynwb import NWBFile
+
+from bmtk.utils import io
+from bmtk.utils.sonata.utils import add_hdf5_magic, add_hdf5_version
+
+
+try:
+ from mpi4py import MPI
+ comm = MPI.COMM_WORLD
+ rank = comm.Get_rank()
+ nhosts = comm.Get_size()
+
+except Exception as exc:
+ pass
+
+
+class CellVarRecorder(object):
+ """Used to save cell membrane variables (V, Ca2+, etc) to the described hdf5 format.
+
+    For parallel simulations this class will write to a separate tmp file on each rank, then use the merge method to
+    combine the results. This is less efficient, but doesn't require the user to install mpi4py and build h5py in
+    parallel mode. For better performance use the CellVarRecorderParallel class instead.
+ """
+ _io = io
+
+ class DataTable(object):
+ """A small struct to keep track of different */data (and buffer) tables"""
+ def __init__(self, var_name):
+ self.var_name = var_name
+ # If buffering data, buffer_block will be an in-memory array that is written out to data_block when
+ # filled. If not buffering, buffer_block is an hdf5 dataset and data_block is ignored
+ self.data_block = None
+ self.buffer_block = None
+
+ def __init__(self, file_name, tmp_dir, variables, buffer_data=True, mpi_rank=0, mpi_size=1):
+ self._file_name = file_name
+ self._h5_handle = None
+ self._tmp_dir = tmp_dir
+ self._variables = variables if isinstance(variables, list) else [variables]
+ self._n_vars = len(self._variables)  # Used later to keep track of whether more than one var is saved to the same file.
+
+ self._mpi_rank = mpi_rank
+ self._mpi_size = mpi_size
+ self._tmp_files = []
+ self._saved_file = file_name
+
+ if mpi_size > 1:
+ self._io.log_warning('Was unable to run h5py in parallel (mpi) mode.' +
+ ' Saving of membrane variable(s) may slow down.')
+ tmp_fname = os.path.basename(file_name) # make sure file names don't clash if there are multiple reports
+ self._tmp_files = [os.path.join(tmp_dir, '__bmtk_tmp_cellvars_{}_{}'.format(r, tmp_fname))
+ for r in range(self._mpi_size)]
+ self._file_name = self._tmp_files[self._mpi_rank]
+
+ self._mapping_gids = [] # list of gids in the order they appear in the data
+ self._gid_map = {} # table for looking up the gid offsets
+ self._map_attrs = {}  # Used for additional attributes in /mapping
+
+ self._mapping_element_ids = [] # sections
+ self._mapping_element_pos = [] # segments
+ self._mapping_index = [0] # index_pointer
+
+ self._buffer_data = buffer_data
+ self._data_blocks = {var_name: self.DataTable(var_name) for var_name in self._variables}
+ self._last_save_indx = 0  # for buffering; keeps track of the last timestep at which data was saved to disk
+
+ self._buffer_block_size = 0
+ self._total_steps = 0
+
+ # Keep track of gids across the different ranks
+ self._n_gids_all = 0
+ self._n_gids_local = 0
+ self._gids_beg = 0
+ self._gids_end = 0
+
+ # Keep track of segment counts across the different ranks
+ self._n_segments_all = 0
+ self._n_segments_local = 0
+ self._seg_offset_beg = 0
+ self._seg_offset_end = 0
+
+ self._tstart = 0.0
+ self._tstop = 0.0
+ self._dt = 0.01
+ self._is_initialized = False
+
+ @property
+ def tstart(self):
+ return self._tstart
+
+ @tstart.setter
+ def tstart(self, time_ms):
+ self._tstart = time_ms
+
+ @property
+ def tstop(self):
+ return self._tstop
+
+ @tstop.setter
+ def tstop(self, time_ms):
+ self._tstop = time_ms
+
+ @property
+ def dt(self):
+ return self._dt
+
+ @dt.setter
+ def dt(self, time_ms):
+ self._dt = time_ms
+
+ @property
+ def is_initialized(self):
+ return self._is_initialized
+
+ def _calc_offset(self):
+ self._n_segments_all = self._n_segments_local
+ self._seg_offset_beg = 0
+ self._seg_offset_end = self._n_segments_local
+
+ self._n_gids_all = self._n_gids_local
+ self._gids_beg = 0
+ self._gids_end = self._n_gids_local
+
+ def _create_h5_file(self):
+ self._h5_handle = h5py.File(self._file_name, 'w')
+ add_hdf5_version(self._h5_handle)
+ add_hdf5_magic(self._h5_handle)
+
+ def add_cell(self, gid, sec_list, seg_list, **map_attrs):
+ assert(len(sec_list) == len(seg_list))
+ # TODO: Check the same gid isn't added twice
+ n_segs = len(seg_list)
+ self._gid_map[gid] = (self._n_segments_local, self._n_segments_local + n_segs)
+ self._mapping_gids.append(gid)
+ self._mapping_element_ids.extend(sec_list)
+ self._mapping_element_pos.extend(seg_list)
+ self._mapping_index.append(self._mapping_index[-1] + n_segs)
+ self._n_segments_local += n_segs
+ self._n_gids_local += 1
+ for k, v in map_attrs.items():
+ if k not in self._map_attrs:
+ self._map_attrs[k] = v
+ else:
+ self._map_attrs[k].extend(v)
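+ # Example: add_cell(gid=7, sec_list=[0, 0, 1], seg_list=[0.25, 0.75, 0.5]) followed by
+ # add_cell(gid=9, sec_list=[0], seg_list=[0.5]) gives _gid_map == {7: (0, 3), 9: (3, 4)}
+ # and _mapping_index == [0, 3, 4] -- a CSR-style index over the per-segment columns.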
+
+ def initialize(self, n_steps, buffer_size=0):
+ self._calc_offset()
+ self._create_h5_file()
+
+ var_grp = self._h5_handle.create_group('/mapping')
+ var_grp.create_dataset('gids', shape=(self._n_gids_all,), dtype=np.uint)
+ var_grp.create_dataset('element_id', shape=(self._n_segments_all,), dtype=np.uint)
+ var_grp.create_dataset('element_pos', shape=(self._n_segments_all,), dtype=np.float)
+ var_grp.create_dataset('index_pointer', shape=(self._n_gids_all+1,), dtype=np.uint64)
+ var_grp.create_dataset('time', data=[self.tstart, self.tstop, self.dt])
+ for k, v in self._map_attrs.items():
+ var_grp.create_dataset(k, shape=(self._n_segments_all,), dtype=type(v[0]))
+
+ var_grp['gids'][self._gids_beg:self._gids_end] = self._mapping_gids
+ var_grp['element_id'][self._seg_offset_beg:self._seg_offset_end] = self._mapping_element_ids
+ var_grp['element_pos'][self._seg_offset_beg:self._seg_offset_end] = self._mapping_element_pos
+ var_grp['index_pointer'][self._gids_beg:(self._gids_end+1)] = self._mapping_index
+ for k, v in self._map_attrs.items():
+ var_grp[k][self._seg_offset_beg:self._seg_offset_end] = v
+
+ self._total_steps = n_steps
+ self._buffer_block_size = buffer_size
+ if not self._buffer_data:
+ # If data is not being buffered and instead written to the main block, we have to add a rank offset
+ # to the gid offset
+ for gid, gid_offset in self._gid_map.items():
+ self._gid_map[gid] = (gid_offset[0] + self._seg_offset_beg, gid_offset[1] + self._seg_offset_beg)
+
+ for var_name, data_tables in self._data_blocks.items():
+ # If users are trying to save multiple variables in the same file, put each data table in its own
+ # /{var} group (not sonata compliant). Otherwise the data table is located at the root
+ data_grp = self._h5_handle if self._n_vars == 1 else self._h5_handle.create_group('/{}'.format(var_name))
+ if self._buffer_data:
+ # Set up in-memory block to buffer recorded variables before writing to the dataset
+ data_tables.buffer_block = np.zeros((buffer_size, self._n_segments_local), dtype=np.float)
+ data_tables.data_block = data_grp.create_dataset('data', shape=(n_steps, self._n_segments_all),
+ dtype=np.float, chunks=True)
+ data_tables.data_block.attrs['variable_name'] = var_name
+ else:
+ # Since we are not buffering data, we just write directly to the on-disk dataset
+ data_tables.buffer_block = data_grp.create_dataset('data', shape=(n_steps, self._n_segments_all),
+ dtype=np.float, chunks=True)
+ data_tables.buffer_block.attrs['variable_name'] = var_name
+
+ self._is_initialized = True
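+ # The resulting hdf5 layout (single-variable case) is roughly:
+ #   /data                  (n_steps x n_segments), attrs['variable_name']
+ #   /mapping/gids          (n_gids,)
+ #   /mapping/element_id    (n_segments,)   section ids
+ #   /mapping/element_pos   (n_segments,)   segment positions
+ #   /mapping/index_pointer (n_gids+1,)     gid -> column range
+ #   /mapping/time          [tstart, tstop, dt]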
+
+ def record_cell(self, gid, var_name, seg_vals, tstep):
+ """Record cell parameters.
+
+ :param gid: gid of cell.
+ :param var_name: name of variable being recorded.
+ :param seg_vals: list of all segment values
+ :param tstep: time step
+ """
+ gid_beg, gid_end = self._gid_map[gid]
+ buffer_block = self._data_blocks[var_name].buffer_block
+ update_index = (tstep - self._last_save_indx)
+ buffer_block[update_index, gid_beg:gid_end] = seg_vals
+
+ def record_cell_block(self, gid, var_name, seg_vals):
+ """Save cell parameters one block at a time
+
+ :param gid: gid of cell.
+ :param var_name: name of variable being recorded.
+ :param seg_vals: A vector/matrix of values being recorded
+ """
+ gid_beg, gid_end = self._gid_map[gid]
+ buffer_block = self._data_blocks[var_name].buffer_block
+ if gid_end - gid_beg == 1:
+ buffer_block[:, gid_beg] = seg_vals
+ else:
+ buffer_block[:, gid_beg:gid_end] = seg_vals
+
+ def flush(self):
+ """Move data from memory to dataset"""
+ if self._buffer_data:
+ blk_beg = self._last_save_indx
+ blk_end = blk_beg + self._buffer_block_size
+ if blk_end > self._total_steps:
+ # handle the case where the simulation doesn't end on a block boundary
+ blk_end = self._total_steps
+
+ block_size = blk_end - blk_beg
+ self._last_save_indx += block_size
+
+ for _, data_table in self._data_blocks.items():
+ data_table.data_block[blk_beg:blk_end, :] = data_table.buffer_block[:block_size, :]
+
+ def close(self):
+ self._h5_handle.close()
+
+ def merge(self):
+ if self._mpi_size > 1 and self._mpi_rank == 0:
+ h5final = h5py.File(self._saved_file, 'w')
+ tmp_h5_handles = [h5py.File(name, 'r') for name in self._tmp_files]
+
+ # Find the gid and segment offsets for each temp h5 file
+ gid_ranges = [] # list of (gid-beg, gid-end)
+ gid_offset = 0
+ total_gid_count = 0 # total number of gids across all ranks
+
+ seg_ranges = []
+ seg_offset = 0
+ total_seg_count = 0 # total number of segments across all ranks
+ time_ds = None
+ for h5_tmp in tmp_h5_handles:
+ seg_count = len(h5_tmp['/mapping/element_pos'])
+ seg_ranges.append((seg_offset, seg_offset+seg_count))
+ seg_offset += seg_count
+ total_seg_count += seg_count
+
+ gid_count = len(h5_tmp['mapping/gids'])
+ gid_ranges.append((gid_offset, gid_offset+gid_count))
+ gid_offset += gid_count
+ total_gid_count += gid_count
+
+ time_ds = h5_tmp['mapping/time']
+
+ mapping_grp = h5final.create_group('mapping')
+ if time_ds is not None:
+ mapping_grp.create_dataset('time', data=time_ds)
+ element_id_ds = mapping_grp.create_dataset('element_id', shape=(total_seg_count,), dtype=np.uint)
+ el_pos_ds = mapping_grp.create_dataset('element_pos', shape=(total_seg_count,), dtype=np.float)
+ gids_ds = mapping_grp.create_dataset('gids', shape=(total_gid_count,), dtype=np.uint)
+ index_pointer_ds = mapping_grp.create_dataset('index_pointer', shape=(total_gid_count+1,), dtype=np.uint)
+ for k, v in self._map_attrs.items():
+ mapping_grp.create_dataset(k, shape=(total_seg_count,), dtype=type(v[0]))
+
+ # combine the /mapping datasets
+ for i, h5_tmp in enumerate(tmp_h5_handles):
+ tmp_mapping_grp = h5_tmp['mapping']
+ beg, end = seg_ranges[i]
+ element_id_ds[beg:end] = tmp_mapping_grp['element_id']
+ el_pos_ds[beg:end] = tmp_mapping_grp['element_pos']
+ for k in self._map_attrs.keys():
+ # read each extra mapping attribute back from this rank's tmp file range
+ mapping_grp[k][beg:end] = tmp_mapping_grp[k]
+
+ # shift the index pointer values
+ index_pointer = np.array(tmp_mapping_grp['index_pointer'])
+ update_index = beg + index_pointer
+
+ beg, end = gid_ranges[i]
+ gids_ds[beg:end] = tmp_mapping_grp['gids']
+ index_pointer_ds[beg:(end+1)] = update_index
+
+
+ # combine the /var/data datasets
+ for var_name in self._variables:
+ data_name = '/data' if self._n_vars == 1 else '/{}/data'.format(var_name)
+ # data_name = '/{}/data'.format(var_name)
+ var_data = h5final.create_dataset(data_name, shape=(self._total_steps, total_seg_count), dtype=np.float)
+ var_data.attrs['variable_name'] = var_name
+ for i, h5_tmp in enumerate(tmp_h5_handles):
+ beg, end = seg_ranges[i]
+ var_data[:, beg:end] = h5_tmp[data_name]
+
+ for tmp_file in self._tmp_files:
+ os.remove(tmp_file)
+
+
+class CellVarRecorderParallel(CellVarRecorder):
+ """
+ Unlike the parent, this class takes advantage of parallel h5py to write to the results file across different ranks.
+
+ """
+ def __init__(self, file_name, tmp_dir, variables, buffer_data=True):
+ super(CellVarRecorderParallel, self).__init__(file_name, tmp_dir, variables, buffer_data=buffer_data,
+ mpi_rank=0, mpi_size=1)
+
+ def _calc_offset(self):
+ # iterate through the ranks, letting rank r determine its offset from rank r-1
+ for r in range(comm.Get_size()):
+ if rank == r:
+ if rank > 0:
+ # get the segment and gid offsets from the previous rank
+ offsets = np.empty(2, dtype=np.uint)
+ comm.Recv([offsets, MPI.UNSIGNED_INT], source=(r-1))
+ self._seg_offset_beg = offsets[0]
+ self._gids_beg = offsets[1]
+
+ # for some reason, np.uint64 + int = np.float64, so need cast to int
+ self._seg_offset_end = int(self._seg_offset_beg) + int(self._n_segments_local)
+ self._gids_end = int(self._gids_beg) + int(self._n_gids_local)
+
+ if rank < (nhosts - 1):
+ # pass the cumulative offsets on to the next rank
+ offsets = np.array([self._seg_offset_end, self._gids_end], dtype=np.uint)
+ comm.Send([offsets, MPI.UNSIGNED_INT], dest=(rank+1))
+
+ comm.Barrier()
+
+ # broadcast the total num of gids/segments from the final rank to all the others
+ if rank == (nhosts - 1):
+ total_counts = np.array([self._seg_offset_end, self._gids_end], dtype=np.uint)
+ else:
+ total_counts = np.empty(2, dtype=np.uint)
+
+ comm.Bcast(total_counts, root=(nhosts-1))
+ self._n_segments_all = total_counts[0]
+ self._n_gids_all = total_counts[1]
+
+ def _create_h5_file(self):
+ self._h5_handle = h5py.File(self._file_name, 'w', driver='mpio', comm=MPI.COMM_WORLD)
+ add_hdf5_version(self._h5_handle)
+ add_hdf5_magic(self._h5_handle)
+
+ def merge(self):
+ pass
diff --git a/bmtk-vb/bmtk/utils/io/spike_trains.py b/bmtk-vb/bmtk/utils/io/spike_trains.py
new file mode 100644
index 0000000..73d9bfd
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/io/spike_trains.py
@@ -0,0 +1,312 @@
+import os
+import sys
+import csv
+
+import h5py
+import pandas as pd
+import numpy as np
+from bmtk.utils.sonata.utils import add_hdf5_magic, add_hdf5_version
+
+
+class SpikeTrainWriter(object):
+ class TmpFileMetadata(object):
+ def __init__(self, file_name, sort_order=None):
+ self.file_name = file_name
+ self.sort_order = sort_order
+
+ def __init__(self, tmp_dir, mpi_rank=0, mpi_size=1):
+ # For NEST/NEURON based simulations it is preferable not to use mpi4py, so let the parent simulator determine
+ # MPI rank and size
+ self._mpi_rank = mpi_rank
+ self._mpi_size = mpi_size
+
+ # used to temporarily save spike files, since for large simulations saving spikes in memory can crash the
+ # system. Requires the user to create the directory
+ self._tmp_dir = tmp_dir
+ if self._tmp_dir is None or not os.path.exists(self._tmp_dir):
+ raise Exception('Directory path {} does not exist'.format(self._tmp_dir))
+ self._all_tmp_files = [self.TmpFileMetadata(self._get_tmp_filename(r)) for r in range(mpi_size)]
+ # TODO: Determine best buffer size.
+ self._tmp_file_handle = open(self._all_tmp_files[mpi_rank].file_name, 'w')
+
+ self._tmp_spikes_handles = []  # used when sorting multiple files
+ self._spike_count = -1
+
+ # NEST gid files use tab separators and a different column order for tmp spike files.
+ self.delimiter = ' ' # delimiter for temporary file
+ self.time_col = 0
+ self.gid_col = 1
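+ # each tmp-file row is '<time> <gid>' (see add_spike), e.g. '25.300000 14'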
+
+ def _get_tmp_filename(self, rank):
+ return os.path.join(self._tmp_dir, '_bmtk_tmp_spikes_{}.csv'.format(rank))
+
+ def _count_spikes(self):
+ if self._mpi_rank == 0:
+ if self._spike_count > -1:
+ return self._spike_count
+
+ self._spike_count = 0
+ for tmp_file in self._all_tmp_files:
+ with open(tmp_file.file_name, 'r') as csvfile:
+ csv_reader = csv.reader(csvfile, delimiter=self.delimiter)
+ self._spike_count += sum(1 for _ in csv_reader)
+
+ def _sort_tmp_file(self, filedata, sort_order):
+ # For now load spikes into pandas; it's the fastest way, but may be an issue for memory
+ if sort_order is None or filedata.sort_order == sort_order:
+ return
+
+ file_name = filedata.file_name
+ tmp_spikes_ds = pd.read_csv(file_name, sep=' ', names=['time', 'gid'])
+ tmp_spikes_ds = tmp_spikes_ds.sort_values(by=sort_order)
+ tmp_spikes_ds.to_csv(file_name, sep=' ', index=False, header=False)
+ filedata.sort_order = sort_order
+
+ def _next_spike(self, rank):
+ try:
+ val = next(self._tmp_spikes_handles[rank])
+ # cast so the time/gid comparisons in _sort_files are numeric, not lexicographic
+ return float(val[0]), int(val[1]), rank
+ except StopIteration:
+ return None
+
+ def add_spike(self, time, gid):
+ self._tmp_file_handle.write('{:.6f} {}\n'.format(time, gid))
+
+ def add_spikes(self, times, gid):
+ for t in times:
+ self.add_spike(t, gid)
+
+ def add_spikes_file(self, file_name, sort_order=None):
+ self._all_tmp_files.append(self.TmpFileMetadata(file_name, sort_order))
+
+ def _sort_files(self, sort_order, sort_column, file_write_fnc):
+ self._tmp_spikes_handles = []
+ for fdata in self._all_tmp_files:
+ self._sort_tmp_file(fdata, sort_order)
+ self._tmp_spikes_handles.append(csv.reader(open(fdata.file_name, 'r'), delimiter=self.delimiter))
+
+ spikes = []
+ for rank in range(len(self._tmp_spikes_handles)): # range(self._mpi_size):
+ spike = self._next_spike(rank)
+ if spike is not None:
+ spikes.append(spike)
+
+ # Iterate through all the ranks and find the first spike. Write that spike/gid to the output, then
+ # replace that data point with the next spike on the selected rank
+ indx = 0
+ while spikes:
+ # find which rank has the first spike
+ selected_index = 0
+ selected_val = spikes[0][sort_column]
+ for i, spike in enumerate(spikes[1:]):
+ if spike[sort_column] < selected_val:
+ selected_index = i + 1
+ selected_val = spike[sort_column]
+
+ # write the spike to the file
+ row = spikes.pop(selected_index)
+ file_write_fnc(float(row[self.time_col]), int(row[self.gid_col]), indx)
+ indx += 1
+
+ # get the next spike on that rank and replace in spikes table
+ another_spike = self._next_spike(row[2])
+ if another_spike is not None:
+ spikes.append(another_spike)
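+ # Note: each iteration above scans every open file for the minimum, i.e. O(total_spikes * n_files);
+ # a heap (e.g. heapq.merge over the pre-sorted readers) would be the usual alternative if the
+ # number of ranks/files grows large.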
+
+ def _merge_files(self, file_write_fnc):
+ indx = 0
+ for fdata in self._all_tmp_files:
+ if not os.path.exists(fdata.file_name):
+ continue
+
+ with open(fdata.file_name, 'r') as csv_file:
+ csv_reader = csv.reader(csv_file, delimiter=self.delimiter)
+ for row in csv_reader:
+ file_write_fnc(float(row[self.time_col]), int(row[self.gid_col]), indx)
+ indx += 1
+
+ def _to_file(self, file_name, sort_order, file_write_fnc):
+ if sort_order is None:
+ sort_column = 0
+ elif sort_order == 'time':
+ sort_column = self.time_col
+ elif sort_order == 'gid':
+ sort_column = self.gid_col
+ else:
+ raise Exception('Unknown sort order {}'.format(sort_order))
+
+ # TODO: Need to make sure an MPI_Barrier is called beforehand
+ self._tmp_file_handle.close()
+ if self._mpi_rank == 0:
+ if sort_order is not None:
+ self._sort_files(sort_order, sort_column, file_write_fnc)
+ else:
+ self._merge_files(file_write_fnc)
+
+ def to_csv(self, csv_file, sort_order=None, gid_map=None):
+ # TODO: Need to call flush and then barrier
+ if self._mpi_rank == 0:
+ # For the single rank case don't just copy the tmp-csv to the new name. It will fail if user calls to_hdf5
+ # or to_nwb after calling to_csv.
+ self._count_spikes()
+ csv_handle = open(csv_file, 'w')
+ csv_writer = csv.writer(csv_handle, delimiter=' ')
+
+ def file_write_fnc_identity(time, gid, indx):
+ csv_writer.writerow([time, gid])
+
+ def file_write_fnc_transform(time, gid, indx):
+ # For the case when NEURON/NEST ids don't match with the user's gid table
+ csv_writer.writerow([time, gid_map[gid]])
+
+ file_write_fnc = file_write_fnc_identity if gid_map is None else file_write_fnc_transform
+ self._to_file(csv_file, sort_order, file_write_fnc)
+ csv_handle.close()
+
+ # TODO: Let user pass in in barrier and use it here
+
+ def to_nwb(self, nwb_file):
+ raise NotImplementedError
+
+ def to_hdf5(self, hdf5_file, sort_order=None, gid_map=None):
+ if self._mpi_rank == 0:
+ with h5py.File(hdf5_file, 'w') as h5:
+ add_hdf5_magic(h5)
+ add_hdf5_version(h5)
+
+ self._count_spikes()
+ spikes_grp = h5.create_group('/spikes')
+ spikes_grp.attrs['sorting'] = 'none' if sort_order is None else sort_order
+ time_ds = spikes_grp.create_dataset('timestamps', shape=(self._spike_count,), dtype=np.float)
+ gid_ds = spikes_grp.create_dataset('gids', shape=(self._spike_count,), dtype=np.uint64)
+
+ def file_write_fnc_identity(time, gid, indx):
+ time_ds[indx] = time
+ gid_ds[indx] = gid
+
+ def file_write_fnc_transform(time, gid, indx):
+ time_ds[indx] = time
+ gid_ds[indx] = gid_map[gid]
+
+ file_write_fnc = file_write_fnc_identity if gid_map is None else file_write_fnc_transform
+ self._to_file(hdf5_file, sort_order, file_write_fnc)
+
+ # TODO: Need to make sure a barrier is used here (before close is called)
+
+ def flush(self):
+ self._tmp_file_handle.flush()
+
+ def close(self):
+ if self._mpi_rank == 0:
+ for tmp_file in self._all_tmp_files:
+ if os.path.exists(tmp_file.file_name):
+ os.remove(tmp_file.file_name)
+
+
+class PoissonSpikesGenerator(object):
+ def __init__(self, gids, firing_rate, tstart=0.0, tstop=1000.0):
+ self._gids = gids
+ self._firing_rate = firing_rate / 1000.0
+ self._tstart = tstart
+ self._tstop = tstop
+
+ def to_hdf5(self, file_name, sort_order='gid'):
+ if sort_order == 'gid':
+ gid_list = []
+ times_list = []
+ for gid in self._gids:
+ c_time = self._tstart
+ while c_time < self._tstop:
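+ # inverse-transform sample of an exponential inter-spike interval;
+ # equivalent to np.random.exponential(1.0 / self._firing_rate)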
+ interval = -np.log(1.0 - np.random.uniform()) / self._firing_rate
+ c_time += interval
+ gid_list.append(gid)
+ times_list.append(c_time)
+
+ with h5py.File(file_name, 'w') as h5:
+ h5.create_dataset('/spikes/gids', data=gid_list, dtype=np.uint)
+ h5.create_dataset('/spikes/timestamps', data=times_list, dtype=np.float)
+ h5['/spikes'].attrs['sorting'] = 'by_gid'
+
+ else:
+ raise NotImplementedError
+
+
+class SpikesInput(object):
+ def get_spikes(self, gid):
+ raise NotImplementedError()
+
+ @staticmethod
+ def load(name, module, input_type, params):
+ module_lc = module.lower()
+ if module_lc == 'nwb':
+ return SpikesInputNWBv1(name, module, input_type, params)
+ elif module_lc == 'h5' or module_lc == 'hdf5':
+ return SpikesInputH5(name, module, input_type, params)
+ elif module_lc == 'csv':
+ return SpikesInputCSV(name, module, input_type, params)
+ else:
+ raise Exception('Unable to load spikes for module type {}'.format(module))
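+ # Illustrative call (param keys follow the loaders below):
+ #   spikes = SpikesInput.load('ext_inputs', module='h5', input_type='spikes',
+ #                             params={'input_file': 'inputs/spikes.h5'})
+ #   spike_times = spikes.get_spikes(gid=0)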
+
+
+class SpikesInputNWBv1(SpikesInput):
+ def __init__(self, name, module, input_type, params):
+ self.input_file = params['input_file']
+ self._h5_handle = h5py.File(self.input_file, 'r')
+
+ if 'trial' in params:
+ self.trial = params['trial']
+ self._spike_trains_handles = {}
+ for node_id, h5grp in self._h5_handle['processing'][self.trial]['spike_train'].items():
+ self._spike_trains_handles[int(node_id)] = h5grp['data']
+
+ elif '/spikes' in self._h5_handle:
+ raise Exception('Found /spikes group in {}; load it with the h5/hdf5 module instead'.format(self.input_file))
+
+ def get_spikes(self, gid):
+ return np.array(self._spike_trains_handles[gid])
+
+
+class SpikesInputH5(SpikesInput):
+ def __init__(self, name, module, input_type, params):
+ self._input_file = params['input_file']
+ self._h5_handle = h5py.File(self._input_file, 'r')
+ self._sort_order = self._h5_handle['/spikes'].attrs.get('sorting', None)
+ if sys.version_info[0] >= 3 and isinstance(self._sort_order, bytes):
+ # h5py attributes return str in py 2, bytes in py 3
+ self._sort_order = self._sort_order.decode()
+
+ self._gid_ds = self._h5_handle['/spikes/gids']
+ self._timestamps_ds = self._h5_handle['/spikes/timestamps']
+
+ self._gid_indicies = {}
+ self._build_indicies()
+
+ def _build_indicies(self):
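+ # build a gid -> slice lookup over the timestamps dataset in a single pass; assumes all
+ # spikes for a gid are stored contiguously (the 'by_gid' sorting)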
+ if self._sort_order == 'by_gid':
+ indx_beg = 0
+ c_gid = self._gid_ds[0]
+ for indx, gid in enumerate(self._gid_ds):
+ if gid != c_gid:
+ self._gid_indicies[c_gid] = slice(indx_beg, indx)
+ c_gid = gid
+ indx_beg = indx
+ self._gid_indicies[c_gid] = slice(indx_beg, indx+1)
+
+ else:
+ raise NotImplementedError
+
+ def get_spikes(self, gid):
+ if gid in self._gid_indicies:
+ return self._timestamps_ds[self._gid_indicies[gid]]
+ else:
+ return []
+
+
+class SpikesInputCSV(SpikesInput):
+ def __init__(self, name, module, input_type, params):
+ self._spikes_df = pd.read_csv(params['input_file'], index_col='gid', sep=' ')
+
+ def get_spikes(self, gid):
+ spike_times_str = self._spikes_df.loc[gid]['spike-times']
+ return np.array(spike_times_str.split(','), dtype=float)
diff --git a/bmtk-vb/bmtk/utils/io/spike_trains.pyc b/bmtk-vb/bmtk/utils/io/spike_trains.pyc
new file mode 100644
index 0000000..7b8de68
Binary files /dev/null and b/bmtk-vb/bmtk/utils/io/spike_trains.pyc differ
diff --git a/bmtk-vb/bmtk/utils/io/tabular_network.py b/bmtk-vb/bmtk/utils/io/tabular_network.py
new file mode 100644
index 0000000..9b594bd
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/io/tabular_network.py
@@ -0,0 +1,350 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import h5py
+
+
+"""
+An interface for reading network files.
+
+We are continuing to develop the network file format, and this interface is a way to provide backward compatibility.
+This namespace should not be instantiated directly, and updates to the network standard should be given their own
+implementations. The classes TabularNetwork, NodeRow, NodesFile, EdgeRow and EdgesFile are abstract and should be
+overridden.
+
+In general the developed formats all share the same schema:
+ * Networks are split between nodes (NodesFile) and edges (EdgesFile)
+ * Each type is made up of rows (NodeRow, EdgeRow)
+ * Each row has its own set of column properties (ColumnProperty), depending on the file/group it belongs to.
+ * Each row also has properties from (edge/node)-type metadata.
+"""
+
+
+##########################################
+# Interface files
+##########################################
+class TabularNetwork(object):
+ """Factory for loading nodes and edges files."""
+ @staticmethod
+ def load_nodes(nodes_file, node_types_file):
+ raise NotImplementedError()
+
+ @staticmethod
+ def load_edges(edges_file, edge_types_files):
+ raise NotImplementedError()
+
+
+class NodeRow(object):
+ """Node file row.
+
+ Each row represents a node/cell/population in a network and can include node-type metadata and dynamics_params
+ when applicable. The only mandatory property for a NodeRow is a unique gid (i.e. cell_id, node_id). Properties can
+ be accessed like a dictionary.
+ """
+ def __init__(self, gid, node_props, types_props):
+ self._gid = gid
+ self._node_props = node_props # properties from the csv/hdf5 file
+ self._node_type_props = types_props  # properties from the node_types metadata file
+
+ @property
+ def gid(self):
+ return self._gid
+
+ @property
+ def with_dynamics_params(self):
+ """Set to true if dynamics_params subgroup attached to HDF5 properities"""
+ raise NotImplementedError()
+
+ @property
+ def dynamics_params(self):
+ raise NotImplementedError()
+
+ @property
+ def columns(self):
+ return self._node_props.keys() + self._node_type_props.keys()
+
+ @property
+ def node_props(self):
+ return self._node_props
+
+ @property
+ def node_type_props(self):
+ return self._node_type_props
+
+ def get(self, prop_key, default=None):
+ # First see if the property exists in the node file, then check node-types
+ if prop_key in self._node_props:
+ return self._node_props[prop_key]
+ elif prop_key in self._node_type_props:
+ return self._node_type_props[prop_key]
+ else:
+ return default
+
+ def __contains__(self, prop_key):
+ return prop_key in self._node_props.keys() or prop_key in self._node_type_props.keys()
+
+ def __getitem__(self, prop_key):
+ val = self.get(prop_key)
+ if val is None:
+ raise Exception('Invalid property key {}.'.format(prop_key))
+ return val
+
+ def __repr__(self):
+ return build_row_repr(self)
+
+
+class EdgeRow(object):
+ """Representation of a edge.
+
+ Edges must include a source and target node gid. Other properties, from the edges or edge-types files, can be
+ directly accessed like a dictionary.
+ """
+ def __init__(self, trg_gid, src_gid, edge_props={}, edge_type_props={}):
+ self._trg_gid = trg_gid
+ self._src_gid = src_gid
+ self._edge_props = edge_props
+ self._edge_type_props = edge_type_props
+
+ @property
+ def target_gid(self):
+ return self._trg_gid
+
+ @property
+ def source_gid(self):
+ return self._src_gid
+
+ @property
+ def with_dynamics_params(self):
+ raise NotImplementedError()
+
+ @property
+ def dynamics_params(self):
+ raise NotImplementedError()
+
+ @property
+ def columns(self):
+ return self._edge_props.keys() + self._edge_type_props.keys()
+
+ @property
+ def edge_props(self):
+ return self._edge_props
+
+ def __contains__(self, prop_key):
+ return prop_key in self._edge_props.keys() or prop_key in self._edge_type_props.keys()
+
+ def __getitem__(self, prop_key):
+ if prop_key in self._edge_props:
+ return self._edge_props[prop_key]
+ elif prop_key in self._edge_type_props:
+ return self._edge_type_props[prop_key]
+ else:
+ raise Exception('Invalid property name {}.'.format(prop_key))
+
+ def __repr__(self):
+ return build_row_repr(self)
+
+
+class NodesFile(object):
+ """Class for reading and iterating properties of each node in a nodes/node-types file.
+
+ Use the load method to load in the necessary node files. Nodes can be accessed using an iterator:
+ nodes = NodesFile()
+ nodes.load(nodes_file.h5, node_types.csv)
+ for node in nodes:
+ print node['prop']
+ ...
+ Or individually by gid:
+ node = nodes[101]
+ print node['prop']
+ """
+ def __init__(self):
+ self._network_name = None
+ self._version = None
+ self._iter_index = 0
+ self._nrows = 0
+ self._node_types_table = None
+
+ @property
+ def name(self):
+ """name of network containing these nodes"""
+ return self._network_name
+
+ @property
+ def version(self):
+ return self._version
+
+ @property
+ def gids(self):
+ raise NotImplementedError()
+
+ @property
+ def node_types_table(self):
+ return self._node_types_table
+
+ def load(self, nodes_file, node_types_file):
+ raise NotImplementedError()
+
+ def get_node(self, gid, cache=False):
+ raise NotImplementedError()
+
+ def __len__(self):
+ raise NotImplementedError()
+
+ def __iter__(self):
+ self._iter_index = 0
+ return self
+
+ def next(self):
+ raise NotImplementedError()
+
+ def __getitem__(self, gid):
+ return self.get_node(gid)
+
+
+class EdgesFile(object):
+ """Class for reading and iterating over edge files.
+
+ Use the load() method to instantiate from the file. Edges can be accessed for any given target gid
+ using the edges_itr() method:
+ edges = EdgesFile()
+ edges.load(edge_file.h5, edge_types.csv)
+ for edge_prop in edges.edges_itr(101):
+ assert(edge_prop.target_gid == 101)
+ source_node = nodes[edge_prop.source_gid]
+ print edge_prop['prop_name']
+ """
+ @property
+ def source_network(self):
+ """Name of network containing the source gids"""
+ raise NotImplementedError()
+
+ @property
+ def target_network(self):
+ """Name of network containing the target gids"""
+ raise NotImplementedError()
+
+ def load(self, edges_file, edge_types_file):
+ raise NotImplementedError()
+
+ def edges_itr(self, target_gid):
+ raise NotImplementedError()
+
+ def __len__(self):
+ raise NotImplementedError()
+
+
+##########################################
+# Helper functions
+##########################################
+class ColumnProperty(object):
+ """Representation of a column name and metadata from a hdf5 dataset, csv column, etc.
+
+ """
+ def __init__(self, name, dtype, dimension, attrs={}):
+ self._name = name
+ self._dtype = dtype
+ self._dim = dimension
+ self._attrs = attrs
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def dtype(self):
+ return self._dtype
+
+ @property
+ def dimension(self):
+ return self._dim
+
+ @property
+ def attributes(self):
+ return self._attrs
+
+ @classmethod
+ def from_h5(cls, hf_obj, name=None):
+ if isinstance(hf_obj, h5py.Dataset):
+ ds_name = name if name is not None else hf_obj.name.split('/')[-1]
+ ds_dtype = hf_obj.dtype
+
+ # If the dataset shape is in the form "(N, M)" then the dimension is M. If the shape is just "(N)" then the
+ # dimension is just 1
+ dim = 1 if len(hf_obj.shape) < 2 else hf_obj.shape[1]
+ return cls(ds_name, ds_dtype, dim, attrs=hf_obj.attrs)
+
+ elif isinstance(hf_obj, h5py.Group):
+ columns = []
+ for name, ds in hf_obj.items():
+ if isinstance(ds, h5py.Dataset):
+ columns.append(ColumnProperty.from_h5(ds, name))
+ return columns
+
+ else:
+ raise Exception('Unable to convert hdf5 object {} to a property or list of properties.'.format(hf_obj))
+
+ @classmethod
+ def from_csv(cls, pd_obj, name=None):
+ if isinstance(pd_obj, pd.Series):
+ c_name = name if name is not None else pd_obj.name
+ c_dtype = pd_obj.dtype
+ return cls(c_name, c_dtype, 1)
+
+ elif isinstance(pd_obj, pd.DataFrame):
+ return [cls(name, pd_obj[name].dtype, 1) for name in pd_obj.columns]
+
+ else:
+ raise Exception('Unable to convert pandas object {} to a property or list of properties.'.format(pd_obj))
+
+ def __hash__(self):
+ return hash(self._name)
+
+ def __repr__(self):
+ return '{} ({})'.format(self.name, self.dtype)
+
+
+class TypesTable(dict):
+ def __init__(self, types_file, index_column, separator=' ', comment='#'):
+ super(TypesTable, self).__init__()
+
+ types_df = pd.read_csv(types_file, sep=separator, comment=comment)
+ self._columns = ColumnProperty.from_csv(types_df)
+ for _, row in types_df.iterrows():
+ # TODO: iterrows does not preserve dtype and should be replaced with itertuples
+ type_id = row[index_column]
+ row = {col.name: row[col.name] for col in self._columns}
+ self.update({type_id: row})
+
+ @property
+ def columns(self):
+ return self._columns
+
+
+def build_row_repr(row):
+ columns = row.columns
+ if len(columns) > 0:
+ rstr = "{"
+ for c in columns:
+ rstr += "'{}': {}, ".format(c, row[c])
+ return rstr[:-2] + "}"
+ else:
+ return "{}"
diff --git a/bmtk-vb/bmtk/utils/io/tabular_network_v0.py b/bmtk-vb/bmtk/utils/io/tabular_network_v0.py
new file mode 100644
index 0000000..711c177
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/io/tabular_network_v0.py
@@ -0,0 +1,160 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import h5py
+
+import tabular_network as tn
+
+"""
+This is for the original bionet network format developed at the Allen Institute in 2016-2017. The nodes, node_types,
+and edge_types files use csv format, while edges use an hdf5 format.
+
+"""
+class TabularNetwork(tn.TabularNetwork):
+ @staticmethod
+ def load_nodes(nodes_file, node_types_file):
+ nf = NodesFile()
+ nf.load(nodes_file, node_types_file)
+ return nf
+
+ @staticmethod
+ def load_edges(edges_file, edge_types_file):
+ ef = EdgesFile()
+ ef.load(edges_file, edge_types_file)
+ return ef
+
+
+class NodeRow(tn.NodeRow):
+ def __init__(self, gid, node_props, types_props, columns):
+ super(NodeRow, self).__init__(gid, node_props, types_props)
+ self._columns = columns
+
+ @property
+ def with_dynamics_params(self):
+ return False
+
+ @property
+ def dynamics_params(self):
+ return None
+
+
+class NodesFile(tn.NodesFile):
+ def __init__(self):
+ super(NodesFile, self).__init__()
+ self._network_name = 'NA'
+ self._version = 'v0.0'
+
+ self._nodes_df = None
+ self._nodes_columns = None
+ self._columns = None
+
+ @property
+ def gids(self):
+ return list(self._nodes_df.index)
+
+ def load(self, nodes_file, node_types_file):
+ self._nodes_df = pd.read_csv(nodes_file, sep=' ', index_col=['node_id'])
+ self._node_types_table = tn.TypesTable(node_types_file, 'node_type_id')
+
+ self._nrows = len(self._nodes_df.index)
+ self._nodes_columns = tn.ColumnProperty.from_csv(self._nodes_df)
+ self._columns = self._nodes_columns + self._node_types_table.columns
+
+ def get_node(self, gid, cache=False):
+ nodes_data = self._nodes_df.loc[gid]
+ node_type_data = self._node_types_table[nodes_data['node_type_id']]
+ return NodeRow(gid, nodes_data, node_type_data, self._columns)
+
+ def __len__(self):
+ return self._nrows
+
+ def next(self):
+ if self._iter_index >= len(self):
+ raise StopIteration
+ else:
+ gid = self._nodes_df.index[self._iter_index]
+ self._iter_index += 1
+ return self.get_node(gid)
+
+
+class EdgeRow(tn.EdgeRow):
+ def __init__(self, trg_gid, src_gid, nsyns, edge_type_props):
+ super(EdgeRow, self).__init__(trg_gid, src_gid, edge_type_props=edge_type_props)
+ self._edge_props['nsyns'] = nsyns
+
+ @property
+ def with_dynamics_params(self):
+ return False
+
+ @property
+ def dynamics_params(self):
+ return None
+
+
+class EdgesFile(tn.EdgesFile):
+ def __init__(self):
+ self._nrows = 0
+ self._index_len = 0
+
+ self._edge_ptr_ds = None
+ self._num_syns_ds = None
+ self._src_gids_ds = None
+ self._edge_types_ds = None
+ self._edge_types_table = {}
+
+ @property
+ def source_network(self):
+ return None
+
+ @property
+ def target_network(self):
+ return None
+
+ def load(self, edges_file, edge_types_file):
+ edges_hf = h5py.File(edges_file, 'r')
+ self._edge_ptr_ds = edges_hf['edge_ptr']
+ self._num_syns_ds = edges_hf['num_syns']
+ self._src_gids_ds = edges_hf['src_gids']
+
+ # TODO: validate edge_types dataset keys
+ self._edge_types_ds = edges_hf['edge_types']
+ self._edge_types_table = tn.TypesTable(edge_types_file, 'edge_type_id')
+ self._index_len = len(self._edge_ptr_ds)
+ self._nrows = len(self._src_gids_ds)
+
+ def edges_itr(self, target_gid):
+ assert(isinstance(target_gid, int))
+ if target_gid+1 >= self._index_len:
+ raise StopIteration()
+
+ index_begin = self._edge_ptr_ds[target_gid]
+ index_end = self._edge_ptr_ds[target_gid+1]
+ for iloc in xrange(index_begin, index_end):
+ source_gid = self._src_gids_ds[iloc]
+ edge_type_id = self._edge_types_ds[iloc]
+ edge_type = self._edge_types_table[edge_type_id]
+ nsyns = self._num_syns_ds[iloc]
+ yield EdgeRow(target_gid, source_gid, nsyns, edge_type)
+
+ def __len__(self):
+ return self._nrows
diff --git a/bmtk-vb/bmtk/utils/io/tabular_network_v1.py b/bmtk-vb/bmtk/utils/io/tabular_network_v1.py
new file mode 100644
index 0000000..3506b81
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/io/tabular_network_v1.py
@@ -0,0 +1,256 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import h5py
+
+import tabular_network as tn
+
+
+"""
+For the initial draft of the network format developed jointly by AI and collaborators in Q2 of 2017.
+
+Edges and nodes files are stored in hdf5, while the edge-types and node-types are stored in csv. In the hdf5 files
+optional properties are stored in groups assigned to each node/edge. Optionally, each property group may include a
+dynamics_params subgroup to describe the model of each node/row, or dynamics_params may be referenced in the types
+metadata file.
+
+"""
+
+class TabularNetwork(tn.TabularNetwork):
+ @staticmethod
+ def load_nodes(nodes_file, node_types_file):
+ nf = NodesFile()
+ nf.load(nodes_file, node_types_file)
+ return nf
+
+ @staticmethod
+ def load_edges(edges_file, edge_types_file):
+ ef = EdgesFile()
+ ef.load(edges_file, edge_types_file)
+ return ef
+
+
+class NodeRow(tn.NodeRow):
+ def __init__(self, gid, group, group_props, types_props):
+ super(NodeRow, self).__init__(gid, group_props, types_props)
+ # TODO: use group to determine if dynamics_params are included.
+
+ @property
+ def with_dynamics_params(self):
+ return False
+
+ @property
+ def dynamics_params(self):
+ return None
+
+
+class NodesFile(tn.NodesFile):
+ def __init__(self):
+ super(NodesFile, self).__init__()
+
+ self._nodes_hf = None
+ self._nodes_index = pd.DataFrame()
+ self._group_table = {}
+ self._nrows = 0
+
+ @property
+ def gids(self):
+ return list(self._nodes_index.index)
+
+ def load(self, nodes_file, node_types_file):
+ nodes_hf = h5py.File(nodes_file, 'r')
+ if 'nodes' not in nodes_hf.keys():
+ raise Exception('Could not find nodes in {}'.format(nodes_file))
+ nodes_group = nodes_hf['nodes']
+
+ self._network_name = nodes_group.attrs['network'] if 'network' in nodes_group.attrs.keys() else 'NA'
+ self._version = 'v0.1' # TODO: get the version number from the attributes
+
+ # Create Indices
+ self._nodes_index['node_gid'] = pd.Series(nodes_group['node_gid'], dtype=nodes_group['node_gid'].dtype)
+ self._nodes_index['node_type_id'] = pd.Series(nodes_group['node_type_id'],
+ dtype=nodes_group['node_type_id'].dtype)
+ self._nodes_index['node_group'] = pd.Series(nodes_group['node_group'],
+ dtype=nodes_group['node_group'].dtype)
+ self._nodes_index['node_group_index'] = pd.Series(nodes_group['node_group_index'],
+ dtype=nodes_group['node_group_index'].dtype)
+ self._nodes_index.set_index(['node_gid'], inplace=True)
+ self._nrows = len(self._nodes_index)
+
+ # Save the node-types
+ self._node_types_table = tn.TypesTable(node_types_file, 'node_type_id')
+
+ # save pointers to the groups table
+ self._group_table = {grp_id: Group(grp_id, grp_ptr, self._node_types_table)
+ for grp_id, grp_ptr in nodes_group.items() if isinstance(grp_ptr, h5py.Group)}
+
+ def get_node(self, gid, cache=False):
+ node_metadata = self._nodes_index.loc[gid]
+ ng = node_metadata['node_group']
+ ng_idx = node_metadata['node_group_index']
+
+ group_props = self._group_table[str(ng)][ng_idx]
+ types_props = self._node_types_table[node_metadata['node_type_id']]
+
+ return NodeRow(gid, self._group_table[str(ng)], group_props, types_props)
+
+ def __len__(self):
+ return self._nrows
+
+ def next(self):
+ if self._iter_index >= len(self):
+ raise StopIteration
+ else:
+ gid = self._nodes_index.index[self._iter_index]
+ self._iter_index += 1
+ return self.get_node(gid)
+
+
+class EdgeRow(tn.EdgeRow):
+ def __init__(self, trg_gid, src_gid, syn_group, edge_props={}, edge_type_props={}):
+ super(EdgeRow, self).__init__(trg_gid, src_gid, edge_props, edge_type_props)
+ # TODO: Look in syn_group to see if dynamics_params are included
+
+ @property
+ def with_dynamics_params(self):
+ return False
+
+ @property
+ def dynamics_params(self):
+ return None
+
+
+class EdgesFile(tn.EdgesFile):
+ def __init__(self):
+ super(EdgesFile, self).__init__()
+ self._nedges = 0
+ self._source_network = None
+ self._target_network = None
+
+ # We'll save the target-index dataset into memory
+ self._target_index = None
+ self._target_index_len = 0
+
+ # to save memory just keep pointers to datasets and access them as needed.
+ self._target_gid_ds = None
+ self._source_gid_ds = None
+ self._edge_type_ds = None
+ self._edge_group_ds = None
+ self._edge_group_index_ds = None
+ self._edge_types_table = None
+
+ self._group_table = {} # A table for all subgroups
+
+ @property
+ def source_network(self):
+ return self._source_network
+
+ @property
+ def target_network(self):
+ return self._target_network
+
+ def load(self, edges_file, edge_types_file):
+ edges_hf = h5py.File(edges_file, 'r')
+ if 'edges' not in edges_hf.keys():
+ raise Exception('Could not find edges in {}'.format(edges_file))
+ edges_group = edges_hf['edges']
+
+ # Preload the target index pointers into memory
+ self._target_index = pd.Series(edges_group['index_pointer'], dtype=edges_group['index_pointer'].dtype)
+ self._target_index_len = len(self._target_index)
+
+ # For the other index tables we only load in a file pointer
+ self._target_gid_ds = edges_group['target_gid']
+ if 'network' in self._target_gid_ds.attrs.keys():
+ self._target_network = self._target_gid_ds.attrs['network']
+
+ self._source_gid_ds = edges_group['source_gid']
+ if 'network' in self._source_gid_ds.attrs.keys():
+ self._source_network = self._source_gid_ds.attrs['network']
+
+ self._edge_type_ds = edges_group['edge_type_id']
+ self._edge_group_ds = edges_group['edge_group']
+ self._edge_group_index_ds = edges_group['edge_group_index']
+
+ self._nedges = len(self._edge_group_index_ds)
+
+ # Load in edge-types table
+ self._edge_types_table = tn.TypesTable(edge_types_file, 'edge_type_id')
+
+ # Load in the group properties
+ # TODO: look in attributes for group synonyms
+ # TODO: HDF5 group name will always be a string, but value in groups dataset will be an int.
+ self._group_table = {grp_id: Group(grp_id, grp_ptr, self._edge_types_table)
+ for grp_id, grp_ptr in edges_group.items() if isinstance(grp_ptr, h5py.Group)}
+
+ def edges_itr(self, target_gid):
+ assert(isinstance(target_gid, int))
+ if target_gid+1 >= self._target_index_len:
+ raise StopIteration()
+
+ index_begin = self._target_index.iloc[target_gid]
+ index_end = self._target_index.iloc[target_gid+1]
+ for iloc in xrange(index_begin, index_end):
+ yield self[iloc]
+
+ def __len__(self):
+ return self._nedges
+
+ def __getitem__(self, iloc):
+ trg_gid = self._target_gid_ds[iloc]
+ src_gid = self._source_gid_ds[iloc]
+
+ et_id = self._edge_type_ds[iloc]
+ et_props = self._edge_types_table[et_id]
+
+ syn_group = self._edge_group_ds[iloc]
+ syn_index = self._edge_group_index_ds[iloc]
+ group_props = self._group_table[str(syn_group)][syn_index]
+
+ return EdgeRow(trg_gid, src_gid, syn_group, group_props, et_props)
+
+
+class Group(object):
+ def __init__(self, group_id, h5_group, types_table):
+ self._types_table = types_table
+ self._group_id = group_id
+
+ self._group_columns = tn.ColumnProperty.from_h5(h5_group)
+ self._group_table = [(prop, h5_group[prop.name]) for prop in self._group_columns]
+
+ self._all_columns = self._group_columns + types_table.columns
+
+ # TODO: check to see if dynamics_params exists
+
+ @property
+ def columns(self):
+ return self._all_columns
+
+ def __getitem__(self, indx):
+ group_props = {}
+ for cprop, h5_obj in self._group_table:
+ group_props[cprop.name] = h5_obj[indx]
+ return group_props
+
+ def __repr__(self):
+ return "Group('group id': {}, 'properties':{})".format(self._group_id, self._all_columns)
diff --git a/bmtk-vb/bmtk/utils/property_schema.py b/bmtk-vb/bmtk/utils/property_schema.py
new file mode 100644
index 0000000..54f2005
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/property_schema.py
@@ -0,0 +1,35 @@
+# Allen Institute Software License - This software license is the 2-clause BSD license plus clause a third
+# clause that prohibits redistribution for commercial purposes without further permission.
+#
+# Copyright 2017. Allen Institute. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Redistributions for commercial purposes are not permitted without the Allen Institute's written permission. For
+# purposes of this license, commercial purposes is the incorporation of the Allen Institute's software into anything for
+# which you will charge fees or other compensation. Contact terms@alleninstitute.org for commercial licensing
+# opportunities.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+# TODO: go through the individual simulators' property_schemas and pull out the common functionality. Ideally all
+# simulators should share ~80% of the same schema, with some differences in how certain columns are determined.
+# TODO: Add access to builder so when a network is built with a given property schema
+# TODO: have utils.io.tabular_network use these schemas to discover name of node-id, node-type-id, etc for different
+# standards.
+class PropertySchema:
+ pass
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/reports/__init__.pyc b/bmtk-vb/bmtk/utils/reports/__init__.pyc
new file mode 100644
index 0000000..c99e3f3
Binary files /dev/null and b/bmtk-vb/bmtk/utils/reports/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/utils/reports/spike_trains/__init__.pyc b/bmtk-vb/bmtk/utils/reports/spike_trains/__init__.pyc
new file mode 100644
index 0000000..7a938f1
Binary files /dev/null and b/bmtk-vb/bmtk/utils/reports/spike_trains/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/utils/reports/spike_trains/adaptors/__init__.pyc b/bmtk-vb/bmtk/utils/reports/spike_trains/adaptors/__init__.pyc
new file mode 100644
index 0000000..279d5fc
Binary files /dev/null and b/bmtk-vb/bmtk/utils/reports/spike_trains/adaptors/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/utils/reports/spike_trains/adaptors/csv_adaptors.pyc b/bmtk-vb/bmtk/utils/reports/spike_trains/adaptors/csv_adaptors.pyc
new file mode 100644
index 0000000..6a56f72
Binary files /dev/null and b/bmtk-vb/bmtk/utils/reports/spike_trains/adaptors/csv_adaptors.pyc differ
diff --git a/bmtk-vb/bmtk/utils/reports/spike_trains/adaptors/nwb_adaptors.pyc b/bmtk-vb/bmtk/utils/reports/spike_trains/adaptors/nwb_adaptors.pyc
new file mode 100644
index 0000000..9f0cb24
Binary files /dev/null and b/bmtk-vb/bmtk/utils/reports/spike_trains/adaptors/nwb_adaptors.pyc differ
diff --git a/bmtk-vb/bmtk/utils/reports/spike_trains/adaptors/sonata_adaptors.pyc b/bmtk-vb/bmtk/utils/reports/spike_trains/adaptors/sonata_adaptors.pyc
new file mode 100644
index 0000000..3c79b81
Binary files /dev/null and b/bmtk-vb/bmtk/utils/reports/spike_trains/adaptors/sonata_adaptors.pyc differ
diff --git a/bmtk-vb/bmtk/utils/reports/spike_trains/core.pyc b/bmtk-vb/bmtk/utils/reports/spike_trains/core.pyc
new file mode 100644
index 0000000..a39788c
Binary files /dev/null and b/bmtk-vb/bmtk/utils/reports/spike_trains/core.pyc differ
diff --git a/bmtk-vb/bmtk/utils/reports/spike_trains/plotting.pyc b/bmtk-vb/bmtk/utils/reports/spike_trains/plotting.pyc
new file mode 100644
index 0000000..518e23a
Binary files /dev/null and b/bmtk-vb/bmtk/utils/reports/spike_trains/plotting.pyc differ
diff --git a/bmtk-vb/bmtk/utils/reports/spike_trains/spike_train_buffer.pyc b/bmtk-vb/bmtk/utils/reports/spike_trains/spike_train_buffer.pyc
new file mode 100644
index 0000000..07a5592
Binary files /dev/null and b/bmtk-vb/bmtk/utils/reports/spike_trains/spike_train_buffer.pyc differ
diff --git a/bmtk-vb/bmtk/utils/reports/spike_trains/spike_trains.pyc b/bmtk-vb/bmtk/utils/reports/spike_trains/spike_trains.pyc
new file mode 100644
index 0000000..3a4d638
Binary files /dev/null and b/bmtk-vb/bmtk/utils/reports/spike_trains/spike_trains.pyc differ
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/default_config.json b/bmtk-vb/bmtk/utils/scripts/bionet/default_config.json
new file mode 100644
index 0000000..572f497
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/default_config.json
@@ -0,0 +1,47 @@
+{
+ "manifest": {
+ "$OUTPUT_DIR": "output"
+ },
+
+ "target_simulator":"BioNet",
+
+ "run": {
+ "tstop": 0.0,
+ "dt": 0.1,
+ "dL": 20,
+ "overwrite_output_dir": true,
+ "spike_threshold": -15,
+ "nsteps_block": 5000
+ },
+
+ "conditions": {
+ "celsius": 34.0,
+ "v_init": -80
+ },
+
+ "inputs": {},
+
+ "reports": {},
+
+ "output": {
+ "log_file": "${OUTPUT_DIR}/log.txt",
+ "output_dir": "${OUTPUT_DIR}",
+ "spikes_file": "${OUTPUT_DIR}/spikes.h5",
+ "spikes_file_csv": "${OUTPUT_DIR}/spikes.csv"
+ },
+
+ "components": {
+ "morphologies_dir": "${COMPONENTS_DIR}/biophysical/morphology",
+ "synaptic_models_dir": "${COMPONENTS_DIR}/synaptic_models",
+ "mechanisms_dir":"${COMPONENTS_DIR}/mechanisms",
+ "biophysical_neuron_models_dir": "${COMPONENTS_DIR}/biophysical/electrophysiology",
+ "point_neuron_models_dir": "${COMPONENTS_DIR}/intfire",
+ "templates_dir": "${COMPONENTS_DIR}/hoc_templates"
+ },
+
+ "networks": {
+ "nodes": [],
+
+ "edges": []
+ }
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/hoc_templates/BioAllen_old.hoc b/bmtk-vb/bmtk/utils/scripts/bionet/hoc_templates/BioAllen_old.hoc
new file mode 100644
index 0000000..fde930d
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/hoc_templates/BioAllen_old.hoc
@@ -0,0 +1,21 @@
+begintemplate BioAllenOld
+
+public init
+public soma, dend, apic, axon
+public all, somatic, basal, apical, axonal
+objref all, somatic, basal, apical, axonal
+
+objref this
+
+proc init() {localobj nl, import
+ all = new SectionList()
+ somatic = new SectionList()
+ basal = new SectionList()
+ apical = new SectionList()
+ axonal = new SectionList()
+ forall delete_section()
+}
+
+create soma[1], dend[1], apic[1], axon[1]
+
+endtemplate BioAllenOld
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/hoc_templates/BioAxonStub.hoc b/bmtk-vb/bmtk/utils/scripts/bionet/hoc_templates/BioAxonStub.hoc
new file mode 100644
index 0000000..df8660d
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/hoc_templates/BioAxonStub.hoc
@@ -0,0 +1,61 @@
+begintemplate BioAxonStub
+
+public init
+public soma, dend, apic, axon
+public all, somatic, basal, apical, axonal
+
+objref all, somatic, basal, apical, axonal
+objref this
+
+create soma[1]
+create dend[1]
+create apic[1]
+create axon[1]
+
+
+proc init() {localobj nl, import
+ all = new SectionList()
+ somatic = new SectionList()
+ basal = new SectionList()
+ apical = new SectionList()
+ axonal = new SectionList()
+ forall delete_section()
+
+// nl = new Import3d_Neurolucida3()
+ nl = new Import3d_SWC_read()
+ nl.quiet = 1
+ nl.input($s1)
+ import = new Import3d_GUI(nl, 0)
+// import.quite = 1
+ import.instantiate(this)
+
+ simplify_axon()
+}
+
+proc simplify_axon() {
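+ // replace the reconstructed axon with two 30 um stub sections attached to the soma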
+
+ forsec axonal { delete_section() }
+ create axon[2]
+
+ axon[0] {
+ L = 30
+ diam = 1
+ nseg = 1+2*int(L/40)
+ all.append()
+ axonal.append()
+ }
+ axon[1] {
+ L = 30
+ diam = 1
+ nseg = 1+2*int(L/40)
+ all.append()
+ axonal.append()
+ }
+ connect axon(0), soma(0.5)
+ connect axon[1](0), axon[0](1)
+ define_shape()
+
+
+}
+
+endtemplate BioAxonStub
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/hoc_templates/Biophys1.hoc b/bmtk-vb/bmtk/utils/scripts/bionet/hoc_templates/Biophys1.hoc
new file mode 100644
index 0000000..bac9b0f
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/hoc_templates/Biophys1.hoc
@@ -0,0 +1,34 @@
+begintemplate Biophys1
+
+public init
+public soma, dend, apic, axon
+public all, somatic, basal, apical, axonal
+
+objref all, somatic, basal, apical, axonal
+objref this
+
+create soma[1]
+create dend[1]
+create apic[1]
+create axon[1]
+
+
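+// init() expects the path to an SWC morphology file as its string argument
+// ($s1); the Import3d reader instantiates that morphology into this template.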
+proc init() {localobj nl, import
+ all = new SectionList()
+ somatic = new SectionList()
+ basal = new SectionList()
+ apical = new SectionList()
+ axonal = new SectionList()
+ forall delete_section()
+
+// nl = new Import3d_Neurolucida3()
+ nl = new Import3d_SWC_read()
+ nl.quiet = 1
+ nl.input($s1)
+ import = new Import3d_GUI(nl, 0)
+// import.quiet = 1
+ import.instantiate(this)
+
+}
+
+endtemplate Biophys1
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/intfire/IntFire1_exc_1.json b/bmtk-vb/bmtk/utils/scripts/bionet/intfire/IntFire1_exc_1.json
new file mode 100644
index 0000000..6a58d3b
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/intfire/IntFire1_exc_1.json
@@ -0,0 +1,5 @@
+{
+ "tau": 0.024,
+ "type": "NEURON_IntFire1",
+ "refrac": 0.003
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/intfire/IntFire1_inh_1.json b/bmtk-vb/bmtk/utils/scripts/bionet/intfire/IntFire1_inh_1.json
new file mode 100644
index 0000000..0da2f1f
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/intfire/IntFire1_inh_1.json
@@ -0,0 +1,5 @@
+{
+ "tau": 0.007,
+ "type": "NEURON_IntFire1",
+ "refrac": 0.003
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/CaDynamics.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/CaDynamics.mod
new file mode 100644
index 0000000..12af065
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/CaDynamics.mod
@@ -0,0 +1,40 @@
+: Dynamics that track inside calcium concentration
+: modified from Destexhe et al. 1994
+
+NEURON {
+ SUFFIX CaDynamics
+ USEION ca READ ica WRITE cai
+ RANGE decay, gamma, minCai, depth
+}
+
+UNITS {
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+ FARADAY = (faraday) (coulombs)
+ (molar) = (1/liter)
+ (mM) = (millimolar)
+ (um) = (micron)
+}
+
+PARAMETER {
+	gamma = 0.05 : fraction of free calcium (not buffered)
+ decay = 80 (ms) : rate of removal of calcium
+ depth = 0.1 (um) : depth of shell
+ minCai = 1e-4 (mM)
+}
+
+ASSIGNED {ica (mA/cm2)}
+
+INITIAL {
+ cai = minCai
+}
+
+STATE {
+ cai (mM)
+}
+
+BREAKPOINT { SOLVE states METHOD cnexp }
+
+DERIVATIVE states {
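+    : The (10000) factor converts the influx term to mM/ms, given ica in
+    : mA/cm2, FARADAY in coulombs, and depth in um.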
+ cai' = -(10000)*(ica*gamma/(2*FARADAY*depth)) - (cai - minCai)/decay
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Ca_HVA.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Ca_HVA.mod
new file mode 100644
index 0000000..84db2d3
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Ca_HVA.mod
@@ -0,0 +1,82 @@
+: Reference: Reuveni, Friedman, Amitai, and Gutnick, J.Neurosci. 1993
+
+NEURON {
+ SUFFIX Ca_HVA
+ USEION ca READ eca WRITE ica
+ RANGE gbar, g, ica
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+}
+
+ASSIGNED {
+ v (mV)
+ eca (mV)
+ ica (mA/cm2)
+ g (S/cm2)
+ mInf
+ mTau
+ mAlpha
+ mBeta
+ hInf
+ hTau
+ hAlpha
+ hBeta
+}
+
+STATE {
+ m
+ h
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar*m*m*h
+ ica = g*(v-eca)
+}
+
+DERIVATIVE states {
+ rates()
+ m' = (mInf-m)/mTau
+ h' = (hInf-h)/hTau
+}
+
+INITIAL{
+ rates()
+ m = mInf
+ h = hInf
+}
+
+PROCEDURE rates(){
+ UNITSOFF
+ : if((v == -27) ){
+ : v = v+0.0001
+ : }
+ :mAlpha = (0.055*(-27-v))/(exp((-27-v)/3.8) - 1)
+ mAlpha = 0.055 * vtrap(-27 - v, 3.8)
+ mBeta = (0.94*exp((-75-v)/17))
+ mInf = mAlpha/(mAlpha + mBeta)
+ mTau = 1/(mAlpha + mBeta)
+ hAlpha = (0.000457*exp((-13-v)/50))
+ hBeta = (0.0065/(exp((-v-15)/28)+1))
+ hInf = hAlpha/(hAlpha + hBeta)
+ hTau = 1/(hAlpha + hBeta)
+ UNITSON
+}
+
+FUNCTION vtrap(x, y) { : Traps for 0 in denominator of rate equations
+ UNITSOFF
+ if (fabs(x / y) < 1e-6) {
+ vtrap = y * (1 - x / y / 2)
+ } else {
+ vtrap = x / (exp(x / y) - 1)
+ }
+ UNITSON
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Ca_LVA.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Ca_LVA.mod
new file mode 100644
index 0000000..ab151d0
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Ca_LVA.mod
@@ -0,0 +1,69 @@
+: Comment: LVA ca channel. Note: mtau is an approximation from the plots
+: Reference: Avery and Johnston 1996, tau from Randall 1997
+: Comment: shifted by -10 mv to correct for junction potential
+: Comment: corrected rates using q10 = 2.3, target temperature 34, original 21
+
+NEURON {
+ SUFFIX Ca_LVA
+ USEION ca READ eca WRITE ica
+ RANGE gbar, g, ica
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+}
+
+ASSIGNED {
+ v (mV)
+ eca (mV)
+ ica (mA/cm2)
+ g (S/cm2)
+ celsius (degC)
+ mInf
+ mTau
+ hInf
+ hTau
+}
+
+STATE {
+ m
+ h
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar*m*m*h
+ ica = g*(v-eca)
+}
+
+DERIVATIVE states {
+ rates()
+ m' = (mInf-m)/mTau
+ h' = (hInf-h)/hTau
+}
+
+INITIAL{
+ rates()
+ m = mInf
+ h = hInf
+}
+
+PROCEDURE rates(){
+ LOCAL qt
+ qt = 2.3^((celsius-21)/10)
+
+ UNITSOFF
+ v = v + 10
+ mInf = 1.0000/(1+ exp((v - -30.000)/-6))
+ mTau = (5.0000 + 20.0000/(1+exp((v - -25.000)/5)))/qt
+ hInf = 1.0000/(1+ exp((v - -80.000)/6.4))
+ hTau = (20.0000 + 50.0000/(1+exp((v - -40.000)/7)))/qt
+ v = v - 10
+ UNITSON
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Ih.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Ih.mod
new file mode 100644
index 0000000..73b97d8
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Ih.mod
@@ -0,0 +1,71 @@
+: Reference: Kole,Hallermann,and Stuart, J. Neurosci. 2006
+
+NEURON {
+ SUFFIX Ih
+ NONSPECIFIC_CURRENT ihcn
+ RANGE gbar, g, ihcn
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+ ehcn = -45.0 (mV)
+}
+
+ASSIGNED {
+ v (mV)
+ ihcn (mA/cm2)
+ g (S/cm2)
+ mInf
+ mTau
+ mAlpha
+ mBeta
+}
+
+STATE {
+ m
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar*m
+ ihcn = g*(v-ehcn)
+}
+
+DERIVATIVE states {
+ rates()
+ m' = (mInf-m)/mTau
+}
+
+INITIAL{
+ rates()
+ m = mInf
+}
+
+PROCEDURE rates(){
+ UNITSOFF
+ : if(v == -154.9){
+ : v = v + 0.0001
+ : }
+ :mAlpha = 0.001*6.43*(v+154.9)/(exp((v+154.9)/11.9)-1)
+ mAlpha = 0.001 * 6.43 * vtrap(v + 154.9, 11.9)
+ mBeta = 0.001*193*exp(v/33.1)
+ mInf = mAlpha/(mAlpha + mBeta)
+ mTau = 1/(mAlpha + mBeta)
+ UNITSON
+}
+
+FUNCTION vtrap(x, y) { : Traps for 0 in denominator of rate equations
+ UNITSOFF
+ if (fabs(x / y) < 1e-6) {
+ vtrap = y * (1 - x / y / 2)
+ } else {
+ vtrap = x / (exp(x / y) - 1)
+ }
+ UNITSON
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Im.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Im.mod
new file mode 100644
index 0000000..d6112d5
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Im.mod
@@ -0,0 +1,62 @@
+: Reference: Adams et al. 1982 - M-currents and other potassium currents in bullfrog sympathetic neurones
+: Comment: corrected rates using q10 = 2.3, target temperature 34, original 21
+
+NEURON {
+ SUFFIX Im
+ USEION k READ ek WRITE ik
+ RANGE gbar, g, ik
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+}
+
+ASSIGNED {
+ v (mV)
+ ek (mV)
+ ik (mA/cm2)
+ g (S/cm2)
+ celsius (degC)
+ mInf
+ mTau
+ mAlpha
+ mBeta
+}
+
+STATE {
+ m
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar*m
+ ik = g*(v-ek)
+}
+
+DERIVATIVE states {
+ rates()
+ m' = (mInf-m)/mTau
+}
+
+INITIAL{
+ rates()
+ m = mInf
+}
+
+PROCEDURE rates(){
+ LOCAL qt
+ qt = 2.3^((celsius-21)/10)
+
+ UNITSOFF
+ mAlpha = 3.3e-3*exp(2.5*0.04*(v - -35))
+ mBeta = 3.3e-3*exp(-2.5*0.04*(v - -35))
+ mInf = mAlpha/(mAlpha + mBeta)
+ mTau = (1/(mAlpha + mBeta))/qt
+ UNITSON
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Im_v2.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Im_v2.mod
new file mode 100644
index 0000000..fc219f7
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Im_v2.mod
@@ -0,0 +1,59 @@
+: Based on Im model of Vervaeke et al. (2006)
+
+NEURON {
+ SUFFIX Im_v2
+ USEION k READ ek WRITE ik
+ RANGE gbar, g, ik
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+}
+
+ASSIGNED {
+ v (mV)
+ ek (mV)
+ ik (mA/cm2)
+ g (S/cm2)
+ celsius (degC)
+ mInf
+ mTau
+ mAlpha
+ mBeta
+}
+
+STATE {
+ m
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar * m
+ ik = g * (v - ek)
+}
+
+DERIVATIVE states {
+ rates()
+ m' = (mInf - m) / mTau
+}
+
+INITIAL{
+ rates()
+ m = mInf
+}
+
+PROCEDURE rates() {
+ LOCAL qt
+ qt = 2.3^((celsius-30)/10)
+ mAlpha = 0.007 * exp( (6 * 0.4 * (v - (-48))) / 26.12 )
+ mBeta = 0.007 * exp( (-6 * (1 - 0.4) * (v - (-48))) / 26.12 )
+
+ mInf = mAlpha / (mAlpha + mBeta)
+ mTau = (15 + 1 / (mAlpha + mBeta)) / qt
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/K_P.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/K_P.mod
new file mode 100644
index 0000000..0a1238f
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/K_P.mod
@@ -0,0 +1,71 @@
+: Comment: The persistent component of the K current
+: Reference: Voltage-gated K+ channels in layer 5 neocortical pyramidal neurones from young rats: subtypes and gradients, Korngreen and Sakmann, J. Physiology, 2000
+
+
+NEURON {
+ SUFFIX K_P
+ USEION k READ ek WRITE ik
+ RANGE gbar, g, ik
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+ vshift = 0 (mV)
+ tauF = 1
+}
+
+ASSIGNED {
+ v (mV)
+ ek (mV)
+ ik (mA/cm2)
+ g (S/cm2)
+ celsius (degC)
+ mInf
+ mTau
+ hInf
+ hTau
+}
+
+STATE {
+ m
+ h
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar*m*m*h
+ ik = g*(v-ek)
+}
+
+DERIVATIVE states {
+ rates()
+ m' = (mInf-m)/mTau
+ h' = (hInf-h)/hTau
+}
+
+INITIAL{
+ rates()
+ m = mInf
+ h = hInf
+}
+
+PROCEDURE rates() {
+ LOCAL qt
+ qt = 2.3^((celsius-21)/10)
+ UNITSOFF
+ mInf = 1 / (1 + exp(-(v - (-14.3 + vshift)) / 14.6))
+ if (v < -50 + vshift){
+ mTau = tauF * (1.25+175.03*exp(-(v - vshift) * -0.026))/qt
+ } else {
+ mTau = tauF * (1.25+13*exp(-(v - vshift) * 0.026))/qt
+ }
+ hInf = 1/(1 + exp(-(v - (-54 + vshift))/-11))
+ hTau = (360+(1010+24*(v - (-55 + vshift)))*exp(-((v - (-75 + vshift))/48)^2))/qt
+ UNITSON
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/K_T.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/K_T.mod
new file mode 100644
index 0000000..c31beaf
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/K_T.mod
@@ -0,0 +1,68 @@
+: Comment: The transient component of the K current
+: Reference: Voltage-gated K+ channels in layer 5 neocortical pyramidal neurones from young rats: subtypes and gradients, Korngreen and Sakmann, J. Physiology, 2000
+
+NEURON {
+ SUFFIX K_T
+ USEION k READ ek WRITE ik
+ RANGE gbar, g, ik
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+ vshift = 0 (mV)
+ mTauF = 1.0
+ hTauF = 1.0
+}
+
+ASSIGNED {
+ v (mV)
+ ek (mV)
+ ik (mA/cm2)
+ g (S/cm2)
+ celsius (degC)
+ mInf
+ mTau
+ hInf
+ hTau
+}
+
+STATE {
+ m
+ h
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar*m*m*m*m*h
+ ik = g*(v-ek)
+}
+
+DERIVATIVE states {
+ rates()
+ m' = (mInf-m)/mTau
+ h' = (hInf-h)/hTau
+}
+
+INITIAL{
+ rates()
+ m = mInf
+ h = hInf
+}
+
+PROCEDURE rates(){
+ LOCAL qt
+ qt = 2.3^((celsius-21)/10)
+
+ UNITSOFF
+ mInf = 1/(1 + exp(-(v - (-47 + vshift)) / 29))
+ mTau = (0.34 + mTauF * 0.92*exp(-((v+71-vshift)/59)^2))/qt
+ hInf = 1/(1 + exp(-(v+66-vshift)/-10))
+ hTau = (8 + hTauF * 49*exp(-((v+73-vshift)/23)^2))/qt
+ UNITSON
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Kd.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Kd.mod
new file mode 100644
index 0000000..82cbe59
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Kd.mod
@@ -0,0 +1,62 @@
+: Based on Kd model of Foust et al. (2011)
+
+
+NEURON {
+ SUFFIX Kd
+ USEION k READ ek WRITE ik
+ RANGE gbar, g, ik
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+}
+
+ASSIGNED {
+ v (mV)
+ ek (mV)
+ ik (mA/cm2)
+ g (S/cm2)
+ celsius (degC)
+ mInf
+ mTau
+ hInf
+ hTau
+}
+
+STATE {
+ m
+ h
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar * m * h
+ ik = g * (v - ek)
+}
+
+DERIVATIVE states {
+ rates()
+ m' = (mInf - m) / mTau
+ h' = (hInf - h) / hTau
+}
+
+INITIAL{
+ rates()
+ m = mInf
+ h = hInf
+}
+
+PROCEDURE rates() {
+ LOCAL qt
+ qt = 2.3^((celsius-23)/10)
+ mInf = 1 - 1 / (1 + exp((v - (-43)) / 8))
+ mTau = 1
+ hInf = 1 / (1 + exp((v - (-67)) / 7.3))
+ hTau = 1500
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Kv2like.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Kv2like.mod
new file mode 100644
index 0000000..1cbdf84
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Kv2like.mod
@@ -0,0 +1,86 @@
+: Kv2-like channel
+: Adapted from model implemented in Keren et al. 2005
+: Adjusted parameters to be similar to guangxitoxin-sensitive current in mouse CA1 pyramids from Liu and Bean 2014
+
+
+NEURON {
+ SUFFIX Kv2like
+ USEION k READ ek WRITE ik
+ RANGE gbar, g, ik
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+}
+
+ASSIGNED {
+ v (mV)
+ ek (mV)
+ ik (mA/cm2)
+ g (S/cm2)
+ celsius (degC)
+ mInf
+ mAlpha
+ mBeta
+ mTau
+ hInf
+ h1Tau
+ h2Tau
+}
+
+STATE {
+ m
+ h1
+ h2
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar * m * m * (0.5 * h1 + 0.5 * h2)
+ ik = g * (v - ek)
+}
+
+DERIVATIVE states {
+ rates()
+ m' = (mInf - m) / mTau
+ h1' = (hInf - h1) / h1Tau
+ h2' = (hInf - h2) / h2Tau
+}
+
+INITIAL{
+ rates()
+ m = mInf
+ h1 = hInf
+ h2 = hInf
+}
+
+PROCEDURE rates() {
+ LOCAL qt
+ qt = 2.3^((celsius-21)/10)
+ UNITSOFF
+ mAlpha = 0.12 * vtrap( -(v - 43), 11.0)
+ mBeta = 0.02 * exp(-(v + 1.27) / 120)
+ mInf = mAlpha / (mAlpha + mBeta)
+ mTau = 2.5 * (1 / (qt * (mAlpha + mBeta)))
+
+ hInf = 1/(1 + exp((v + 58) / 11))
+ h1Tau = (360 + (1010 + 23.7 * (v + 54)) * exp(-((v + 75) / 48)^2)) / qt
+ h2Tau = (2350 + 1380 * exp(-0.011 * v) - 210 * exp(-0.03 * v)) / qt
+ UNITSON
+}
+
+FUNCTION vtrap(x, y) { : Traps for 0 in denominator of rate equations
+ UNITSOFF
+ if (fabs(x / y) < 1e-6) {
+ vtrap = y * (1 - x / y / 2)
+ } else {
+ vtrap = x / (exp(x / y) - 1)
+ }
+ UNITSON
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Kv3_1.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Kv3_1.mod
new file mode 100644
index 0000000..e244657
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Kv3_1.mod
@@ -0,0 +1,54 @@
+: Comment: Kv3-like potassium current
+
+NEURON {
+ SUFFIX Kv3_1
+ USEION k READ ek WRITE ik
+ RANGE gbar, g, ik
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+ vshift = 0 (mV)
+}
+
+ASSIGNED {
+ v (mV)
+ ek (mV)
+ ik (mA/cm2)
+ g (S/cm2)
+ mInf
+ mTau
+}
+
+STATE {
+ m
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar*m
+ ik = g*(v-ek)
+}
+
+DERIVATIVE states {
+ rates()
+ m' = (mInf-m)/mTau
+}
+
+INITIAL{
+ rates()
+ m = mInf
+}
+
+PROCEDURE rates(){
+ UNITSOFF
+ mInf = 1/(1+exp(((v -(18.700 + vshift))/(-9.700))))
+ mTau = 0.2*20.000/(1+exp(((v -(-46.560 + vshift))/(-44.140))))
+ UNITSON
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/NaTa.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/NaTa.mod
new file mode 100644
index 0000000..fcf7bd3
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/NaTa.mod
@@ -0,0 +1,95 @@
+: Reference: Colbert and Pan 2002
+
+NEURON {
+ SUFFIX NaTa
+ USEION na READ ena WRITE ina
+ RANGE gbar, g, ina
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+
+ malphaF = 0.182
+ mbetaF = 0.124
+ mvhalf = -48 (mV)
+ mk = 6 (mV)
+
+ halphaF = 0.015
+ hbetaF = 0.015
+ hvhalf = -69 (mV)
+ hk = 6 (mV)
+}
+
+ASSIGNED {
+ v (mV)
+ ena (mV)
+ ina (mA/cm2)
+ g (S/cm2)
+ celsius (degC)
+ mInf
+ mTau
+ mAlpha
+ mBeta
+ hInf
+ hTau
+ hAlpha
+ hBeta
+}
+
+STATE {
+ m
+ h
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar*m*m*m*h
+ ina = g*(v-ena)
+}
+
+DERIVATIVE states {
+ rates()
+ m' = (mInf-m)/mTau
+ h' = (hInf-h)/hTau
+}
+
+INITIAL{
+ rates()
+ m = mInf
+ h = hInf
+}
+
+PROCEDURE rates(){
+ LOCAL qt
+ qt = 2.3^((celsius-23)/10)
+
+ UNITSOFF
+ mAlpha = malphaF * vtrap(-(v - mvhalf), mk)
+ mBeta = mbetaF * vtrap((v - mvhalf), mk)
+
+ mInf = mAlpha/(mAlpha + mBeta)
+ mTau = (1/(mAlpha + mBeta))/qt
+
+ hAlpha = halphaF * vtrap(v - hvhalf, hk) : ng - adjusted this to match actual Colbert & Pan values for soma model
+ hBeta = hbetaF * vtrap(-(v - hvhalf), hk) : ng - adjusted this to match actual Colbert & Pan values for soma model
+
+ hInf = hAlpha/(hAlpha + hBeta)
+ hTau = (1/(hAlpha + hBeta))/qt
+ UNITSON
+}
+
+FUNCTION vtrap(x, y) { : Traps for 0 in denominator of rate equations
+ UNITSOFF
+ if (fabs(x / y) < 1e-6) {
+ vtrap = y * (1 - x / y / 2)
+ } else {
+ vtrap = x / (exp(x / y) - 1)
+ }
+ UNITSON
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/NaTs.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/NaTs.mod
new file mode 100644
index 0000000..f753e71
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/NaTs.mod
@@ -0,0 +1,95 @@
+: Reference: Colbert and Pan 2002
+
+NEURON {
+ SUFFIX NaTs
+ USEION na READ ena WRITE ina
+ RANGE gbar, g, ina
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+
+ malphaF = 0.182
+ mbetaF = 0.124
+ mvhalf = -40 (mV)
+ mk = 6 (mV)
+
+ halphaF = 0.015
+ hbetaF = 0.015
+ hvhalf = -66 (mV)
+ hk = 6 (mV)
+}
+
+ASSIGNED {
+ v (mV)
+ ena (mV)
+ ina (mA/cm2)
+ g (S/cm2)
+ celsius (degC)
+ mInf
+ mTau
+ mAlpha
+ mBeta
+ hInf
+ hTau
+ hAlpha
+ hBeta
+}
+
+STATE {
+ m
+ h
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar*m*m*m*h
+ ina = g*(v-ena)
+}
+
+DERIVATIVE states {
+ rates()
+ m' = (mInf-m)/mTau
+ h' = (hInf-h)/hTau
+}
+
+INITIAL{
+ rates()
+ m = mInf
+ h = hInf
+}
+
+PROCEDURE rates(){
+ LOCAL qt
+ qt = 2.3^((celsius-23)/10)
+
+ UNITSOFF
+ mAlpha = malphaF * vtrap(-(v - mvhalf), mk)
+ mBeta = mbetaF * vtrap((v - mvhalf), mk)
+
+ mInf = mAlpha/(mAlpha + mBeta)
+ mTau = (1/(mAlpha + mBeta))/qt
+
+ hAlpha = halphaF * vtrap(v - hvhalf, hk)
+ hBeta = hbetaF * vtrap(-(v - hvhalf), hk)
+
+ hInf = hAlpha/(hAlpha + hBeta)
+ hTau = (1/(hAlpha + hBeta))/qt
+ UNITSON
+}
+
+FUNCTION vtrap(x, y) { : Traps for 0 in denominator of rate equations
+ UNITSOFF
+ if (fabs(x / y) < 1e-6) {
+ vtrap = y * (1 - x / y / 2)
+ } else {
+ vtrap = x / (exp(x / y) - 1)
+ }
+ UNITSON
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/NaV.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/NaV.mod
new file mode 100644
index 0000000..a702395
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/NaV.mod
@@ -0,0 +1,186 @@
+TITLE Mouse sodium current
+: Kinetics of Carter et al. (2012)
+: Based on 37 degC recordings from mouse hippocampal CA1 pyramids
+
+NEURON {
+ SUFFIX NaV
+ USEION na READ ena WRITE ina
+ RANGE g, gbar
+}
+
+UNITS {
+ (mV) = (millivolt)
+ (S) = (siemens)
+}
+
+PARAMETER {
+ gbar = .015 (S/cm2)
+
+ : kinetic parameters
+ Con = 0.01 (/ms) : closed -> inactivated transitions
+ Coff = 40 (/ms) : inactivated -> closed transitions
+ Oon = 8 (/ms) : open -> Ineg transition
+ Ooff = 0.05 (/ms) : Ineg -> open transition
+ alpha = 400 (/ms)
+ beta = 12 (/ms)
+ gamma = 250 (/ms) : opening
+ delta = 60 (/ms) : closing
+
+ alfac = 2.51
+ btfac = 5.32
+
+ : Vdep
+ x1 = 24 (mV) : Vdep of activation (alpha)
+ x2 = -24 (mV) : Vdep of deactivation (beta)
+}
+
+ASSIGNED {
+
+ : rates
+ f01 (/ms)
+ f02 (/ms)
+ f03 (/ms)
+ f04 (/ms)
+ f0O (/ms)
+ f11 (/ms)
+ f12 (/ms)
+ f13 (/ms)
+ f14 (/ms)
+ f1n (/ms)
+ fi1 (/ms)
+ fi2 (/ms)
+ fi3 (/ms)
+ fi4 (/ms)
+ fi5 (/ms)
+ fin (/ms)
+
+ b01 (/ms)
+ b02 (/ms)
+ b03 (/ms)
+ b04 (/ms)
+ b0O (/ms)
+ b11 (/ms)
+ b12 (/ms)
+ b13 (/ms)
+ b14 (/ms)
+ b1n (/ms)
+ bi1 (/ms)
+ bi2 (/ms)
+ bi3 (/ms)
+ bi4 (/ms)
+ bi5 (/ms)
+ bin (/ms)
+
+ v (mV)
+ ena (mV)
+ ina (milliamp/cm2)
+ g (S/cm2)
+ celsius (degC)
+}
+
+STATE {
+ C1 FROM 0 TO 1
+ C2 FROM 0 TO 1
+ C3 FROM 0 TO 1
+ C4 FROM 0 TO 1
+ C5 FROM 0 TO 1
+ I1 FROM 0 TO 1
+ I2 FROM 0 TO 1
+ I3 FROM 0 TO 1
+ I4 FROM 0 TO 1
+ I5 FROM 0 TO 1
+ O FROM 0 TO 1
+ I6 FROM 0 TO 1
+}
+
+BREAKPOINT {
+ SOLVE activation METHOD sparse
+ g = gbar * O
+ ina = g * (v - ena)
+}
+
+INITIAL {
+ rates(v)
+ SOLVE seqinitial
+}
+
+KINETIC activation
+{
+ rates(v)
+ ~ C1 <-> C2 (f01,b01)
+ ~ C2 <-> C3 (f02,b02)
+ ~ C3 <-> C4 (f03,b03)
+ ~ C4 <-> C5 (f04,b04)
+ ~ C5 <-> O (f0O,b0O)
+ ~ O <-> I6 (fin,bin)
+ ~ I1 <-> I2 (f11,b11)
+ ~ I2 <-> I3 (f12,b12)
+ ~ I3 <-> I4 (f13,b13)
+ ~ I4 <-> I5 (f14,b14)
+ ~ I5 <-> I6 (f1n,b1n)
+ ~ C1 <-> I1 (fi1,bi1)
+ ~ C2 <-> I2 (fi2,bi2)
+ ~ C3 <-> I3 (fi3,bi3)
+ ~ C4 <-> I4 (fi4,bi4)
+ ~ C5 <-> I5 (fi5,bi5)
+
+ CONSERVE C1 + C2 + C3 + C4 + C5 + O + I1 + I2 + I3 + I4 + I5 + I6 = 1
+}
+
+LINEAR seqinitial { : sets initial equilibrium
+ ~ I1*bi1 + C2*b01 - C1*( fi1+f01) = 0
+ ~ C1*f01 + I2*bi2 + C3*b02 - C2*(b01+fi2+f02) = 0
+ ~ C2*f02 + I3*bi3 + C4*b03 - C3*(b02+fi3+f03) = 0
+ ~ C3*f03 + I4*bi4 + C5*b04 - C4*(b03+fi4+f04) = 0
+ ~ C4*f04 + I5*bi5 + O*b0O - C5*(b04+fi5+f0O) = 0
+ ~ C5*f0O + I6*bin - O*(b0O+fin) = 0
+
+ ~ C1*fi1 + I2*b11 - I1*( bi1+f11) = 0
+ ~ I1*f11 + C2*fi2 + I3*b12 - I2*(b11+bi2+f12) = 0
+ ~ I2*f12 + C3*fi3 + I4*b13 - I3*(b12+bi3+f13) = 0
+ ~ I3*f13 + C4*fi4 + I5*b14 - I4*(b13+bi4+f14) = 0
+ ~ I4*f14 + C5*fi5 + I6*b1n - I5*(b14+bi5+f1n) = 0
+
+ ~ C1 + C2 + C3 + C4 + C5 + O + I1 + I2 + I3 + I4 + I5 + I6 = 1
+}
+
+PROCEDURE rates(v(mV) )
+{
+ LOCAL qt
+ qt = 2.3^((celsius-37)/10)
+
+ f01 = qt * 4 * alpha * exp(v/x1)
+ f02 = qt * 3 * alpha * exp(v/x1)
+ f03 = qt * 2 * alpha * exp(v/x1)
+ f04 = qt * 1 * alpha * exp(v/x1)
+ f0O = qt * gamma
+ f11 = qt * 4 * alpha * alfac * exp(v/x1)
+ f12 = qt * 3 * alpha * alfac * exp(v/x1)
+ f13 = qt * 2 * alpha * alfac * exp(v/x1)
+ f14 = qt * 1 * alpha * alfac * exp(v/x1)
+ f1n = qt * gamma
+ fi1 = qt * Con
+ fi2 = qt * Con * alfac
+ fi3 = qt * Con * alfac^2
+ fi4 = qt * Con * alfac^3
+ fi5 = qt * Con * alfac^4
+ fin = qt * Oon
+
+ b01 = qt * 1 * beta * exp(v/x2)
+ b02 = qt * 2 * beta * exp(v/x2)
+ b03 = qt * 3 * beta * exp(v/x2)
+ b04 = qt * 4 * beta * exp(v/x2)
+ b0O = qt * delta
+ b11 = qt * 1 * beta * exp(v/x2) / btfac
+ b12 = qt * 2 * beta * exp(v/x2) / btfac
+ b13 = qt * 3 * beta * exp(v/x2) / btfac
+ b14 = qt * 4 * beta * exp(v/x2) / btfac
+ b1n = qt * delta
+ bi1 = qt * Coff
+ bi2 = qt * Coff / (btfac)
+ bi3 = qt * Coff / (btfac^2)
+ bi4 = qt * Coff / (btfac^3)
+ bi5 = qt * Coff / (btfac^4)
+ bin = qt * Ooff
+}
+
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Nap.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Nap.mod
new file mode 100644
index 0000000..ef8021e
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/Nap.mod
@@ -0,0 +1,77 @@
+:Reference : Modeled according to kinetics derived from Magistretti & Alonso 1999
+:Comment: corrected rates using q10 = 2.3, target temperature 34, original 21
+
+NEURON {
+ SUFFIX Nap
+ USEION na READ ena WRITE ina
+ RANGE gbar, g, ina
+}
+
+UNITS {
+ (S) = (siemens)
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+}
+
+PARAMETER {
+ gbar = 0.00001 (S/cm2)
+}
+
+ASSIGNED {
+ v (mV)
+ ena (mV)
+ ina (mA/cm2)
+ g (S/cm2)
+ celsius (degC)
+ mInf
+ hInf
+ hTau
+ hAlpha
+ hBeta
+}
+
+STATE {
+ h
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ rates()
+ g = gbar*mInf*h
+ ina = g*(v-ena)
+}
+
+DERIVATIVE states {
+ rates()
+ h' = (hInf-h)/hTau
+}
+
+INITIAL{
+ rates()
+ h = hInf
+}
+
+PROCEDURE rates(){
+ LOCAL qt
+ qt = 2.3^((celsius-21)/10)
+
+ UNITSOFF
+ mInf = 1.0/(1+exp((v- -52.6)/-4.6)) : assuming instantaneous activation as modeled by Magistretti and Alonso
+
+ hInf = 1.0/(1+exp((v- -48.8)/10))
+ hAlpha = 2.88e-6 * vtrap(v + 17, 4.63)
+ hBeta = 6.94e-6 * vtrap(-(v + 64.4), 2.63)
+
+ hTau = (1/(hAlpha + hBeta))/qt
+ UNITSON
+}
+
+FUNCTION vtrap(x, y) { : Traps for 0 in denominator of rate equations
+ UNITSOFF
+ if (fabs(x / y) < 1e-6) {
+ vtrap = y * (1 - x / y / 2)
+ } else {
+ vtrap = x / (exp(x / y) - 1)
+ }
+ UNITSON
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/SK.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/SK.mod
new file mode 100644
index 0000000..8bfa3b7
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/SK.mod
@@ -0,0 +1,56 @@
+: SK-type calcium-activated potassium current
+: Reference : Kohler et al. 1996
+
+NEURON {
+ SUFFIX SK
+ USEION k READ ek WRITE ik
+ USEION ca READ cai
+ RANGE gbar, g, ik
+}
+
+UNITS {
+ (mV) = (millivolt)
+ (mA) = (milliamp)
+ (mM) = (milli/liter)
+}
+
+PARAMETER {
+ v (mV)
+ gbar = .000001 (mho/cm2)
+ zTau = 1 (ms)
+ ek (mV)
+ cai (mM)
+}
+
+ASSIGNED {
+ zInf
+ ik (mA/cm2)
+ g (S/cm2)
+}
+
+STATE {
+ z FROM 0 TO 1
+}
+
+BREAKPOINT {
+ SOLVE states METHOD cnexp
+ g = gbar * z
+ ik = g * (v - ek)
+}
+
+DERIVATIVE states {
+ rates(cai)
+ z' = (zInf - z) / zTau
+}
+
+PROCEDURE rates(ca(mM)) {
+ if(ca < 1e-7){
+ ca = ca + 1e-07
+ }
+ zInf = 1/(1 + (0.00043 / ca)^4.8)
+}
+
+INITIAL {
+ rates(cai)
+ z = zInf
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/vecevent.mod b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/vecevent.mod
new file mode 100644
index 0000000..503dfd2
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/mechanisms/modfiles/vecevent.mod
@@ -0,0 +1,71 @@
+: Vector stream of events
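+:
+: Typical usage from hoc (a sketch): hand play() a Vector of spike times,
+: then connect the VecStim to targets with NetCons, e.g.
+:   objref vs, tvec
+:   tvec = new Vector()
+:   tvec.append(10, 20, 30)
+:   vs = new VecStim()
+:   vs.play(tvec)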
+
+NEURON {
+ ARTIFICIAL_CELL VecStim
+}
+
+ASSIGNED {
+ index
+ etime (ms)
+ space
+}
+
+INITIAL {
+ index = 0
+ element()
+ if (index > 0) {
+ net_send(etime - t, 1)
+ }
+}
+
+NET_RECEIVE (w) {
+ if (flag == 1) {
+ net_event(t)
+ element()
+ if (index > 0) {
+ net_send(etime - t, 1)
+ }
+ }
+}
+
+VERBATIM
+extern double* vector_vec();
+extern int vector_capacity();
+extern void* vector_arg();
+ENDVERBATIM
+
+PROCEDURE element() {
+VERBATIM
+ { void* vv; int i, size; double* px;
+ i = (int)index;
+ if (i >= 0) {
+ vv = *((void**)(&space));
+ if (vv) {
+ size = vector_capacity(vv);
+ px = vector_vec(vv);
+ if (i < size) {
+ etime = px[i];
+ index += 1.;
+ }else{
+ index = -1.;
+ }
+ }else{
+ index = -1.;
+ }
+ }
+ }
+ENDVERBATIM
+}
+
+PROCEDURE play() {
+VERBATIM
+ void** vv;
+ vv = (void**)(&space);
+ *vv = (void*)0;
+ if (ifarg(1)) {
+ *vv = vector_arg(1);
+ }
+ENDVERBATIM
+}
+
+
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/run_bionet.py b/bmtk-vb/bmtk/utils/scripts/bionet/run_bionet.py
new file mode 100644
index 0000000..ca8abeb
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/run_bionet.py
@@ -0,0 +1,23 @@
+# -*- coding: utf-8 -*-
+
+"""Simulates an example network of 14 cell receiving two kinds of exernal input as defined in configuration file"""
+
+import os, sys
+from bmtk.simulator import bionet
+
+
+def run(config_file):
+ conf = bionet.Config.from_json(config_file, validate=True)
+ conf.build_env()
+
+ graph = bionet.BioNetwork.from_config(conf)
+ sim = bionet.BioSimulator.from_config(conf, network=graph)
+ sim.run()
+ bionet.nrn.quit_execution()
+
+
+if __name__ == '__main__':
+ if __file__ != sys.argv[-1]:
+ run(sys.argv[-1])
+ else:
+ run('config.json')
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/AMPA_ExcToExc.json b/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/AMPA_ExcToExc.json
new file mode 100644
index 0000000..c758540
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/AMPA_ExcToExc.json
@@ -0,0 +1,6 @@
+{
+ "level_of_detail": "exp2syn",
+ "tau1": 1.0,
+ "tau2": 3.0,
+ "erev": 0.0
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/AMPA_ExcToInh.json b/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/AMPA_ExcToInh.json
new file mode 100644
index 0000000..4388799
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/AMPA_ExcToInh.json
@@ -0,0 +1,6 @@
+{
+ "level_of_detail": "exp2syn",
+ "tau1": 0.1,
+ "tau2": 0.5,
+ "erev": 0.0
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/GABA_InhToExc.json b/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/GABA_InhToExc.json
new file mode 100644
index 0000000..702ce9b
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/GABA_InhToExc.json
@@ -0,0 +1,7 @@
+{
+ "level_of_detail": "exp2syn",
+ "tau1": 2.7,
+ "tau2": 15.0,
+ "erev": -70.0
+}
+
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/GABA_InhToInh.json b/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/GABA_InhToInh.json
new file mode 100644
index 0000000..ed4130a
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/GABA_InhToInh.json
@@ -0,0 +1,7 @@
+{
+ "level_of_detail": "exp2syn",
+ "tau1": 0.2,
+ "tau2": 8.0,
+ "erev": -70.0
+}
+
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/instanteneousExc.json b/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/instanteneousExc.json
new file mode 100644
index 0000000..9a6d0a5
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/instanteneousExc.json
@@ -0,0 +1,5 @@
+{
+ "level_of_detail": "instanteneous",
+ "sign": 1
+}
+
diff --git a/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/instanteneousInh.json b/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/instanteneousInh.json
new file mode 100644
index 0000000..3bac514
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/bionet/synaptic_models/instanteneousInh.json
@@ -0,0 +1,5 @@
+{
+ "level_of_detail": "instanteneous",
+ "sign": -1
+}
+
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/472363762_point.json b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/472363762_point.json
new file mode 100644
index 0000000..e6154b1
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/472363762_point.json
@@ -0,0 +1,9 @@
+{
+ "I_e": 0.0,
+ "tau_m": 44.9,
+ "C_m": 239.0,
+ "t_ref": 3.0,
+ "E_L": -78.0,
+ "V_th": -43.0,
+ "V_reset": -55.0
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/472912177_point.json b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/472912177_point.json
new file mode 100644
index 0000000..30b9822
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/472912177_point.json
@@ -0,0 +1,9 @@
+{
+ "I_e": 0.0,
+ "tau_m": 22.2,
+ "C_m": 180.0,
+ "t_ref": 3.0,
+ "E_L": -82.0,
+ "V_th": -35.0,
+ "V_reset": -50.0
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/473862421_point.json b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/473862421_point.json
new file mode 100644
index 0000000..6d7e76a
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/473862421_point.json
@@ -0,0 +1,9 @@
+{
+ "I_e": 0.0,
+ "tau_m": 12.5,
+ "C_m": 78.0,
+ "t_ref": 3.0,
+ "E_L": -73.0,
+ "V_th": -37.0,
+ "V_reset": -55.0
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/473863035_point.json b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/473863035_point.json
new file mode 100644
index 0000000..db8e5e4
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/473863035_point.json
@@ -0,0 +1,9 @@
+{
+ "I_e": 0.0,
+ "tau_m": 22.1,
+ "C_m": 117.0,
+ "t_ref": 3.0,
+ "E_L": -78.0,
+ "V_th": -47.0,
+ "V_reset": -50.0
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/473863510_point.json b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/473863510_point.json
new file mode 100644
index 0000000..348c569
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/473863510_point.json
@@ -0,0 +1,9 @@
+{
+ "I_e": 0.0,
+ "tau_m": 11.5,
+ "C_m": 53.0,
+ "t_ref": 3.0,
+ "E_L": -72.0,
+ "V_th": -25.0,
+ "V_reset": -50.0
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/IntFire1_exc_point.json b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/IntFire1_exc_point.json
new file mode 100644
index 0000000..d3e5c19
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/IntFire1_exc_point.json
@@ -0,0 +1,9 @@
+{
+ "I_e": 0.0,
+ "tau_m": 24.0,
+ "C_m": 120.0,
+ "t_ref": 3.0,
+ "E_L": -75.0,
+ "V_th": -37.0,
+ "V_reset": -53.0
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/IntFire1_inh_point.json b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/IntFire1_inh_point.json
new file mode 100644
index 0000000..cf889c5
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/IntFire1_inh_point.json
@@ -0,0 +1,9 @@
+{
+ "I_e": 0.0,
+ "tau_m": 7.0,
+ "C_m": 50.0,
+ "t_ref": 3.0,
+ "E_L": -77.0,
+ "V_th": -36.0,
+ "V_reset": -52.0
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/filter_point.json b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/filter_point.json
new file mode 100644
index 0000000..0e0dcd2
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/point_neuron_templates/filter_point.json
@@ -0,0 +1,3 @@
+{
+
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/run_pointnet.py b/bmtk-vb/bmtk/utils/scripts/pointnet/run_pointnet.py
new file mode 100644
index 0000000..95c7261
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/run_pointnet.py
@@ -0,0 +1,14 @@
+from bmtk.simulator import pointnet
+
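+# Runs a PointNet (NEST-backed) simulation using ./config.json in the
+# current working directory.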
+
+def main(config_file):
+ configure = pointnet.Config.from_json(config_file)
+ configure.build_env()
+
+ network = pointnet.PointNetwork.from_config(configure)
+ sim = pointnet.PointSimulator.from_config(configure, network)
+ sim.run()
+
+
+if __name__ == '__main__':
+ main('config.json')
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/ExcToExc.json b/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/ExcToExc.json
new file mode 100644
index 0000000..2c63c08
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/ExcToExc.json
@@ -0,0 +1,2 @@
+{
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/ExcToInh.json b/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/ExcToInh.json
new file mode 100644
index 0000000..2c63c08
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/ExcToInh.json
@@ -0,0 +1,2 @@
+{
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/InhToExc.json b/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/InhToExc.json
new file mode 100644
index 0000000..bfd870e
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/InhToExc.json
@@ -0,0 +1,3 @@
+{
+}
+
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/InhToInh.json b/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/InhToInh.json
new file mode 100644
index 0000000..bfd870e
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/InhToInh.json
@@ -0,0 +1,3 @@
+{
+}
+
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/instanteneousExc.json b/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/instanteneousExc.json
new file mode 100644
index 0000000..bfd870e
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/instanteneousExc.json
@@ -0,0 +1,3 @@
+{
+}
+
diff --git a/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/instanteneousInh.json b/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/instanteneousInh.json
new file mode 100644
index 0000000..bfd870e
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/pointnet/synaptic_models/instanteneousInh.json
@@ -0,0 +1,3 @@
+{
+}
+
diff --git a/bmtk-vb/bmtk/utils/scripts/popnet/population_models/exc_model.json b/bmtk-vb/bmtk/utils/scripts/popnet/population_models/exc_model.json
new file mode 100644
index 0000000..45370e1
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/popnet/population_models/exc_model.json
@@ -0,0 +1,9 @@
+{
+ "tau_m": 0.0429,
+ "record": true,
+ "v_min": -0.05,
+ "v_max": 0.02,
+ "dv": 0.0001,
+ "update_method": "gmres",
+ "approx_order": null
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/popnet/population_models/inh_model.json b/bmtk-vb/bmtk/utils/scripts/popnet/population_models/inh_model.json
new file mode 100644
index 0000000..e9505ea
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/popnet/population_models/inh_model.json
@@ -0,0 +1,9 @@
+{
+ "tau_m": 0.0299,
+ "record": true,
+ "v_min": -0.05,
+ "v_max": 0.02,
+ "dv": 0.0001,
+ "update_method": "gmres",
+ "approx_order": null
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/popnet/run_popnet.py b/bmtk-vb/bmtk/utils/scripts/popnet/run_popnet.py
new file mode 100644
index 0000000..ea46b3e
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/popnet/run_popnet.py
@@ -0,0 +1,24 @@
+import sys
+import os
+
+from bmtk.simulator import popnet
+
+from bmtk.analyzer.visualization.spikes import plot_rates_popnet
+
+def main(config_file):
+ configure = popnet.config.from_json(config_file)
+ configure.build_env()
+ network = popnet.PopNetwork.from_config(configure)
+ sim = popnet.PopSimulator.from_config(configure, network)
+ sim.run()
+
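+    # NOTE: the paths below are hard-coded; adjust them to match your
+    # network and output directories.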
+ cells_file = 'network/brunel_node_types.csv'
+ rates_file = 'output/spike_rates.csv'
+ plot_rates_popnet(cells_file, rates_file, model_keys='pop_name')
+
+
+if __name__ == '__main__':
+ if __file__ != sys.argv[-1]:
+ main(sys.argv[-1])
+ else:
+ main('config.json')
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/ExcToExc.json b/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/ExcToExc.json
new file mode 100644
index 0000000..2c63c08
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/ExcToExc.json
@@ -0,0 +1,2 @@
+{
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/ExcToInh.json b/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/ExcToInh.json
new file mode 100644
index 0000000..2c63c08
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/ExcToInh.json
@@ -0,0 +1,2 @@
+{
+}
diff --git a/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/InhToExc.json b/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/InhToExc.json
new file mode 100644
index 0000000..bfd870e
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/InhToExc.json
@@ -0,0 +1,3 @@
+{
+}
+
diff --git a/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/InhToInh.json b/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/InhToInh.json
new file mode 100644
index 0000000..bfd870e
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/InhToInh.json
@@ -0,0 +1,3 @@
+{
+}
+
diff --git a/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/input_ExcToExc.json b/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/input_ExcToExc.json
new file mode 100644
index 0000000..7a73a41
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/input_ExcToExc.json
@@ -0,0 +1,2 @@
+{
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/input_ExcToInh.json b/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/input_ExcToInh.json
new file mode 100644
index 0000000..7a73a41
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/popnet/synaptic_models/input_ExcToInh.json
@@ -0,0 +1,2 @@
+{
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/scripts/sonata.circuit_config.json b/bmtk-vb/bmtk/utils/scripts/sonata.circuit_config.json
new file mode 100644
index 0000000..a2c7969
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/sonata.circuit_config.json
@@ -0,0 +1,21 @@
+{
+ "manifest": {
+ "$BASE_DIR": "%%BASE_DIR%%",
+ "$COMPONENTS_DIR": "%%COMPONENTS_DIR%%",
+ "$NETWORK_DIR": "%%NETWORK_DIR%%"
+ },
+
+ "components": {
+ "morphologies_dir": "$COMPONENTS_DIR/morphologies",
+ "synaptic_models_dir": "$COMPONENTS_DIR/synaptic_models",
+ "mechanisms_dir":"$COMPONENTS_DIR/mechanisms",
+ "biophysical_neuron_models_dir": "$COMPONENTS_DIR/biophysical_neuron_templates",
+ "point_neuron_models_dir": "$COMPONENTS_DIR/point_neuron_templates"
+ },
+
+ "networks": {
+ "nodes": [],
+
+ "edges": []
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/scripts/sonata.simulation_config.json b/bmtk-vb/bmtk/utils/scripts/sonata.simulation_config.json
new file mode 100644
index 0000000..c619d29
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/scripts/sonata.simulation_config.json
@@ -0,0 +1,31 @@
+{
+ "manifest": {
+    "$BASE_DIR": "%%BASE_DIR%%",
+ "$OUTPUT_DIR": "$BASE_DIR/output"
+ },
+
+ "target_simulator":"%%TARGET_SIMULATOR%%",
+
+ "run": {
+ "tstop": 0.0,
+ "dt": 0.1
+ },
+
+ "conditions": {
+ "celsius": 34.0
+ },
+
+ "inputs": {},
+
+ "reports": {},
+
+ "output": {
+ "log_file": "log.txt",
+ "output_dir": "${OUTPUT_DIR}",
+ "spikes_file": "spikes.h5",
+ "spikes_file_csv": "spikes.csv",
+ "overwrite_output_dir": true
+ },
+
+ "network": "./circuit_config.json"
+}
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/sim_setup.py b/bmtk-vb/bmtk/utils/sim_setup.py
new file mode 100644
index 0000000..8301fb5
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/sim_setup.py
@@ -0,0 +1,443 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import shutil
+import json
+import h5py
+import re
+from subprocess import call
+from optparse import OptionParser
+from collections import OrderedDict
+
+# Order of the different sections of the config.json. Any non-listed items will be placed at the end of the config
+config_order = [
+ 'manifest',
+ 'target_simulator',
+ 'run',
+ 'conditions',
+ 'inputs',
+ 'components',
+ 'output',
+ 'reports',
+ 'networks'
+]
+
+local_path = os.path.dirname(os.path.realpath(__file__))
+scripts_path = os.path.join(local_path, 'scripts')
+
+'''
+order_lookup = {k: i for i, k in enumerate(config_order)}
+def sort_config_keys(ckey):
+ print(ckey)
+ exit()
+'''
+
+def get_network_block(circuit_config, network_dir):
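+    # Scan network_dir for SONATA files, grouping them by the naming
+    # convention <name>_nodes*, <name>_node_types*, <name>_edges*,
+    # <name>_edge_types*.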
+ net_nodes = {}
+ net_edges = {}
+ for f in os.listdir(network_dir):
+ if not os.path.isfile(os.path.join(network_dir, f)) or f.startswith('.'):
+ continue
+
+ if '_nodes' in f:
+ net_name = f[:f.find('_nodes')]
+ nodes_dict = net_nodes.get(net_name, {})
+ nodes_dict['nodes_file'] = os.path.join('$NETWORK_DIR', f)
+ net_nodes[net_name] = nodes_dict
+
+ elif '_node_types' in f:
+ net_name = f[:f.find('_node_types')]
+ nodes_dict = net_nodes.get(net_name, {})
+ nodes_dict['node_types_file'] = os.path.join('$NETWORK_DIR', f)
+ net_nodes[net_name] = nodes_dict
+
+ elif '_edges' in f:
+ net_name = f[:f.find('_edges')]
+ edges_dict = net_edges.get(net_name, {})
+ edges_dict['edges_file'] = os.path.join('$NETWORK_DIR', f)
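+            # Where possible, read the source/target network names from the
+            # edges file's HDF5 attributes.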
+ try:
+ edges_h5 = h5py.File(os.path.join(network_dir, f), 'r')
+ edges_dict['target'] = edges_h5['edges']['target_gid'].attrs['network']
+ edges_dict['source'] = edges_h5['edges']['source_gid'].attrs['network']
+ except Exception as e:
+ pass
+
+ net_edges[net_name] = edges_dict
+
+ elif '_edge_types' in f:
+ net_name = f[:f.find('_edge_types')]
+ edges_dict = net_edges.get(net_name, {})
+ edges_dict['edge_types_file'] = os.path.join('$NETWORK_DIR', f)
+ net_edges[net_name] = edges_dict
+
+ else:
+ print('Unknown file {}. Will have to enter by hand'.format(f))
+
+ for _, sect in net_nodes.items():
+ circuit_config['networks']['nodes'].append(sect)
+
+ for _, sect in net_edges.items():
+ circuit_config['networks']['edges'].append(sect)
+
+
+def build_components(circuit_config, components_path, scripts_path, with_examples):
+ for c_name, c_dir in circuit_config['components'].items():
+ dir_name = c_dir.replace('$COMPONENTS_DIR/', '')
+ dir_path = os.path.join(components_path, dir_name)
+
+ # create component directory
+ if not os.path.exists(dir_path):
+ os.makedirs(dir_path)
+
+ # Copy in files from scripts//
+ scripts_dir = os.path.join(scripts_path, dir_name)
+ if with_examples and os.path.isdir(scripts_dir):
+ shutil.rmtree(dir_path)
+ shutil.copytree(scripts_dir, dir_path)
+
+
+def build_circuit_env(base_dir, network_dir, components_dir, simulator, with_examples):
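+    # Load the sonata.circuit_config.json template, fill in its manifest
+    # variables, create/populate the component directories, and register any
+    # network files found in network_dir.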
+ simulator_path = os.path.join(scripts_path, simulator)
+
+ circuit_config = json.load(open(os.path.join(scripts_path, 'sonata.circuit_config.json')))
+ circuit_config['manifest']['$BASE_DIR'] = base_dir if base_dir == '.' else os.path.abspath(base_dir)
+ circuit_config['manifest']['$COMPONENTS_DIR'] = '$BASE_DIR/{}'.format(components_dir)
+
+ # Try to figure out the $NETWORK_DIR
+    if network_dir is None:
+        network_path = ''
+    elif os.path.isabs(network_dir):
+ # In case network_dir is an absolute path
+ network_path = network_dir
+ elif os.path.abspath(network_dir).startswith(os.path.abspath(base_dir)):
+ # If network_dir is in a subdir of base_dir then NETWORK_DIR=$BASE_DIR/path/to/network
+ network_path = os.path.abspath(network_dir).replace(os.path.abspath(base_dir), '$BASE_DIR')
+ else:
+ # if network_dir exists outside of the base_dir just reference the absolute path
+ network_path = os.path.abspath(network_dir)
+
+ circuit_config['manifest']['$NETWORK_DIR'] = network_path
+
+ # Initialize the components directories
+ build_components(circuit_config, os.path.join(base_dir, components_dir), simulator_path, with_examples)
+
+ # Parse the network directory
+ get_network_block(circuit_config, network_dir)
+
+ return circuit_config
+
+
+def build_simulation_env(base_dir, target_simulator, tstop, dt, reports):
+ simulation_config = json.load(open(os.path.join(scripts_path, 'sonata.simulation_config.json')))
+ simulation_config['manifest']['$BASE_DIR'] = base_dir if base_dir == '.' else os.path.abspath(base_dir)
+ simulation_config['target_simulator'] = target_simulator
+ simulation_config['run']['tstop'] = tstop
+ simulation_config['run']['dt'] = dt
+
+ if reports is not None:
+ for report_name, report_params in reports.items():
+ simulation_config['reports'][report_name] = report_params
+
+ return simulation_config
+
+
+def copy_config(base_dir, json_dict, config_file_name):
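+    # Write json_dict to <base_dir>/<config_file_name>, ordering the top-level
+    # sections by config_order (unlisted keys sort last).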
+ with open(os.path.join(base_dir, config_file_name), 'w') as outfile:
+ ordered_dict = OrderedDict(sorted(json_dict.items(),
+ key=lambda s: config_order.index(s[0]) if s[0] in config_order else 100))
+ json.dump(ordered_dict, outfile, indent=2)
+
+
+def copy_run_script(base_dir, simulator, run_script):
+ simulator_path = os.path.join(scripts_path, simulator)
+ shutil.copy(os.path.join(simulator_path, run_script), os.path.join(base_dir, run_script))
+
+
+def build_env_pointnet(base_dir='.', network_dir=None, reports=None, with_examples=True, tstop=1000.0, dt=0.001, **args):
+ simulator='pointnet'
+ target_simulator='NEST'
+ components_dir='point_components'
+
+ # Copy run script
+ copy_run_script(base_dir=base_dir, simulator=simulator, run_script='run_{}.py'.format(simulator))
+
+    # Build circuit_config and the components directory
+ circuit_config = build_circuit_env(base_dir=base_dir, network_dir=network_dir, components_dir=components_dir,
+ simulator=simulator, with_examples=with_examples)
+ copy_config(base_dir, circuit_config, 'circuit_config.json')
+
+ simulation_config = build_simulation_env(base_dir=base_dir, target_simulator=target_simulator, tstop=tstop, dt=dt,
+ reports=reports)
+ copy_config(base_dir, simulation_config, 'simulation_config.json')
+
+
+def build_env_bionet(base_dir='.', network_dir=None, reports=None, with_examples=True, tstop=1000.0, dt=0.001,
+ compile_mechanisms=True, **args):
+ simulator='bionet'
+ target_simulator='NEURON'
+ components_dir='biophys_components'
+
+ # Copy run script
+ copy_run_script(base_dir=base_dir, simulator=simulator, run_script='run_{}.py'.format(simulator))
+
+    # Build circuit_config and the components directory
+ circuit_config = build_circuit_env(base_dir=base_dir, network_dir=network_dir, components_dir=components_dir,
+ simulator=simulator, with_examples=with_examples)
+ copy_config(base_dir, circuit_config, 'circuit_config.json')
+ if compile_mechanisms:
+ cwd = os.getcwd()
+ os.chdir(os.path.join(base_dir, components_dir, 'mechanisms')) # circuit_config['components']['mechanisms_dir'])
+ try:
+ print(os.getcwd())
+ call(['nrnivmodl', 'modfiles'])
+ except Exception as e:
+ print('Was unable to compile mechanism in {}'.format(circuit_config['components']['mechanisms_dir']))
+ # print e.message
+ os.chdir(cwd)
+
+ # Build simulation config
+ simulation_config = build_simulation_env(base_dir=base_dir, target_simulator=target_simulator, tstop=tstop, dt=dt,
+ reports=reports)
+ simulation_config['run']['dL'] = args.get('dL', 20.0)
+ simulation_config['run']['spike_threshold'] = args.get('spike_threshold', -15.0)
+ simulation_config['run']['nsteps_block'] = args.get('nsteps_block', 5000)
+ simulation_config['conditions']['v_init'] = args.get('v_init', -80.0)
+ copy_config(base_dir, simulation_config, 'simulation_config.json')
+
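+# Example (a sketch): build a BioNet run environment programmatically,
+# assuming a SONATA network already exists in ./network (the directory names
+# here are illustrative):
+#
+#     from bmtk.utils.sim_setup import build_env_bionet
+#     build_env_bionet(base_dir='sim_bionet', network_dir='network',
+#                      tstop=2000.0, dt=0.1, compile_mechanisms=False)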
+
+def build_env_popnet(base_dir='.', network_dir=None, reports=None, with_examples=True, tstop=1000.0, dt=0.001, **args):
+ simulator='popnet'
+ target_simulator='DiPDE'
+ components_dir='pop_components'
+
+ # Copy run script
+ copy_run_script(base_dir=base_dir, simulator=simulator, run_script='run_{}.py'.format(simulator))
+
+    # Build circuit_config and the components directory
+ circuit_config = build_circuit_env(base_dir=base_dir, network_dir=network_dir, components_dir=components_dir,
+ simulator=simulator, with_examples=with_examples)
+ circuit_config['components']['population_models_dir'] = '$COMPONENTS_DIR/population_models'
+ # population_models_dir = os.path.join(base_dir, components_dir, 'population_models')
+ if with_examples:
+ models_dir = os.path.join(base_dir, components_dir, 'population_models')
+ if os.path.exists(models_dir):
+ shutil.rmtree(models_dir)
+ shutil.copytree(os.path.join(scripts_path, simulator, 'population_models'), models_dir)
+
+ copy_config(base_dir, circuit_config, 'circuit_config.json')
+
+ # Build simulation config
+ simulation_config = build_simulation_env(base_dir=base_dir, target_simulator=target_simulator, tstop=tstop, dt=dt,
+ reports=reports)
+    # PopNet doesn't produce spike files, so replace the spike outputs with a rates file.
+    # Iterate over a copy of the keys: deleting from a dict while iterating
+    # its live view raises a RuntimeError in Python 3.
+    for output_key in list(simulation_config['output'].keys()):
+        if output_key.startswith('spikes'):
+            del simulation_config['output'][output_key]
+ # simulation_config['output']['rates_file_csv'] = 'firing_rates.csv'
+ simulation_config['output']['rates_file'] = 'firing_rates.csv'
+
+ copy_config(base_dir, simulation_config, 'simulation_config.json')
+
+
+"""
+def build_env_bionet(base_dir='.', run_time=0.0, with_config=True, network_dir=None, with_cell_types=True,
+ compile_mechanisms=True, reports=None):
+ local_path = os.path.dirname(os.path.realpath(__file__))
+ scripts_path = os.path.join(local_path, 'scripts', 'bionet')
+
+ components_dir = os.path.join(base_dir, 'components')
+ component_paths = {
+ 'morphologies_dir': os.path.join(components_dir, 'biophysical', 'morphology'),
+ 'biophysical_models_dir': os.path.join(components_dir, 'biophysical', 'electrophysiology'),
+ 'mechanisms_dir': os.path.join(components_dir, 'mechanisms'),
+ 'point_models_dir': os.path.join(components_dir, 'intfire'),
+ 'synaptic_models_dir': os.path.join(components_dir, 'synaptic_models'),
+ 'templates_dir': os.path.join(components_dir, 'hoc_templates')
+ }
+ for path in component_paths.values():
+ if not os.path.exists(path):
+ os.makedirs(path)
+
+ if with_cell_types:
+ shutil.rmtree(component_paths['templates_dir'])
+ shutil.copytree(os.path.join(scripts_path, 'hoc_templates'), component_paths['templates_dir'])
+
+ shutil.rmtree(component_paths['mechanisms_dir'])
+ shutil.copytree(os.path.join(scripts_path, 'mechanisms'), component_paths['mechanisms_dir'])
+
+ shutil.rmtree(component_paths['synaptic_models_dir'])
+ shutil.copytree(os.path.join(scripts_path, 'synaptic_models'), component_paths['synaptic_models_dir'])
+
+ shutil.rmtree(component_paths['point_models_dir'])
+ shutil.copytree(os.path.join(scripts_path, 'intfire'), component_paths['point_models_dir'])
+
+ if compile_mechanisms:
+ cwd = os.getcwd()
+ os.chdir(component_paths['mechanisms_dir'])
+ try:
+ print(os.getcwd())
+ call(['nrnivmodl', 'modfiles'])
+ except Exception as e:
+ print('Was unable to compile mechanism in {}'.format(component_paths['mechanisms_dir']))
+ # print e.message
+ os.chdir(cwd)
+
+ shutil.copy(os.path.join(scripts_path, 'run_bionet.py'), os.path.join(base_dir, 'run_bionet.py'))
+
+ if with_config:
+ config_json = json.load(open(os.path.join(scripts_path, 'default_config.json')))
+ config_json['manifest']['$BASE_DIR'] = os.path.abspath(base_dir)
+ config_json['manifest']['$COMPONENTS_DIR'] = os.path.join('${BASE_DIR}', 'components')
+ config_json['run']['tstop'] = run_time
+
+ if network_dir is not None:
+ config_json['manifest']['$NETWORK_DIR'] = os.path.abspath(network_dir)
+
+ net_nodes = {}
+ net_edges = {}
+ for f in os.listdir(network_dir):
+ if not os.path.isfile(os.path.join(network_dir, f)) or f.startswith('.'):
+ continue
+
+ if '_nodes' in f:
+ net_name = f[:f.find('_nodes')]
+ nodes_dict = net_nodes.get(net_name, {'name': net_name})
+ nodes_dict['nodes_file'] = os.path.join('${NETWORK_DIR}', f)
+ net_nodes[net_name] = nodes_dict
+
+ elif '_node_types' in f:
+ net_name = f[:f.find('_node_types')]
+ nodes_dict = net_nodes.get(net_name, {'name': net_name})
+ nodes_dict['node_types_file'] = os.path.join('${NETWORK_DIR}', f)
+ net_nodes[net_name] = nodes_dict
+
+ elif '_edges' in f:
+ net_name = f[:f.find('_edges')]
+ edges_dict = net_edges.get(net_name, {'name': net_name})
+ edges_dict['edges_file'] = os.path.join('${NETWORK_DIR}', f)
+ try:
+ edges_h5 = h5py.File(os.path.join(network_dir, f), 'r')
+ edges_dict['target'] = edges_h5['edges']['target_gid'].attrs['network']
+ edges_dict['source'] = edges_h5['edges']['source_gid'].attrs['network']
+ except Exception as e:
+ pass
+
+ net_edges[net_name] = edges_dict
+
+ elif '_edge_types' in f:
+ net_name = f[:f.find('_edge_types')]
+ edges_dict = net_edges.get(net_name, {'name': net_name})
+ edges_dict['edge_types_file'] = os.path.join('${NETWORK_DIR}', f)
+ net_edges[net_name] = edges_dict
+
+ else:
+ print('Unknown file {}. Will have to enter by hand'.format(f))
+
+ for _, sect in net_nodes.items():
+ config_json['networks']['nodes'].append(sect)
+
+ for _, sect in net_edges.items():
+ config_json['networks']['edges'].append(sect)
+
+ if reports is not None:
+ for report_name, report_params in reports.items():
+ config_json['reports'][report_name] = report_params
+
+ ordered_dict = OrderedDict(sorted(config_json.items(),
+ key=lambda s: config_order.index(s[0]) if s[0] in config_order else 100))
+ with open(os.path.join(base_dir, 'config.json'), 'w') as outfile:
+ json.dump(ordered_dict, outfile, indent=2)
+"""
+
+
+if __name__ == '__main__':
+ def str_list(option, opt, value, parser):
+ setattr(parser.values, option.dest, value.split(','))
+
+ #def int_list(option, opt, value, parser):
+ # setattr(parser.values, option.dest, [int(v) for v in value.split(',')])
+
+ def parse_node_set(option, opt, value, parser):
+ try:
+ setattr(parser.values, option.dest, [int(v) for v in value.split(',')])
+ except ValueError as ve:
+ setattr(parser.values, option.dest, value)
+
+
+ parser = OptionParser(usage="Usage: python -m bmtk.utils.sim_setup [options] bionet|pointnet|popnet|mintnet")
+ parser.add_option('-b', '--base_dir', dest='base_dir', default='.', help='path of environment')
+ parser.add_option('-n', '--network_dir', dest='network_dir', default=None,
+ help="Use an exsting directory with network files.")
+    parser.add_option('-r', '--tstop', type='float', dest='tstop', default=1000.0)
+    parser.add_option('-d', '--dt', type='float', dest='dt', help='simulation time step dt', default=0.001)
+
+ # For membrane report
+ def membrane_report_parser(option, opt, value, parser):
+ parser.values.has_membrane_report = True
+ if ',' in value:
+ try:
+ setattr(parser.values, option.dest, [int(v) for v in value.split(',')])
+ except ValueError as ve:
+ setattr(parser.values, option.dest, value.split(','))
+
+ else:
+ setattr(parser.values, option.dest, value)
+
+ parser.add_option('--membrane_report', dest='has_membrane_report', action='store_true', default=False)
+ parser.add_option('--membrane_report-vars', dest='mem_rep_vars', type='string', action='callback',
+ callback=membrane_report_parser, default=[])
+ parser.add_option('--membrane_report-cells', dest='mem_rep_cells', type='string', action='callback',
+ callback=membrane_report_parser, default='all')
+ # parser.add_option('--membrane_report_file', dest='mem_rep_file', type='string', action='callback',
+ # callback=membrane_report_parser, default='$OUTPUT_DIR/cell_vars.h5')
+ parser.add_option('--membrane_report-sections', dest='mem_rep_secs', type='string', action='callback',
+ callback=membrane_report_parser, default='all')
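+
+    # Illustrative invocation (a sketch; the directory names are hypothetical):
+    #   python -m bmtk.utils.sim_setup -b sim_ch01 -n network --tstop 2000.0 \
+    #       --membrane_report --membrane_report-vars v,cai bionet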
+
+ options, args = parser.parse_args()
+ reports = {}
+
+ if options.has_membrane_report:
+ reports['membrane_report'] = {
+ 'module': 'membrane_report',
+ 'variable_name': options.mem_rep_vars,
+ 'cells': options.mem_rep_cells,
+ # 'file_name': options.mem_rep_file,
+ 'sections': options.mem_rep_secs,
+ }
+
+ target_sim = args[0].lower() if len(args) == 1 else None
+ if target_sim not in ['bionet', 'popnet', 'pointnet', 'mintnet']:
+        raise Exception('Must specify one target simulator. Options: "bionet", "pointnet", "popnet" or "mintnet"')
+
+ if target_sim == 'bionet':
+ build_env_bionet(base_dir=options.base_dir, network_dir=options.network_dir, tstop=options.tstop,
+ dt=options.dt, reports=reports)
+
+ elif target_sim == 'pointnet':
+ build_env_pointnet(base_dir=options.base_dir, network_dir=options.network_dir, tstop=options.tstop,
+ dt=options.dt, reports=reports)
+
+ elif target_sim == 'popnet':
+ build_env_popnet(base_dir=options.base_dir, network_dir=options.network_dir, tstop=options.tstop,
+ dt=options.dt, reports=reports)
diff --git a/bmtk-vb/bmtk/utils/sonata/__init__.py b/bmtk-vb/bmtk/utils/sonata/__init__.py
new file mode 100644
index 0000000..c236de1
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/sonata/__init__.py
@@ -0,0 +1,25 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .edge import Edge, EdgeSet
+from .file import File
+from .node import Node, NodeSet
diff --git a/bmtk-vb/bmtk/utils/sonata/__init__.pyc b/bmtk-vb/bmtk/utils/sonata/__init__.pyc
new file mode 100644
index 0000000..c20189c
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__init__.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/__init__.cpython-36.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000..fb4c692
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/__init__.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/__init__.cpython-37.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000..0a45475
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/__init__.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/column_property.cpython-36.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/column_property.cpython-36.pyc
new file mode 100644
index 0000000..760998e
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/column_property.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/column_property.cpython-37.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/column_property.cpython-37.pyc
new file mode 100644
index 0000000..5e45b26
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/column_property.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/edge.cpython-36.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/edge.cpython-36.pyc
new file mode 100644
index 0000000..78cc958
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/edge.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/edge.cpython-37.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/edge.cpython-37.pyc
new file mode 100644
index 0000000..e0c5a34
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/edge.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/file.cpython-36.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/file.cpython-36.pyc
new file mode 100644
index 0000000..f765815
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/file.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/file.cpython-37.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/file.cpython-37.pyc
new file mode 100644
index 0000000..5d76eda
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/file.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/file_root.cpython-36.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/file_root.cpython-36.pyc
new file mode 100644
index 0000000..b2ce4a4
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/file_root.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/file_root.cpython-37.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/file_root.cpython-37.pyc
new file mode 100644
index 0000000..a0e6927
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/file_root.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/group.cpython-36.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/group.cpython-36.pyc
new file mode 100644
index 0000000..65c372b
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/group.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/group.cpython-37.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/group.cpython-37.pyc
new file mode 100644
index 0000000..3194b72
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/group.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/node.cpython-36.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/node.cpython-36.pyc
new file mode 100644
index 0000000..ec874dc
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/node.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/node.cpython-37.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/node.cpython-37.pyc
new file mode 100644
index 0000000..a9c5d38
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/node.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/population.cpython-36.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/population.cpython-36.pyc
new file mode 100644
index 0000000..412a1da
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/population.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/population.cpython-37.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/population.cpython-37.pyc
new file mode 100644
index 0000000..ec5b709
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/population.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/types_table.cpython-36.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/types_table.cpython-36.pyc
new file mode 100644
index 0000000..e9019eb
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/types_table.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/types_table.cpython-37.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/types_table.cpython-37.pyc
new file mode 100644
index 0000000..fd5cb3b
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/types_table.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/utils.cpython-36.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/utils.cpython-36.pyc
new file mode 100644
index 0000000..20a79b1
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/utils.cpython-36.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/__pycache__/utils.cpython-37.pyc b/bmtk-vb/bmtk/utils/sonata/__pycache__/utils.cpython-37.pyc
new file mode 100644
index 0000000..eefccc4
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/__pycache__/utils.cpython-37.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/column_property.py b/bmtk-vb/bmtk/utils/sonata/column_property.py
new file mode 100644
index 0000000..34eaa5a
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/sonata/column_property.py
@@ -0,0 +1,103 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import h5py
+import pandas as pd
+
+
+class ColumnProperty(object):
+ """Representation of a column name and metadata from a hdf5 dataset, csv column, etc.
+
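+    Example (an illustrative sketch)::
+
+        >>> col = ColumnProperty('positions', dtype=float, dimension=3, nrows=100)
+        >>> col.name, col.dimension
+        ('positions', 3)
+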
+ """
+ def __init__(self, name, dtype, dimension, nrows=0, attrs=None):
+ self._name = name
+ self._dtype = dtype
+ self._dim = dimension
+ self._nrows = nrows
+ self._attrs = attrs or {}
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def dtype(self):
+ return self._dtype
+
+ @property
+ def dimension(self):
+ return self._dim
+
+ @property
+ def nrows(self):
+ return self._nrows
+
+ @property
+ def attributes(self):
+ return self._attrs
+
+ @classmethod
+ def from_h5(cls, hf_obj, name=None):
+ if isinstance(hf_obj, h5py.Dataset):
+ ds_name = name if name is not None else hf_obj.name.split('/')[-1]
+ ds_dtype = hf_obj.dtype
+
+            # If the dataset shape is in the form "(N, M)" then the dimension is M; if the shape is just "(N,)" the
+            # dimension is 1
+ dim = 1 if len(hf_obj.shape) < 2 else hf_obj.shape[1]
+ nrows = hf_obj.shape[0]
+ return cls(ds_name, ds_dtype, dim, nrows, attrs=hf_obj.attrs)
+
+ elif isinstance(hf_obj, h5py.Group):
+ columns = []
+ for name, ds in hf_obj.items():
+ if isinstance(ds, h5py.Dataset):
+ columns.append(ColumnProperty.from_h5(ds, name))
+ return columns
+
+ else:
+ raise Exception('Unable to convert hdf5 object {} to a property or list of properties.'.format(hf_obj))
+
+ @classmethod
+ def from_csv(cls, pd_obj, name=None):
+ if isinstance(pd_obj, pd.Series):
+ c_name = name if name is not None else pd_obj.name
+ c_dtype = pd_obj.dtype
+ return cls(c_name, c_dtype, 1)
+
+ elif isinstance(pd_obj, pd.DataFrame):
+ return [cls(name, pd_obj[name].dtype, 1) for name in pd_obj.columns]
+
+ else:
+ raise Exception('Unable to convert pandas object {} to a property or list of properties.'.format(pd_obj))
+
+ def __hash__(self):
+ return hash(self._name)
+
+    def __repr__(self):
+        return '{}({})'.format(self.name, self.dtype)
+
+ def __eq__(self, other):
+ if isinstance(other, ColumnProperty):
+ return self._name == other._name
+ else:
+ return self._name == other
diff --git a/bmtk-vb/bmtk/utils/sonata/column_property.pyc b/bmtk-vb/bmtk/utils/sonata/column_property.pyc
new file mode 100644
index 0000000..8fd0806
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/column_property.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/config.py b/bmtk-vb/bmtk/utils/sonata/config.py
new file mode 100644
index 0000000..fdb97ab
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/sonata/config.py
@@ -0,0 +1,341 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+import re
+import copy
+import datetime
+
+try:
+    string_types = basestring  # Python 2
+except NameError:  # Python 3
+    string_types = str
+
+
+class SonataConfig(dict):
+ def __init__(self, *args, **kwargs):
+ super(SonataConfig, self).__init__(*args, **kwargs)
+ self._env_built = False
+
+ @property
+ def run(self):
+ return self['run']
+
+ @property
+ def tstart(self):
+ return self.run.get('tstart', 0.0)
+
+ @property
+ def tstop(self):
+ return self.run['tstop']
+
+ @property
+ def dt(self):
+ return self.run.get('dt', 0.1)
+
+ @property
+ def block_step(self):
+ return self.run.get('nsteps_block', 5000)
+
+ @property
+ def conditions(self):
+ return self['conditions']
+
+ @property
+ def celsius(self):
+ return self.conditions['celsius']
+
+ @property
+ def v_init(self):
+ return self.conditions['v_init']
+
+ @property
+ def path(self):
+ return self['config_path']
+
+ @property
+ def output(self):
+ return self['output']
+
+ @property
+ def output_dir(self):
+ return self.output['output_dir']
+
+ @property
+ def overwrite_output(self):
+ return self.output['overwrite_output_dir']
+
+ @property
+ def log_file(self):
+ return self.output['log_file']
+
+ @property
+ def components(self):
+ return self.get('components', {})
+
+ @property
+ def morphologies_dir(self):
+ return self.components['morphologies_dir']
+
+ @property
+ def synaptic_models_dir(self):
+ return self.components['synaptic_models_dir']
+
+ @property
+ def point_neuron_models_dir(self):
+ return self.components['point_neuron_models_dir']
+
+ @property
+ def mechanisms_dir(self):
+ return self.components['mechanisms_dir']
+
+ @property
+ def biophysical_neuron_models_dir(self):
+ return self.components['biophysical_neuron_models_dir']
+
+ @property
+ def templates_dir(self):
+ return self.components.get('templates_dir', None)
+
+ @property
+ def with_networks(self):
+ return 'networks' in self and len(self.nodes) > 0
+
+ @property
+ def networks(self):
+ return self['networks']
+
+ @property
+ def nodes(self):
+ return self.networks.get('nodes', [])
+
+ @property
+ def edges(self):
+ return self.networks.get('edges', [])
+
+ def copy_to_output(self):
+ copy_config(self)
+
+ @staticmethod
+ def get_validator():
+ raise NotImplementedError
+
+ @classmethod
+ def from_json(cls, config_file, validate=False):
+ validator = cls.get_validator() if validate else None
+ return cls(from_json(config_file, validator))
+
+ @classmethod
+ def from_dict(cls, config_dict, validate=False):
+ validator = cls.get_validator() if validate else None
+ return cls(from_dict(config_dict, validator))
+
+ @classmethod
+ def from_yaml(cls, config_file, validate=False):
+ raise NotImplementedError
+
+ @property
+ def reports(self):
+ return self.get('reports', {})
+
+ @property
+ def inputs(self):
+ return self.get('inputs', {})
+
+ def get_modules(self, module_name):
+ return [report for report in self.reports.values() if report['module'] == module_name]
+
+ def build_env(self):
+ if self._env_built:
+ return
+
+ self.create_output_dir()
+ self.copy_to_output()
+ self._env_built = True
+
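+
+# Illustrative usage (a sketch; 'config.json' is a hypothetical path):
+#
+#   cfg = SonataConfig.from_json('config.json')
+#   print(cfg.tstop, cfg.dt)                 # run parameters, with defaults applied
+#   for name, report in cfg.reports.items():
+#       print(name, report['module'])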
+
+def from_json(config_file, validator=None):
+ """Builds and validates a configuration json file.
+
+ :param config_file: File object or path to a json file.
+ :param validator: A SimConfigValidator object to validate json file. Won't validate if set to None
+ :return: A dictionary, verified against json validator and with manifest variables resolved.
+ """
+    if hasattr(config_file, 'read'):  # a file-like object
+        conf = json.load(config_file)
+    elif isinstance(config_file, string_types):
+        conf = json.load(open(config_file, 'r'))
+    else:
+        raise Exception('{} is not a file or file path.'.format(config_file))
+
+ # insert file path into dictionary
+ if 'config_path' not in conf:
+ conf['config_path'] = os.path.abspath(config_file)
+
+ # Will resolve manifest variables and validate
+ return from_dict(conf, validator)
+
+
+def from_dict(config_dict, validator=None):
+ """Builds and validates a configuration json dictionary object. Best to directly use from_json when possible.
+
+ :param config_dict: Dictionary object
+ :param validator: A SimConfigValidator object to validate json file. Won't validate if set to None
+ :return: A dictionary, verified against json validator and with manifest variables resolved.
+ """
+ assert(isinstance(config_dict, dict))
+ conf = copy.deepcopy(config_dict) # Since the functions will mutate the dictionary we will copy just-in-case.
+
+ if 'config_path' not in conf:
+ conf['config_path'] = os.path.abspath(__file__)
+
+ # Build the manifest and resolve variables.
+ # TODO: Check that manifest exists
+ manifest = __build_manifest(conf)
+ conf['manifest'] = manifest
+ __recursive_insert(conf, manifest)
+
+ # In our work with Blue-Brain it was agreed that 'network' and 'simulator' parts of config may be split up into
+ # separate files. If this is the case we build each sub-file separately and merge into this one
+ for childconfig in ['network', 'simulation']:
+        if childconfig in conf and isinstance(conf[childconfig], string_types):
+ # Try to resolve the path of the network/simulation config files. If an absolute path isn't used find
+ # the file relative to the current config file. TODO: test if this will work on windows?
+            conf_str = conf[childconfig]
+            conf_path = conf_str if os.path.isabs(conf_str) else os.path.join(os.path.dirname(conf['config_path']), conf_str)
+
+ # Build individual json file and merge into parent.
+ child_json = from_json(conf_path)
+ del child_json['config_path'] # we don't want 'config_path' of parent being overwritten.
+ conf.update(child_json)
+
+ # Run the validator
+ if validator is not None:
+ validator.validate(conf)
+
+ return conf
+
+
+def copy_config(conf):
+ """Copy configuration file to different directory, with manifest variables resolved.
+
+ :param conf: configuration dictionary
+ """
+ output_dir = conf["output"]["output_dir"]
+ config_name = os.path.basename(conf['config_path'])
+ output_path = os.path.join(output_dir, config_name)
+ with open(output_path, 'w') as fp:
+ json.dump(conf, fp, indent=2)
+
+
+def __special_variables(conf):
+ """A list of preloaded variables to insert into the manifest, containing things like path to run-time directory,
+ configuration directory, etc.
+ """
+ pre_manifest = dict()
+ pre_manifest['$workingdir'] = os.path.dirname(os.getcwd())
+ if 'config_path' in conf:
+ pre_manifest['$configdir'] = os.path.dirname(conf['config_path']) # path of configuration file
+ pre_manifest['$configfname'] = conf['config_path']
+
+ dt_now = datetime.datetime.now()
+ pre_manifest['$time'] = dt_now.strftime('%H-%M-%S')
+ pre_manifest['$date'] = dt_now.strftime('%Y-%m-%d')
+ pre_manifest['$datetime'] = dt_now.strftime('%Y-%m-%d_%H-%M-%S')
+
+ return pre_manifest
+
+
+def __build_manifest(conf):
+ """Resolves the manifest section and resolve any internal variables"""
+ if 'manifest' not in conf:
+ return __special_variables(conf)
+
+ manifest = conf["manifest"]
+ resolved_manifest = __special_variables(conf)
+ resolved_keys = set()
+ unresolved_keys = set(manifest.keys())
+
+    # No longer using recursion since that can lead to an infinite loop if the person who writes the config file isn't
+    # careful. Also added code to allow for the ${VAR} format in case the user wants to use "$.../some_${MODEL}_here/..."
+ while unresolved_keys:
+ for key in unresolved_keys:
+ # Find all variables in manifest and see if they can be replaced by the value in resolved_manifest
+ value = __find_variables(manifest[key], resolved_manifest)
+
+            # If value no longer has variables, add the key-value pair to resolved_manifest and remove from unresolved_keys
+ if value.find('$') < 0:
+ resolved_manifest[key] = value
+ resolved_keys.add(key)
+
+ # remove resolved key-value pairs from set, and make sure at every iteration unresolved_keys shrinks to prevent
+ # infinite loops
+ n_unresolved = len(unresolved_keys)
+ unresolved_keys -= resolved_keys
+ if n_unresolved == len(unresolved_keys):
+ msg = "Unable to resolve manifest variables: {}".format(unresolved_keys)
+ raise Exception(msg)
+
+ return resolved_manifest
+
+
+def __recursive_insert(json_obj, manifest):
+ """Loop through the config and substitute the path variables (e.g.: $MY_DIR) with the values from the manifest
+
+ :param json_obj: A json dictionary object that may contain variables needing to be resolved.
+ :param manifest: A dictionary of variable values
+    :return: A new json dictionary config with variables resolved
+ """
+    if isinstance(json_obj, string_types):
+ return __find_variables(json_obj, manifest)
+
+ elif isinstance(json_obj, list):
+ new_list = []
+ for itm in json_obj:
+ new_list.append(__recursive_insert(itm, manifest))
+ return new_list
+
+ elif isinstance(json_obj, dict):
+ for key, val in json_obj.items():
+ if key == 'manifest':
+ continue
+ json_obj[key] = __recursive_insert(val, manifest)
+
+ return json_obj
+
+ else:
+ return json_obj
+
+
+def __find_variables(json_str, manifest):
+ """Replaces variables (i.e. $VAR, ${VAR}) with their values from the manifest.
+
+    :param json_str: a json string that may contain none, one or multiple variables
+ :param manifest: dictionary of variable lookup values
+ :return: json_str with resolved variables. Won't resolve variables that don't exist in manifest.
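+
+    Example (an illustrative sketch, not from the original source)::
+
+        >>> __find_variables('${BASE_DIR}/output', {'$BASE_DIR': '/sim'})
+        '/sim/output'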
+ """
+    variables = [m for m in re.finditer(r'\$\{?[\w]+\}?', json_str)]
+ for var in variables:
+ var_lookup = var.group()
+ if var_lookup.startswith('${') and var_lookup.endswith('}'):
+ # replace ${VAR} with $VAR
+ var_lookup = "$" + var_lookup[2:-1]
+ if var_lookup in manifest:
+ json_str = json_str.replace(var.group(), manifest[var_lookup])
+
+ return json_str
diff --git a/bmtk-vb/bmtk/utils/sonata/edge.py b/bmtk-vb/bmtk/utils/sonata/edge.py
new file mode 100644
index 0000000..435dd02
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/sonata/edge.py
@@ -0,0 +1,90 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+class EdgeSet(object):
+ def __init__(self, edge_ids, population):
+ self._edge_ids = edge_ids
+ self._population = population
+ self._n_edges = len(self._edge_ids)
+ self.__itr = 0
+
+ def __iter__(self):
+ self.__itr = 0
+ return self
+
+    def next(self):
+        if self.__itr >= self._n_edges:
+            raise StopIteration
+
+        next_edge = self._population.iloc(self._edge_ids[self.__itr])
+        self.__itr += 1
+        return next_edge
+
+    # Python 3 iterates with __next__; alias it so EdgeSet works under both versions
+    __next__ = next
+
+
+class Edge(object):
+ def __init__(self, src_node_id, trg_node_id, source_pop, target_pop, group_id, group_props, edge_types_props):
+ self._src_node_id = src_node_id
+ self._trg_node_id = trg_node_id
+ self._source_population = source_pop
+ self._target_population = target_pop
+ self._group_props = group_props
+ self._group_id = group_id
+ self._edge_type_props = edge_types_props
+
+ @property
+ def source_node_id(self):
+ return self._src_node_id
+
+ @property
+ def target_node_id(self):
+ return self._trg_node_id
+
+ @property
+ def source_population(self):
+ return self._source_population
+
+ @property
+ def target_population(self):
+ return self._target_population
+
+ @property
+ def group_id(self):
+ return self._group_id
+
+ @property
+ def edge_type_id(self):
+ return self._edge_type_props['edge_type_id']
+
+ @property
+ def dynamics_params(self):
+ raise NotImplementedError
+
+ def __getitem__(self, prop_key):
+ if prop_key in self._group_props:
+ return self._group_props[prop_key]
+ elif prop_key in self._edge_type_props:
+ return self._edge_type_props[prop_key]
+ else:
+ raise KeyError('Property {} not found in edge.'.format(prop_key))
+
+ def __contains__(self, prop_key):
+ return prop_key in self._group_props or prop_key in self._edge_type_props
\ No newline at end of file
diff --git a/bmtk-vb/bmtk/utils/sonata/edge.pyc b/bmtk-vb/bmtk/utils/sonata/edge.pyc
new file mode 100644
index 0000000..6812aa0
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/edge.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/file.py b/bmtk-vb/bmtk/utils/sonata/file.py
new file mode 100644
index 0000000..d70f66a
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/sonata/file.py
@@ -0,0 +1,124 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from . import utils
+from .file_root import NodesRoot, EdgesRoot
+
+
+class File(object):
+ def __init__(self, data_files, data_type_files, mode='r', gid_table=None, require_magic=True):
+ if mode != 'r':
+ raise Exception('Currently only read mode is supported.')
+
+ self._data_files = utils.listify(data_files)
+ self._data_type_files = utils.listify(data_type_files)
+
+ # Open and check HDF5 file(s)
+ self._h5_file_handles = [utils.load_h5(f, mode) for f in self._data_files]
+        if require_magic:
+            # map() is lazy under Python 3, so loop explicitly to check the magic attribute in each h5 file
+            for h5 in self._h5_file_handles:
+                utils.check_magic(h5)
+
+ # Check version number
+ avail_versions = set(map(utils.get_version, self._h5_file_handles))
+ if len(avail_versions) == 1:
+ self._version = list(avail_versions)[0]
+ elif len(avail_versions) > 1:
+ # TODO: log as warning
+ print('Warning: Passing in multiple hdf5 files of different version')
+ self._version = ','.join(avail_versions)
+ else:
+ self._version = utils.VERSION_NA
+
+ self._csv_file_handles = [(f, utils.load_csv(f)) for f in self._data_type_files]
+
+ self._has_nodes = False
+ self._nodes = None # /nodes object
+ self._nodes_groups = [] # list of all hdf5 /nodes group
+ self._node_types_dataframes = [] # list of all csv node-types dataframe
+
+ self._has_edges = False
+ self._edges = None # /edges object
+ self._edges_groups = [] # list of all hdf5 /edges group
+ self._edge_types_dataframes = [] # list of csv edge-types dataframes
+
+ # for multiple inputs sort into edge files and node files
+ self._sort_types_file()
+ self._sort_h5_files()
+
+        if not (self._has_nodes or self._has_edges):
+            raise Exception('Could not find any nodes or edges in the given file(s).')
+
+ if self._has_nodes:
+ self._nodes = NodesRoot(nodes=self._nodes_groups, node_types=self._node_types_dataframes, gid_table=gid_table)
+
+ if self._has_edges:
+ self._edges = EdgesRoot(edges=self._edges_groups, edge_types=self._edge_types_dataframes)
+
+ @property
+ def nodes(self):
+ return self._nodes
+
+ @property
+ def has_nodes(self):
+ return self._has_nodes
+
+ @property
+ def edges(self):
+ return self._edges
+
+ @property
+ def has_edges(self):
+ return self._has_edges
+
+ @property
+ def version(self):
+ return self._version
+
+ def _sort_types_file(self):
+        # TODO: node/edge type_id column names should not be hardcoded
+ for filename, df in self._csv_file_handles:
+ has_node_type_id = 'node_type_id' in df.columns
+ has_edge_type_id = 'edge_type_id' in df.columns
+ if has_node_type_id and has_edge_type_id:
+ # TODO: users may be creating their own dataframe and thus not have a filename
+                raise Exception('types file {} has both node_type_id and edge_type_id columns.'.format(filename))
+ elif has_node_type_id:
+ self._node_types_dataframes.append(df)
+ elif has_edge_type_id:
+ self._edge_types_dataframes.append(df)
+ else:
+                # TODO: if strict this should fail immediately
+ print('Warning: Could not determine if file {} was an edge-types or node-types file. Ignoring'.format(filename))
+
+ def _sort_h5_files(self):
+ for h5 in self._h5_file_handles:
+ has_nodes = '/nodes' in h5
+ has_edges = '/edges' in h5
+ if not (has_nodes or has_edges):
+ print('File {} contains neither nodes nor edges. Ignoring'.format(h5.filename))
+ else:
+ if has_nodes:
+ self._nodes_groups.append(h5)
+ self._has_nodes = True
+ if has_edges:
+ self._edges_groups.append(h5)
+ self._has_edges = True
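+
+
+# Illustrative usage (a sketch; the file names are hypothetical):
+#
+#   sonata_file = File(data_files='network/v1_nodes.h5',
+#                      data_type_files='network/v1_node_types.csv')
+#   if sonata_file.has_nodes:
+#       print(sonata_file.nodes.population_names)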
diff --git a/bmtk-vb/bmtk/utils/sonata/file.pyc b/bmtk-vb/bmtk/utils/sonata/file.pyc
new file mode 100644
index 0000000..a118f7b
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/file.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/file_root.py b/bmtk-vb/bmtk/utils/sonata/file_root.py
new file mode 100644
index 0000000..071e88c
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/sonata/file_root.py
@@ -0,0 +1,301 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import sys
+
+import h5py
+import pandas as pd
+import numpy as np
+
+from . import utils
+from .population import NodePopulation, EdgePopulation
+from .types_table import NodeTypesTable, EdgeTypesTable
+
+
+class FileRoot(object):
+ """Base class for both /nodes and /edges root group in h5 file"""
+ def __init__(self, root_name, h5_files, h5_mode, csv_files):
+ """
+ :param root_name: should either be 'nodes' or 'edges'
+ :param h5_files: file (or list of files) containing nodes/edges
+ :param h5_mode: currently only supporting 'r' mode in h5py
+ :param csv_files: file (or list of files) containing node/edge types
+ """
+ self._root_name = root_name
+ self._h5_handles = [utils.load_h5(f, h5_mode) for f in utils.listify(h5_files)]
+ self._csv_handles = [(f, utils.load_csv(f)) for f in utils.listify(csv_files)]
+
+ # merge and create a table of the types table(s)
+ self._types_table = None
+ self._build_types_table()
+
+ # population_name->h5py.Group table (won't instantiate the population)
+ self._populations_groups = {}
+ self._store_groups()
+
+        # A map between population_name -> Population object. Population objects aren't created until requested, in
+        # case the user wants to split populations among MPI ranks (instantiation will build node/edge indices and
+        # other overhead).
+ self._populations_cache = {}
+
+ self.check_format()
+
+ @property
+ def root_name(self):
+ return self._root_name
+
+ @property
+ def population_names(self):
+ return list(self._populations_groups.keys())
+
+ @property
+ def populations(self):
+ return [self[name] for name in self.population_names]
+
+ @property
+ def types_table(self):
+ return self._types_table
+
+ @types_table.setter
+ def types_table(self, types_table):
+ self._types_table = types_table
+
+ def _build_types_table(self):
+ raise NotImplementedError
+
+ def _store_groups(self):
+ """Create a map between group population to their h5py.Group handle"""
+ for h5handle in self._h5_handles:
+ assert(self.root_name in h5handle.keys())
+ for pop_name, pop_group in h5handle[self._root_name].items():
+ if pop_name in self._populations_groups:
+ raise Exception('Multiple {} populations with name {}.'.format(self._root_name, pop_name))
+ self._populations_groups[pop_name] = pop_group
+
+ def _build_population(self, pop_name, pop_group):
+ raise NotImplementedError
+
+ def get_population(self, population_name, default=None):
+ """Return a population group object based on population's name"""
+ if population_name in self:
+ return self[population_name]
+ else:
+            # the default is needed for EdgesRoot.get_populations
+ return default
+
+ def check_format(self):
+ if len(self._h5_handles) == 0:
+ raise Exception('No {} hdf5 files specified.'.format(self.root_name))
+
+ if len(self._csv_handles) == 0:
+ raise Exception('No {} types csv files specified.'.format(self.root_name))
+
+ def __contains__(self, population_name):
+ # TODO: Add condition if user passes in io.Population object
+ return population_name in self.population_names
+
+ def __getitem__(self, population_name):
+ if population_name not in self:
+ raise Exception('{} does not contain a population with name {}.'.format(self.root_name, population_name))
+
+ if population_name in self._populations_cache:
+ return self._populations_cache[population_name]
+ else:
+ h5_grp = self._populations_groups[population_name]
+ pop_obj = self._build_population(population_name, h5_grp)
+ self._populations_cache[population_name] = pop_obj
+ return pop_obj
+
+
+class NodesRoot(FileRoot):
+ def __init__(self, nodes, node_types, mode='r', gid_table=None):
+ super(NodesRoot, self).__init__('nodes', h5_files=nodes, h5_mode=mode, csv_files=node_types)
+
+ # load the gid <--> (node_id, population) map if specified.
+ self._gid_table = gid_table
+ self._gid_table_groupby = {}
+ self._has_gids = False
+        # TODO: Should we allow the gid-table to be built into '/nodes' h5 groups, or must it always be a separate file?
+ if gid_table is not None:
+ self.set_gid_table(gid_table)
+
+ @property
+ def has_gids(self):
+ return self._has_gids
+
+ @property
+ def node_types_table(self):
+ return self.types_table
+
+ def set_gid_table(self, gid_table, force=False):
+ """Adds a map from a gids <--> (node_id, population) based on specification.
+
+ :param gid_table: An h5 file/group containing map specifications
+ :param force: Set to true to have it overwrite any exsiting gid table (default False)
+ """
+ assert(gid_table is not None)
+ if self.has_gids and not force:
+ raise Exception('gid table already exists (use force=True to overwrite)')
+
+ self._gid_table = utils.load_h5(gid_table, 'r')
+ # TODO: validate that the correct columns/dtypes exists.
+ gid_df = pd.DataFrame()
+ gid_df['gid'] = pd.Series(data=self._gid_table['gid'], dtype=self._gid_table['gid'].dtype)
+ gid_df['node_id'] = pd.Series(data=self._gid_table['node_id'], dtype=self._gid_table['node_id'].dtype)
+ gid_df['population'] = pd.Series(data=self._gid_table['population'])
+ population_names_ds = self._gid_table['population_names']
+ for pop_id, subset in gid_df.groupby(by='population'):
+ pop_name = population_names_ds[pop_id]
+ self._gid_table_groupby[pop_name] = subset
+ self._has_gids = True
+
+ def generate_gids(self, file_name, gids=None, force=False):
+ """Creates a gid <--> (node_id, population) table based on sonnet specifications.
+
+ Generating gids will take some time and so not recommend to call this during the simulation. Instead save
+ the file to the disk and pass in h5 file during the simulation (using gid_table parameter). In fact if you're
+ worried about efficeny don't use this method.
+
+ :param file_name: Name of h5 file to save gid map to.
+ :param gids: rule/list of gids to use
+ :param force: set to true to overwrite existing gid map (default False).
+ """
+
+        # TODO: This is very inefficient; fix (although not a priority as this function should be called sparingly)
+ # TODO: Allow users to pass in a list/function to determine gids
+ # TODO: We should use an enumerated lookup table for population ds instead of storing strings
+ # TODO: Move this to a utils function rather than a File
+ if self.has_gids and not force:
+ raise Exception('Nodes already have a gid table. Use force=True to overwrite existing gids.')
+
+ dir_name = os.path.dirname(os.path.abspath(file_name))
+ if not os.path.exists(dir_name):
+ os.makedirs(dir_name)
+
+ with h5py.File(file_name, 'w') as h5:
+ # TODO: should we use mode 'x', or give an option to overwrite existing files
+ n_nodes = 0
+ ascii_len = 0 # store max population name for h5 fixed length strings
+ # Find population names and the total size of every population
+ for node_pop in self.populations:
+ n_nodes += len(node_pop)
+ name_nchars = len(node_pop.name)
+ ascii_len = ascii_len if ascii_len >= name_nchars else name_nchars
+
+ # node_id and gid datasets should just be unsigned integers
+ h5.create_dataset(name='gid', shape=(n_nodes,), dtype=np.uint64)
+ h5.create_dataset(name='node_id', shape=(n_nodes,), dtype=np.uint64)
+ # TODO: determine population precisions from num of populations
+ h5.create_dataset(name='population', shape=(n_nodes,), dtype=np.uint16)
+
+ # Create a lookup table for pop-name
+ pop_name_list = [pname for pname in self.population_names]
+ if utils.using_py3:
+ dt = h5py.special_dtype(vlen=str) # python 3
+ else:
+ dt = h5py.special_dtype(vlen=unicode) # python 2
+ h5.create_dataset(name='population_names', shape=(len(pop_name_list),), dtype=dt)
+            # For whatever reason, passing the data directly to create_dataset doesn't work in h5py, so write each
+            # name individually
+ for i, n in enumerate(pop_name_list):
+ h5['population_names'][i] = n
+
+ # write each (gid, node_id, population)
+ indx = 0
+ for node_pop in self.populations:
+ # TODO: Block write if special gid generator isn't being used
+ # TODO: Block write populations at least
+ pop_name = node_pop.name # encode('ascii', 'ignore')
+ pop_id = pop_name_list.index(pop_name)
+ for node in node_pop:
+ h5['node_id'][indx] = node.node_id
+ h5['population'][indx] = pop_id
+ h5['gid'][indx] = indx
+ indx += 1
+
+ # pass gid table to current nodes
+ self.set_gid_table(h5)
+
+ def _build_types_table(self):
+ self.types_table = NodeTypesTable()
+ for _, csvhandle in self._csv_handles:
+ self.types_table.add_table(csvhandle)
+
+ def _build_population(self, pop_name, pop_group):
+ return NodePopulation(pop_name, pop_group, self.node_types_table)
+
+ def __getitem__(self, population_name):
+        # If there is a gid map then we must pass it into the population
+ pop_obj = super(NodesRoot, self).__getitem__(population_name)
+ if self.has_gids and (not pop_obj.has_gids) and (population_name in self._gid_table_groupby):
+ pop_obj.add_gids(self._gid_table_groupby[population_name])
+
+ return pop_obj
+
+
+class EdgesRoot(FileRoot):
+ def __init__(self, edges, edge_types, mode='r'):
+ super(EdgesRoot, self).__init__(root_name='edges', h5_files=edges, h5_mode=mode, csv_files=edge_types)
+
+ @property
+ def edge_types_table(self):
+ return self.types_table
+
+ def get_populations(self, name=None, source=None, target=None):
+ """Find all populations with matching criteria, either using the population name (which will return a list
+ of size 0 or 1) or based on the source/target population.
+
+        To return a list of all populations just use the populations property.
+
+        :param name: (str) name of population
+        :param source: (str or NodePopulation) return edges whose source nodes belong to the matching population
+        :param target: (str or NodePopulation) return edges whose target nodes belong to the matching population
+        :return: A (potentially empty) list of EdgePopulation objects filtered by the criteria.
+ """
+ assert((name is not None) ^ (source is not None or target is not None))
+ if name is not None:
+ return [self[name]]
+
+ else:
+ # TODO: make sure groups aren't built unless they are a part of the results
+ selected_pops = self.population_names
+ if source is not None:
+ # filter out only edges with given source population
+ source = source.name if isinstance(source, NodePopulation) else source
+ selected_pops = [name for name in selected_pops
+ if EdgePopulation.get_source_population(self._populations_groups[name]) == source]
+ if target is not None:
+ # filter out by target population
+ target = target.name if isinstance(target, NodePopulation) else target
+ selected_pops = [name for name in selected_pops
+ if EdgePopulation.get_target_population(self._populations_groups[name]) == target]
+
+ return [self[name] for name in selected_pops]
+
+ def _build_types_table(self):
+ self.types_table = EdgeTypesTable()
+ for _, csvhandle in self._csv_handles:
+ self.edge_types_table.add_table(csvhandle)
+
+ def _build_population(self, pop_name, pop_group):
+ return EdgePopulation(pop_name, pop_group, self.edge_types_table)
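+
+
+# Illustrative usage (a sketch; the file names and population names are hypothetical):
+#
+#   nodes = NodesRoot(nodes='v1_nodes.h5', node_types='v1_node_types.csv')
+#   edges = EdgesRoot(edges='v1_v1_edges.h5', edge_types='v1_edge_types.csv')
+#   onto_v1 = edges.get_populations(target='v1')   # all edge populations targeting the 'v1' nodes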
diff --git a/bmtk-vb/bmtk/utils/sonata/file_root.pyc b/bmtk-vb/bmtk/utils/sonata/file_root.pyc
new file mode 100644
index 0000000..ef37054
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/file_root.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/group.py b/bmtk-vb/bmtk/utils/sonata/group.py
new file mode 100644
index 0000000..4264d45
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/sonata/group.py
@@ -0,0 +1,416 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import pandas as pd
+
+from .column_property import ColumnProperty
+from .node import Node, NodeSet
+from .edge import Edge, EdgeSet
+
+
+class Group(object):
+    """A container for a group of nodes/edges within a population.
+
+    A node or edge population will have one or more groups, each with a unique identifier. All members of a group
+    share the same columns and datatypes; thus each group is essentially a different model.
+    """
+
+ def __init__(self, group_id, h5_group, parent):
+ self._group_id = int(group_id)
+ self._parent = parent
+ self._types_table = parent.types_table
+ self._h5_group = h5_group
+ self._types_index_col = self._types_table.index_column_name
+
+ self._group_columns = ColumnProperty.from_h5(h5_group)
+ # TODO: combine group_columns, group_column_names and group_columns_map, doesn't need to be 3 structures
+ self._group_column_map = {col.name: col for col in self._group_columns}
+ self._group_column_names = set(col.name for col in self._group_columns)
+ self._group_table = {prop: h5_group[prop.name] for prop in self._group_columns}
+ self._ncolumns = len(self._group_columns)
+
+ self._all_columns = self._group_columns + self._types_table.columns
+ self._all_column_names = set(col.name for col in self._all_columns)
+
+ self._nrows = 0 # number of group members
+
+ # For storing dynamics_params subgroup (if it exists)
+ self._has_dynamics_params = 'dynamics_params' in self._h5_group and len(self._h5_group['dynamics_params']) > 0
+ self._dynamics_params_columns = []
+
+ # An index of all the rows in parent population that map onto a member of this group
+ self._parent_indicies = None # A list of parent rows indicies
+ self._parent_indicies_built = False
+
+ self.check_format()
+
+ @property
+ def group_id(self):
+ return self._group_id
+
+ @property
+ def has_dynamics_params(self):
+ return False
+
+ @property
+ def columns(self):
+ return self._group_columns
+
+ @property
+ def group_columns(self):
+ return self._group_columns
+
+ @property
+ def all_columns(self):
+ return self._all_columns
+
+ @property
+ def has_gids(self):
+ return self._parent.has_gids
+
+ @property
+ def parent(self):
+ return self._parent
+
+ def get_dataset(self, column_name):
+ return self._group_table[column_name]
+
+ def column(self, column_name, group_only=False):
+ if column_name in self._group_column_map:
+ return self._group_column_map[column_name]
+ elif not group_only and column_name in self._types_table.columns:
+ return self._types_table.column(column_name)
+        else:
+            raise KeyError(column_name)
+
+ def check_format(self):
+ # Check that all the properties have the same number of rows
+ col_counts = [col.nrows for col in self._group_columns + self._dynamics_params_columns]
+ if len(set(col_counts)) > 1:
+            # TODO: Would be nice to warn the user which datasets have different sizes
+            raise Exception('properties in {}/{} have different lengths'.format(self._parent.name, self._group_id))
+ elif len(set(col_counts)) == 1:
+ self._nrows = col_counts[0]
+
+ def build_indicies(self, force=False):
+ raise NotImplementedError
+
+ def to_dataframe(self):
+ raise NotImplementedError
+
+ def get_values(self, property_name, all_rows=False):
+ """Returns all values for a group property.
+
+ Note that a row within a group may not have a corresponding node/edge, or they may have a different order or
+ multiple node/edges may share the same group row. Setting all_rows=False will return all the values as you
+ see if you iterated through all the population's items. Setting all_rows=True just returns the data as a
+ list as they appear in the dataset (will be faster).
+
+ :param property_name: Name of dataset property/column to fetch.
+ :param all_rows: Set false to return order in which they appear in population, false to return entire dataset
+ :return: A list of values for the given column name.
+ """
+ raise NotImplementedError
+
+ def __len__(self):
+ return self._nrows
+
+ def __getitem__(self, group_index):
+ group_props = {}
+ for cname, h5_obj in self._group_table.items():
+ group_props[cname] = h5_obj[group_index]
+ return group_props
+
+ def __contains__(self, prop_name):
+ """Search that a column name exists in this group"""
+ return prop_name in self._group_column_names
+
+
+class NodeGroup(Group):
+ def __init__(self, group_id, h5_group, parent):
+ super(NodeGroup, self).__init__(group_id, h5_group, parent)
+        # Note: Don't call build_indicies right away, so users can call __getitem__ without having to load all the
+        # node_ids
+
+ @property
+ def node_ids(self):
+ self.build_indicies()
+ return self._parent.inode_ids(self._parent_indicies)
+
+ @property
+ def node_type_ids(self):
+ self.build_indicies()
+ return self._parent.inode_type_ids(self._parent_indicies)
+
+ @property
+ def gids(self):
+ self.build_indicies()
+ return self._parent.igids(self._parent_indicies)
+
+ def build_indicies(self, force=False):
+ if self._parent_indicies_built and not force:
+ return
+
+        # TODO: Check for the special case where there is only one group
+        # TODO: If memory becomes an issue on very large node sets (10's of millions) consider using a generator
+        # The actual building of the population->group indices is pushed onto the parent population
+ self._parent_indicies = self._parent.group_indicies(self.group_id, build_cache=True)
+ self._parent_indicies_built = True
+
+ def get_values(self, property_name, filtered_indicies=True):
+ self.build_indicies()
+ # TODO: Check if property_name is node_id, node_type, or gid
+
+ if property_name in self._group_columns:
+ if not filtered_indicies:
+ # Just return all values in dataset
+ return np.array(self._group_table[property_name])
+ else:
+ # Return only those values for group indicies with associated nodes
+ grp_indicies = self._parent.igroup_indicies(self._parent_indicies)
+                # It is possible that the group index is unordered or contains duplicates, which will cause h5py
+                # slicing to fail; thus convert to a numpy array first
+                # TODO: loading the entire table is wasteful when the filtered set is small; consider building it lazily
+ tmp_array = np.array(self._group_table[property_name])
+ return tmp_array[grp_indicies]
+
+ elif property_name in self._parent.node_types_table.columns:
+ # For properties that come from node-types table we need to build the results from scratch
+ # TODO: Need to performance test, I think this code could be optimized.
+ node_types_table = self._parent.node_types_table
+ nt_col = node_types_table.column(property_name)
+ tmp_array = np.empty(shape=len(self._parent_indicies), dtype=nt_col.dtype)
+ for i, ntid in enumerate(self.node_type_ids):
+ tmp_array[i] = node_types_table[ntid][property_name]
+
+ return tmp_array
+
+ def to_dataframe(self):
+ self.build_indicies()
+
+ # Build a dataframe of group properties
+ # TODO: Include dynamics_params?
+ properties_df = pd.DataFrame()
+ for col in self._group_columns:
+ if col.dimension > 1:
+ for i in range(col.dimension):
+ # TODO: see if column name exists in the attributes
+ col_name = '{}.{}'.format(col.name, i)
+ properties_df[col_name] = pd.Series(self._h5_group[col.name][:, i])
+ else:
+ properties_df[col.name] = pd.Series(self._h5_group[col.name])
+
+ # Build a dataframe of parent node (node_id, gid, node_types, etc)
+ root_df = pd.DataFrame()
+ root_df['node_type_id'] = pd.Series(self.node_type_ids)
+ root_df['node_id'] = pd.Series(self.node_ids)
+ root_df['node_group_index'] = pd.Series(self._parent.igroup_indicies(self._parent_indicies)) # used as pivot
+ if self._parent.has_gids:
+ root_df['gid'] = self.gids
+
+ # merge group props df with parent df
+ results_df = root_df.merge(properties_df, how='left', left_on='node_group_index', right_index=True)
+ results_df = results_df.drop('node_group_index', axis=1)
+
+ # Build node_types dataframe and merge
+ node_types_df = self._parent.node_types_table.to_dataframe()
+ # remove properties that exist in the group
+ node_types_cols = [c.name for c in self._parent.node_types_table.columns if c not in self._group_columns]
+ node_types_df = node_types_df[node_types_cols]
+
+ # TODO: consider caching these results
+ return results_df.merge(node_types_df, how='left', left_on='node_type_id', right_index=True)
+
+ def filter(self, **filter_props):
+ """Filter all nodes in the group by key=value pairs.
+
+ The filter specifications may apply to either node_type or group column properties. Currently only equality
+ comparisons are supported. An intersection (logical and) is taken over the different filter pairs, producing
+ a generator of all nodes matching every filter.
+
+ for node in filter(pop_name='VIp', depth=10.0):
+ assert(node['pop_name'] == 'VIp' and node['depth'] == 10.0)
+
+ :param filter_props: keys and their values to filter nodes on.
+ :return: A generator that produces all valid nodes within the group with matching key==value pairs.
+ """
+ # TODO: Integrate this with NodeSet.
+ self.build_indicies()
+ node_types_table = self._parent.node_types_table
+ node_type_filter = set(node_types_table.node_type_ids) # list of valid node_type_ids
+ type_filter = False
+ group_prop_filter = {} # list of 'prop_name'==prov_val for group datasets
+ group_filter = False
+
+ # Build key==value lists
+ for filter_key, filter_val in filter_props.items():
+ # TODO: Check if node_type_id is an input
+ if filter_key in self._group_columns:
+ # keep a list of group properties to filter on
+ group_prop_filter[filter_key] = filter_val
+ group_filter = True
+
+ elif filter_key in node_types_table.columns:
+ # for node_types we just keep a list of all node_type_ids with matching key==value pairs
+ node_type_filter &= set(node_types_table.find(filter_key, filter_val))
+ type_filter = True
+
+ else:
+ # TODO: should we raise an exception?
+ # TODO: Use logger
+ print('Could not find property {} in either group or types table. Ignoring.'.format(filter_key))
+
+ # iterate through all nodes, skipping ones that don't have matching key==value pairs
+ for indx in self._parent_indicies:
+ # TODO: Don't build the node until you filter out node_type_id
+ node = self._parent.get_row(indx)
+ if type_filter and node.node_type_id not in node_type_filter:
+ # confirm node_type_id is a correct one
+ continue
+
+ if group_filter:
+ # Filter by group property values
+ # TODO: Allow group properties to handle lists
+ src_failed = True
+ for k, v in group_prop_filter.items():
+ if node[k] != v:
+ break
+ else:
+ src_failed = False
+
+ if src_failed:
+ continue
+
+ yield node
+
+ def __iter__(self):
+ self.build_indicies()
+ # Pass a list of indices into the NodeSet; the NodeSet will take care of the iteration
+ return NodeSet(self._parent_indicies, self._parent).__iter__()
+
+
+class EdgeGroup(Group):
+ def __init__(self, group_id, h5_group, parent):
+ super(EdgeGroup, self).__init__(group_id, h5_group, parent)
+ self._indicies_count = 0 # Used to keep track of the number of indices (since it contains multiple ranges)
+
+ self.__itr_index = 0
+ self.__itr_range = []
+ self.__itr_range_idx = 0
+ self.__itr_range_max = 0
+
+ def build_indicies(self, force=False):
+ if self._parent_indicies_built and not force:
+ return
+
+ # Saves indices as a (potentially empty) list of ranges
+ # TODO: Turn index into generator, allows for cheaper iteration over the group
+ self._indicies_count, self._parent_indicies = self._parent.group_indicies(self.group_id, build_cache=False)
+ self._parent_indicies_built = True
+
+ def to_dataframe(self):
+ raise NotImplementedError
+
+
+ self.build_indicies()
+ ds_vals = np.zeros(self._indicies_count, dtype=parent_ds.dtype)
+ c_indx = 0
+ for indx_range in self._parent_indicies:
+ indx_beg, indx_end = indx_range[0], indx_range[1]
+ n_indx = c_indx + (indx_end - indx_beg)
+ ds_vals[c_indx:n_indx] = parent_ds[indx_beg:indx_end]
+ c_indx = n_indx
+
+ return ds_vals
+
+ def src_node_ids(self):
+ return self._get_parent_ds(self._parent._source_node_id_ds)
+
+ def trg_node_ids(self):
+ return self._get_parent_ds(self._parent._target_node_id_ds)
+
+ def node_type_ids(self):
+ return self._get_parent_ds(self._parent._type_id_ds)
+
+ def get_values(self, property_name, all_rows=False):
+ # TODO: Need to take into account if property_name is in the edge-types
+ if property_name not in self.columns:
+ raise KeyError(property_name)
+
+ if all_rows:
+ return np.array(self._h5_group[property_name])
+ else:
+ self.build_indicies()
+ # Go through all ranges and build the return list
+ dataset = self._h5_group[property_name]
+ return_list = np.empty(self._indicies_count, self._h5_group[property_name].dtype)
+ i = 0
+ for r_beg, r_end in self._parent_indicies:
+ r_len = r_end - r_beg
+ return_list[i:(i+r_len)] = dataset[r_beg:r_end]
+ i += r_len
+ return return_list
+
+ def filter(self, **filter_props):
+ # TODO: Not sure if this is worth implementing; needs checking on a larger dataset than is currently available.
+ raise NotImplementedError
+
+ def __iter__(self):
+ self.build_indicies()
+ # TODO: Implement using an EdgeSet
+ if len(self._parent_indicies) == 0:
+ self.__itr_range_max = 0
+ self.__itr_range_idx = 0
+ self.__itr_range = []
+ self.__itr_index = 0
+ else:
+ # Iterate range by range. (If the indices were guaranteed ordered we could instead just stop at
+ # self._parent_indicies[-1][1].)
+ self.__itr_range_max = len(self._parent_indicies)
+ self.__itr_range_idx = 0
+ self.__itr_range = self._parent_indicies[0]
+ self.__itr_index = self.__itr_range[0]
+
+ return self
+
+ def next(self):
+ return self.__next__()
+
+ def __next__(self):
+ if self.__itr_range_idx >= self.__itr_range_max:
+ raise StopIteration
+
+ nxt_edge = self._parent.get_row(self.__itr_index)
+ self.__itr_index += 1
+ if self.__itr_index >= self.__itr_range[1]:
+ # iterator has moved past the current range
+ self.__itr_range_idx += 1
+ if self.__itr_range_idx < self.__itr_range_max:
+ # move the iterator onto next range
+ self.__itr_range = self._parent_indicies[self.__itr_range_idx] # update range
+ self.__itr_index = self.__itr_range[0] # move iterator to the beginning of the new range
+ else:
+ self.__itr_range = []
+
+ return nxt_edge
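As a usage illustration (not part of the diff): a NodeGroup is normally reached through a loaded population rather than constructed directly. The `sonata.File` entry point, file names, and population name below are assumptions for the sketch.

```python
# Hypothetical sketch: iterate a node group and filter by a type property.
from bmtk.utils import sonata

net = sonata.File(data_files='network/v1_nodes.h5',
                  data_type_files='network/v1_node_types.csv')
pop = net.nodes['v1']                 # a NodePopulation (name assumed)
grp = pop.get_group(0)                # NodeGroup with group_id 0

print(grp.node_ids[:5])               # first few node ids in this group
df = grp.to_dataframe()               # group + node-types properties merged

# filter() yields Node objects whose properties equal the given values
for node in grp.filter(pop_name='VIp'):
    assert node['pop_name'] == 'VIp'
```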
diff --git a/bmtk-vb/bmtk/utils/sonata/group.pyc b/bmtk-vb/bmtk/utils/sonata/group.pyc
new file mode 100644
index 0000000..51067ca
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/group.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/node.py b/bmtk-vb/bmtk/utils/sonata/node.py
new file mode 100644
index 0000000..4fa24ae
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/sonata/node.py
@@ -0,0 +1,126 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+
+class NodeSet(object):
+ # TODO: Merge NodeSet and NodePopulation
+ def __init__(self, node_indicies, population, **parameters):
+ self._indicies = node_indicies
+ self._n_nodes = len(self._indicies)
+ self._population = population
+
+ self.__itr_index = 0
+
+ @property
+ def node_ids(self):
+ return self._population.inode_ids(self._indicies)
+
+ @property
+ def gids(self):
+ return self._population.igids(self._indicies)
+
+ @property
+ def node_type_ids(self):
+ return self._population.inode_type_ids(self._indicies)
+
+ '''
+ @property
+ def node_types(self):
+ return [self._population._node_types_table[ntid] for ntid in self._node_type_ids]
+ '''
+
+ def get_properties(self, property_name):
+ raise NotImplementedError
+
+ def __len__(self):
+ return self._n_nodes
+
+ def __iter__(self):
+ self.__itr_index = 0
+ return self
+
+ def next(self):
+ return self.__next__()
+
+ def __next__(self):
+ if self.__itr_index >= self._n_nodes:
+ raise StopIteration
+
+ node = self._population.get_row(self._indicies[self.__itr_index])
+ self.__itr_index += 1
+ return node
+
+
+class Node(object):
+ # TODO: include population name/reference
+ # TODO: make this a dictionary (or preferably a collections.abc.MutableMapping)
+ def __init__(self, node_id, node_type_id, node_types_props, group_id, group_props, dynamics_params, gid=None):
+ self._node_id = node_id
+ self._gid = gid
+ self._node_type_id = node_type_id
+ self._node_type_props = node_types_props
+ self._group_id = group_id
+ self._group_props = group_props
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def gid(self):
+ return self._gid
+
+ @property
+ def group_id(self):
+ return self._group_id
+
+ @property
+ def node_type_id(self):
+ return self._node_type_id
+
+ @property
+ def group_props(self):
+ return self._group_props
+
+ @property
+ def node_type_properties(self):
+ return self._node_type_props
+
+ @property
+ def dynamics_params(self):
+ raise NotImplementedError
+
+ def __getitem__(self, prop_key):
+ if prop_key in self._group_props:
+ return self._group_props[prop_key]
+ elif prop_key in self._node_type_props:
+ return self._node_type_props[prop_key]
+ elif prop_key == 'node_id':
+ return self.node_id
+ elif prop_key == 'node_type_id':
+ return self.node_type_id
+ else:
+ raise KeyError('Unknown property {}'.format(prop_key))
+
+ def __contains__(self, prop_key):
+ return prop_key in self._group_props or prop_key in self._node_type_props
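A minimal sketch of the Node lookup semantics above (all values are made up): `__getitem__` resolves group properties first, then node-type properties, then the id fields, so a group value shadows a node-type value of the same name.

```python
node = Node(node_id=10, node_type_id=100,
            node_types_props={'model_type': 'biophysical', 'depth': 250.0},
            group_id=0, group_props={'depth': 120.5},
            dynamics_params=None, gid=7)

assert node['depth'] == 120.5              # group value shadows node-type value
assert node['model_type'] == 'biophysical' # falls through to node-type props
assert node['node_id'] == 10 and node.gid == 7
assert 'depth' in node                     # __contains__ checks both dicts
```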
diff --git a/bmtk-vb/bmtk/utils/sonata/node.pyc b/bmtk-vb/bmtk/utils/sonata/node.pyc
new file mode 100644
index 0000000..c31b400
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/node.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/population.py b/bmtk-vb/bmtk/utils/sonata/population.py
new file mode 100644
index 0000000..0d16064
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/sonata/population.py
@@ -0,0 +1,608 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import h5py
+import numpy as np
+
+from .utils import range_itr, get_attribute_h5
+from .node import Node, NodeSet
+from .edge import Edge, EdgeSet
+from .group import NodeGroup, EdgeGroup
+
+
+class Population(object):
+ def __init__(self, pop_name, pop_group, types_table):
+ self._pop_name = pop_name
+ self._pop_group = pop_group
+ self._types_table = types_table
+ self._nrows = 0
+
+ # For storing individual groups
+ self._group_map = {} # grp-id --> h5py.Group object
+ self._find_groups()
+ self._group_cache = {} # grp-id --> sonata.io.Group() object
+
+ # References to the population's primary datasets
+ self._type_id_ds = pop_group[self.type_ids_column]
+ self._group_id_ds = pop_group[self.group_id_column]
+ self._group_index_ds = pop_group[self.group_index_column]
+
+ self._group_indicies = {} # grp-id --> list of rows indicies
+ self._group_indicies_cache_built = False
+
+ @property
+ def name(self):
+ """name of current population"""
+ return self._pop_name
+
+ @property
+ def group_ids(self):
+ """List of all group_ids belonging to population"""
+ return list(self._group_map.keys())
+
+ @property
+ def groups(self):
+ """Returns a list of sonata.Group objects"""
+ return [self.get_group(name) for name in self._group_map.keys()]
+
+ @property
+ def types_table(self):
+ return self._types_table
+
+ @property
+ def type_ids(self):
+ return np.array(self._type_id_ds)
+
+ @property
+ def group_id_ds(self):
+ return self._group_id_ds
+
+ @property
+ def group_index_ds(self):
+ return self._group_index_ds
+
+ @property
+ def group_id_column(self):
+ raise NotImplementedError
+
+ @property
+ def group_index_column(self):
+ raise NotImplementedError
+
+ @property
+ def type_ids_column(self):
+ raise NotImplementedError
+
+ def to_dataframe(self):
+ """Convert Population to dataframe"""
+ raise NotImplementedError
+
+ def get_group(self, group_id):
+ if group_id in self._group_cache:
+ return self._group_cache[group_id]
+ else:
+ grp_h5 = self._group_map[group_id]
+ grp_obj = self._build_group(group_id, grp_h5)
+ self._group_cache[group_id] = grp_obj
+ return grp_obj
+
+ def group_indicies(self, group_id, build_cache=False):
+ """Returns a list of all the population row index that maps onto the given group.
+
+ Used for iterating or searching within a Group
+
+ :param group_id: id of a given group
+ :param build_cache: If True, cache the indices for all groups; faster when making multiple calls but requires
+ more memory (default False)
+ :return: A (possibly empty) list of row indices (non-contiguous, but unique)
+ """
+ if self._group_indicies_cache_built:
+ return self._group_indicies.get(group_id, [])
+
+ else:
+ tmp_index = pd.DataFrame()
+ # TODO: Need to check the memory overhead, especially for edges. See if an iterative search is just as fast
+ tmp_index['grp_id'] = pd.Series(self._group_id_ds[:], dtype=self._group_id_ds.dtype)
+ tmp_index['row_indx'] = pd.Series(range_itr(self._nrows), dtype=np.uint32)
+ if build_cache:
+ # save all indicies as arrays
+ self._group_indicies = {grp_id: np.array(subset['row_indx'])
+ for grp_id, subset in tmp_index.groupby(by='grp_id')}
+ self._group_indicies_cache_built = True
+ return self._group_indicies.get(group_id, [])
+ else:
+ # TODO: Manually del tmp_index to clear out the memory?
+ tmp_index = tmp_index[tmp_index['grp_id'] == group_id]
+ return np.array(tmp_index['row_indx'])
+
+ def igroup_ids(self, row_indicies):
+ return self._group_id_ds[list(row_indicies)]
+
+ def igroup_indicies(self, row_indicies):
+ return self._group_index_ds[list(row_indicies)]
+
+ def _find_groups(self):
+ """Create a map between group-id and h5py.Group reference"""
+ for grp_key, grp_h5 in self._pop_group.items():
+ if grp_key.isdigit():
+ grp_id = int(grp_key)
+ self._group_map[grp_id] = grp_h5
+ else:
+ # TODO: Should we put a warning if an unrecognized group exists?
+ pass
+
+ def _build_group(self, group_id, group_h5):
+ raise NotImplementedError
+
+ def __len__(self):
+ return self._nrows
+
+
+class NodePopulation(Population):
+ def __init__(self, pop_name, pop_group, node_types_tables):
+ super(NodePopulation, self).__init__(pop_name=pop_name, pop_group=pop_group, types_table=node_types_tables)
+
+ # TODO: node_ids can be implicit
+ self._node_id_ds = pop_group['node_id']
+ self._nrows = len(self._node_id_ds)
+
+ # TODO: This isn't necessary if only using iterator. Delay building index until get_node() is called.
+ self._index_nid2row = None # A lookup from node_id --> h5 row number
+ self._node_id_index_built = False
+ self._build_node_id_index()
+
+ # indicies for gid <--> node_id map
+ self._has_gids = False
+ self._index_gid2row = None # gid --> row (for searching by gid)
+ self._index_row2gid = None # row --> gid (for iterator or searching by node-id)
+ self._gid_lookup_fnc = lambda _: None # for looking up gid by row, use fnc pointer rather than conditional
+
+ self.__itr_index = 0 # for iterator
+
+ @property
+ def group_id_column(self):
+ return 'node_group_id'
+
+ @property
+ def group_index_column(self):
+ return 'node_group_index'
+
+ @property
+ def type_ids_column(self):
+ return 'node_type_id'
+
+ @property
+ def has_gids(self):
+ return self._has_gids
+
+ @property
+ def node_ids(self):
+ return np.array(self._node_id_ds)
+
+ @property
+ def gids(self):
+ if self.has_gids:
+ return np.array(self._index_gid2row.index)
+ else:
+ return None
+
+ @property
+ def node_types_table(self):
+ return self._types_table
+
+ @property
+ def index_column_name(self):
+ return 'node_id'
+
+ def add_gids(self, gid_map_df, force=False):
+ if self.has_gids and not force:
+ # TODO: not sure if it's best to raise an exception or just continue on silently?
+ raise Exception('Node population {} already has gids mapped onto node-ids.'.format(self.name))
+ # return
+
+ # Create map from gid --> node_id --> row #
+ self._build_node_id_index()
+ tmp_df = pd.DataFrame()
+ tmp_df['row_id'] = self._index_nid2row.index
+ tmp_df['node_id'] = self._index_nid2row
+ gid_map_df = gid_map_df.merge(tmp_df, how='left', left_on='node_id', right_on='node_id')
+ gid_map_df = gid_map_df.drop(['node_id', 'population'], axis=1)
+ self._index_gid2row = gid_map_df.set_index('gid')
+ self._index_row2gid = gid_map_df.set_index('row_id')
+ self._gid_lookup_fnc = lambda row_indx: self._index_row2gid.loc[row_indx]['gid']
+ self._has_gids = True
+
+ def to_dataframe(self):
+ raise NotImplementedError
+
+ def get_row(self, row_indx):
+ # TODO: Use helper function so we don't have to lookup gid/node_id twice
+ # Note: nodes aren't cached here to save memory, but caching might be beneficial too.
+ node_id = self._node_id_ds[row_indx]
+ node_type_id = self._type_id_ds[row_indx]
+ node_group_id = self._group_id_ds[row_indx]
+ node_group_index = self._group_index_ds[row_indx]
+
+ node_type_props = self.node_types_table[node_type_id]
+ node_group_props = self.get_group(node_group_id)[node_group_index]
+ node_gid = self._gid_lookup_fnc(row_indx)
+
+ return Node(node_id, node_type_id, node_type_props, node_group_id, node_group_props, None, gid=node_gid)
+
+ def get_rows(self, row_indicies):
+ """Returns a set of all nodes based on list of row indicies.
+
+ Warning: currently due to the use of h5py, the list must be ordered and cannot contain duplicates.
+
+ :param row_indicies: A list of row indicies
+ :return: An iterable NodeSet of nodes in the specified indicies
+ """
+ # TODO: Check that row_indicies is unsigned and the max (which will be the last value) < n_rows
+ # TODO: Check order and check for duplicates in list
+ return NodeSet(row_indicies, self)
+
+ def inode_ids(self, row_indicies):
+ # h5py errors out if row_indicies is a numpy array or pandas Series, so convert to a python list
+ # TODO: list conversion can be expensive, see if h5py will work with np arrays natively.
+ return self._node_id_ds[list(row_indicies)]
+
+ def igids(self, row_indicies):
+ gids = self._gid_lookup_fnc(row_indicies)
+ if gids is not None:
+ gids = np.array(gids)
+ return gids
+
+ def inode_type_ids(self, row_indicies):
+ # self._node_type_id_ds
+ return self._type_id_ds[list(row_indicies)]
+
+ def get_node_id(self, node_id):
+ row_indx = self._index_nid2row.loc[node_id] # label-based lookup; node_ids need not start at 0
+ return self.get_row(row_indx)
+
+ def get_gid(self, gid):
+ # assert(self.has_gids)
+ row_indx = self._index_gid2row.loc[gid]['row_id'] # label-based lookup by gid
+ return self.get_row(row_indx)
+
+ def filter(self, **filter_props):
+ for grp in self.groups:
+ for node in grp.filter(**filter_props):
+ yield node
+
+ def _build_node_id_index(self, force=False):
+ if self._node_id_index_built and not force:
+ return
+
+ self._index_nid2row = pd.Series(range_itr(self._nrows), index=self._node_id_ds, dtype=self._node_id_ds.dtype)
+ self._node_id_index_built = True
+
+ def _build_group(self, group_id, group_h5):
+ return NodeGroup(group_id, group_h5, self)
+
+ def __iter__(self):
+ self.__itr_index = 0
+ return self
+
+ def next(self):
+ return self.__next__()
+
+ def __next__(self):
+ if self.__itr_index >= self._nrows:
+ raise StopIteration
+
+ nxt_node = self.get_row(self.__itr_index)
+ self.__itr_index += 1
+ return nxt_node
+
+ def __getitem__(self, item):
+ if isinstance(item, slice):
+ # TODO: Check
+ start = item.start if item.start is not None else 0
+ stop = item.stop if item.stop is not None else self._nrows
+ row_indicies = range_itr(start, stop, item.step)
+ return NodeSet(row_indicies, self)
+
+ elif isinstance(item, int):
+ return self.get_row(item)
+
+ elif isinstance(item, list):
+ return NodeSet(item, self)
+ else:
+ raise TypeError('Unable to get item using {}.'.format(type(item)))
+
+
+class EdgePopulation(Population):
+ class __IndexStruct(object):
+ """Class sto store indicies subgroup"""
+ # TODO: Use collections.namedtuple
+ def __init__(self, lookup_table, edge_table):
+ self.lookup_table = lookup_table
+ self.edge_table = edge_table
+
+ def __init__(self, pop_name, pop_group, edge_types_tables):
+ super(EdgePopulation, self).__init__(pop_name=pop_name, pop_group=pop_group, types_table=edge_types_tables)
+
+ # keep reference to source and target datasets
+ self._source_node_id_ds = pop_group['source_node_id']
+ self._target_node_id_ds = pop_group['target_node_id']
+
+ self._nrows = len(self._source_node_id_ds)
+
+ # TODO: Throw an error/warning if missing
+ self._source_population = EdgePopulation.get_source_population(pop_group)
+ self._target_population = EdgePopulation.get_target_population(pop_group)
+
+ self.__itr_index = 0
+
+ # TODO: use a function pointer for get_index so it doesn't have to run a conditional every time
+ # TODO: add properties so the user can determine which indices exist.
+ self._targets_index = None
+ self._has_target_index = False
+ self._sources_index = None
+ self._has_source_index = False
+ self.build_indicies()
+
+ @property
+ def group_id_column(self):
+ return 'edge_group_id'
+
+ @property
+ def group_index_column(self):
+ return 'edge_group_index'
+
+ @property
+ def type_ids_column(self):
+ return 'edge_type_id'
+
+ @property
+ def source_population(self):
+ return self._source_population
+
+ @property
+ def target_population(self):
+ return self._target_population
+
+ @staticmethod
+ def get_source_population(pop_group_h5):
+ return get_attribute_h5(pop_group_h5['source_node_id'], 'node_population', None)
+
+ @staticmethod
+ def get_target_population(pop_group_h5):
+ return get_attribute_h5(pop_group_h5['target_node_id'], 'node_population', None)
+
+ @property
+ def edge_types_table(self):
+ return self._types_table
+
+ def to_dataframe(self):
+ raise NotImplementedError
+
+ def build_indicies(self):
+ if 'indicies' in self._pop_group:
+ indicies_grp = self._pop_group['indicies']
+ for index_name, index_grp in indicies_grp.items():
+ # TODO: Let __IndexStruct build the indicies
+ # Make sure subgroup has the correct datasets
+ if not isinstance(index_grp, h5py.Group):
+ continue
+
+ if 'node_id_to_range' not in index_grp:
+ # TODO: make this more general, i.e. 'id_to_range', so we can index on gids, edge_types, etc.
+ # TODO: Check that there are two columns in dataset
+ raise Exception('index {} in {} edges is missing column {}.'.format(index_name, self.name,
+ 'node_id_to_range'))
+ if 'range_to_edge_id' not in index_grp:
+ raise Exception('index {} in {} edges is missing column {}.'.format(index_name, self.name,
+ 'range_to_edge_id'))
+
+ # Cache the index
+ targets_lookup = index_grp['node_id_to_range']
+ edges_range = index_grp['range_to_edge_id']
+ index_obj = self.__IndexStruct(targets_lookup, edges_range)
+
+ # Determine the type of index
+ if index_name == 'source_to_target':
+ self._sources_index = index_obj
+ self._has_source_index = True
+ elif index_name == 'target_to_source':
+ self._targets_index = index_obj
+ self._has_target_index = True
+ else:
+ # TODO: Need to send this to a logger rather than stdout
+ print('Unrecognized index {}. Ignoring.'.format(index_name))
+
+ def _build_group(self, group_id, group_h5):
+ return EdgeGroup(group_id, group_h5, self)
+
+ def group_indicies(self, group_id, build_cache=False):
+ # For nodes it's safe to just keep a list of all indices that map onto a given group. For edges, because
+ # there are many more rows (and typically far fewer groups), we build a range-based index as for the
+ # source/target ids
+ if len(self._group_map) == 1:
+ return len(self), [[0, len(self)]]
+
+ grp_indicies = super(EdgePopulation, self).group_indicies(group_id, build_cache=False)
+ if len(grp_indicies) == 0:
+ # Return an index with no ranges
+ return 0, []
+
+ # cluster into ranges. Naive implementation; there is probably a faster way to cluster an ordered array!
+ range_beg = grp_indicies[0]
+ ranges = []
+ for i in range_itr(1, len(grp_indicies)):
+ if (grp_indicies[i-1]+1) != grp_indicies[i]:
+ ranges.append([range_beg, grp_indicies[i-1]+1])
+ range_beg = grp_indicies[i]
+ ranges.append([range_beg, grp_indicies[-1]+1])
+ return len(grp_indicies), np.array(ranges, dtype=np.uint32)
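To make the range-clustering step above concrete, here is a small standalone sketch (illustrative only, not part of the diff) of the same algorithm over a sorted index array, producing half-open [begin, end) ranges:

```python
import numpy as np

def cluster_ranges(indices):
    """Collapse a sorted array of row indices into [begin, end) ranges."""
    if len(indices) == 0:
        return []
    ranges, range_beg = [], indices[0]
    for prev, cur in zip(indices[:-1], indices[1:]):
        if prev + 1 != cur:                  # gap found: close the current range
            ranges.append([range_beg, prev + 1])
            range_beg = cur
    ranges.append([range_beg, indices[-1] + 1])
    return np.array(ranges, dtype=np.uint32)

assert cluster_ranges([2, 3, 4, 8, 9, 15]).tolist() == [[2, 5], [8, 10], [15, 16]]
```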
+
+ '''
+ def _get_target_index(self):
+ # TODO: Do only once
+ if self._targets_index is not None:
+ return self._targets_index
+
+ if 'incidies' in self._pop_group:
+ if 'target_to_source' in self._pop_group['incidies']:
+ targets_lookup = self._pop_group['incidies']['target_to_source']['node_id_to_range']
+ edges_range = self._pop_group['incidies']['target_to_source']['range_to_edge_id']
+ self._targets_index = self.__IndexStruct(targets_lookup, edges_range)
+ return self._targets_index
+
+ # TODO: What to do if index doesn't exist?
+ raise NotImplementedError
+ '''
+
+ def get_row(self, index):
+ src_node = self._source_node_id_ds[index]
+ trg_node = self._target_node_id_ds[index]
+ edge_type_id = self._type_id_ds[index]
+ edge_types_props = self.edge_types_table[edge_type_id]
+
+ edge_group_id = self._group_id_ds[index]
+ edge_group_index = self._group_index_ds[index]
+ edge_group_props = self.get_group(edge_group_id)[edge_group_index]
+ return Edge(trg_node_id=trg_node, src_node_id=src_node, source_pop=self.source_population,
+ target_pop=self.target_population, group_id=edge_group_id,
+ group_props=edge_group_props, edge_types_props=edge_types_props)
+
+ def filter(self, **filter_props):
+ selected_edge_types = set(self.edge_types_table.edge_type_ids)
+ types_filter = False # Do we need to filter results by edge_type_id
+ if 'edge_type_id' in filter_props:
+ # TODO: Make sure the edge_type_id is valid
+ selected_edge_types = set([filter_props['edge_type_id']])
+ del filter_props['edge_type_id']
+ types_filter = True
+
+ selected_groups = set(self._group_map.keys()) # list of grp_id's that will be used
+ group_prop_filter = {} # list of actual query statements
+ group_filter = False # do we need to filter results by group_id
+
+ # Go through filter key==value pairs, create filters for groups and edge_types
+ for filter_key, filter_val in filter_props.items():
+ # Find out what groups, if any, the column should search in.
+ group_query = False # If it's querying a group property don't look in edge_types
+ types_query = False
+ for grp_id, grp_h5 in self._group_map.items():
+ if filter_key in grp_h5:
+ # TODO: Need to check the dtype's match
+ selected_groups &= set([grp_id])
+ group_prop_filter[filter_key] = filter_val
+ group_query = True
+ group_filter = True
+
+ if (not group_query) and filter_key in self.edge_types_table.columns:
+ # Presearch the edge types and get only those edge_type_ids which match key==val
+ selected_edge_types &= set(self.edge_types_table.find(filter_key, filter_val))
+ types_filter = True
+ types_query = True
+
+ if not (group_query or types_query):
+ # Property key neither exists in a group or the edge_types_table
+ raise Exception('Could not find property {}'.format(filter_key))
+
+ # Iterate through all nodes, only returning those that match the filter
+ for indx in range_itr(self._nrows):
+ # Filter by edge_type_id
+ if types_filter:
+ # TODO: Invert the selected_edge_types, it will be faster to fail immediately than search the entire list
+ if self._type_id_ds[indx] not in selected_edge_types:
+ continue
+
+ # Filter by group properties
+ if group_filter:
+ # TODO: Invert group search
+ grp_id = self._group_id_ds[indx]
+ if grp_id not in selected_groups:
+ continue
+
+ grp_index = self._group_index_ds[indx]
+ search_failed = True
+ for prop_key, prop_val in group_prop_filter.items():
+ if prop_val != self._group_map[grp_id][prop_key][grp_index]:
+ break
+ else:
+ search_failed = False
+
+ if search_failed:
+ continue
+
+ yield self.get_row(indx)
+
+ def get_target(self, target_node_id):
+ # TODO: Raise an exception, or call find() and log a warning that the index is not available
+ # TODO: check validity of target_node_id (non-negative integer and smaller than index range)
+ assert(self._has_target_index)
+ return self._get_index(self._targets_index, target_node_id)
+
+ def get_targets(self, target_node_ids):
+ # TODO: verify input is iterable
+ assert(self._has_target_index)
+ trg_index = self._targets_index
+ for trg_id in target_node_ids:
+ for edge in self._get_index(trg_index, trg_id):
+ yield edge
+
+ def get_source(self, source_node_id):
+ assert(self._has_source_index)
+ return self._get_index(self._sources_index, source_node_id)
+
+ def get_sources(self, source_node_ids):
+ assert(self._has_source_index)
+ src_index = self._sources_index
+ for src_id in source_node_ids:
+ for edge in self._get_index(src_index, src_id):
+ yield edge
+
+ def _get_index(self, index_struct, lookup_id):
+ # TODO: Use a EdgeSet instead
+ if lookup_id >= len(index_struct.lookup_table):
+ # TODO: Store length in index
+ return # a bare return ends the generator; raising StopIteration here breaks under PEP 479 (py3.7+)
+
+ edges_table = index_struct.edge_table
+ lookup_beg, lookup_end = index_struct.lookup_table[lookup_id]
+ for i in range_itr(lookup_beg, lookup_end):
+ edge_indx_beg, edge_indx_end = edges_table[i]
+ for edge_indx in range_itr(edge_indx_beg, edge_indx_end):
+ yield self.get_row(edge_indx)
+
+ def __iter__(self):
+ self.__itr_index = 0
+ return self
+
+ def __next__(self):
+ if self.__itr_index >= self._nrows:
+ raise StopIteration
+
+ next_edge = self.get_row(self.__itr_index)
+ self.__itr_index += 1
+ return next_edge
+
+ def next(self):
+ return self.__next__()
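A hedged sketch of how an EdgePopulation might be used once loaded; the `sonata.File` entry point, file names, population name, presence of a target_to_source index, and the Edge accessor names are assumptions, not shown in this diff.

```python
from bmtk.utils import sonata

net = sonata.File(data_files=['network/v1_nodes.h5', 'network/v1_v1_edges.h5'],
                  data_type_files=['network/v1_node_types.csv',
                                   'network/v1_v1_edge_types.csv'])
edges = net.edges['v1_to_v1']            # an EdgePopulation (name assumed)

# Uses the 'target_to_source' index cached by build_indicies()
for edge in edges.get_targets([0, 1, 2]):
    print(edge.source_node_id, edge.target_node_id)  # attribute names assumed

# filter() scans all rows, combining edge-type and group-property filters
for edge in edges.filter(edge_type_id=100):
    assert edge.edge_type_id == 100
```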
diff --git a/bmtk-vb/bmtk/utils/sonata/population.pyc b/bmtk-vb/bmtk/utils/sonata/population.pyc
new file mode 100644
index 0000000..3c668a3
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/population.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/types_table.py b/bmtk-vb/bmtk/utils/sonata/types_table.py
new file mode 100644
index 0000000..375d332
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/sonata/types_table.py
@@ -0,0 +1,220 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import pandas as pd
+import numbers
+import math
+
+from .column_property import ColumnProperty
+
+
+def remove_nans(types_dict):
+ """Convert nan values to None in type row (dict)"""
+ for k, v in types_dict.items():
+ if isinstance(v, numbers.Real) and math.isnan(v):
+ types_dict[k] = None
+
+
+class TypesTable(object):
+ def __init__(self, parent=None):
+ self._parent = parent # Used to keep track of FileRoot object this table belongs to
+ self._columns = {}
+ self._index_typeid2df = {} # map from node(edge)_type_id --> csv Row
+ self._column_map = {} # TODO: Use defaultdict
+ # self._id_table = self.IDSearcher(self)
+ self._dataframes = [] # list of all pandas dataframe (types tables)
+
+ self._cached_node_types = {}
+ self._df_cache = None
+
+ self._itr_indx = 0
+ self._itr_end = 0
+
+ @property
+ def index_column_name(self):
+ raise NotImplementedError
+
+ @property
+ def type_ids(self):
+ return list(self._index_typeid2df.keys()) # list() so this is indexable under Python 3
+
+ @property
+ def columns(self):
+ return list(self._columns.values())
+
+ def column(self, column_name):
+ return self._columns[column_name]
+
+ def add_table(self, nt_df):
+ # TODO: Just saving the entire dataframe currently because we don't expect the node-types table to get too large
+ # (a few hundred rows at most). If that changes, consider deferring the csv load until explicitly requested.
+ self._dataframes.append(nt_df)
+
+ # Check that the type ids are unique and build id --> dataframe map
+ nt_df.set_index(keys=self.index_column_name, inplace=True)
+ for type_id in list(nt_df.index):
+ if type_id in self._index_typeid2df:
+ raise Exception('Multiple {}s with value {}.'.format(self.index_column_name, type_id))
+ self._index_typeid2df[type_id] = nt_df
+
+ columns = ColumnProperty.from_csv(nt_df)
+ for col in columns:
+ self._columns[col.name] = col
+ if col in self._column_map:
+ # TODO: make sure dtype matches. Bad things can happen if the same col has heterogeneous dtypes
+ self._column_map[col.name].append(nt_df)
+ else:
+ self._column_map[col.name] = [nt_df]
+
+ def find(self, column_key, column_val, silent=False):
+ """Returns a list of type_ids that contain column property column_key==column_val
+
+ :param column_key: Name of column to search
+ :param column_val: Value of column to select for
+ :param silent: Set to true to prevent KeyError if column_key doesn't exist (default=False)
+ :return: A (potentially empty) list of type_ids
+ """
+ if not silent and column_key not in self.columns:
+ raise KeyError(column_key)
+
+ is_list = isinstance(column_val, list)
+ selected_ids = [] # running list of valid type-ids
+ column_dtype = self.column(column_key).dtype
+ for df in self._column_map[column_key]:
+ # if a csv column has all NONE values, pandas will load the values as float(NaN)'s. Thus for str/object
+ # columns we need to check the dtype, otherwise we'll get an invalid comparison.
+ if df[column_key].dtype == column_dtype:
+ if is_list:
+ indicies = df[df[column_key].isin(column_val)].index
+ else:
+ indicies = df[df[column_key] == column_val].index
+
+ if len(indicies) > 0:
+ selected_ids.extend(list(indicies))
+
+ return selected_ids
+
+ def to_dataframe(self, cache=False):
+ if self._df_cache is not None:
+ return self._df_cache
+
+ if len(self._dataframes) == 0:
+ return None
+ elif len(self._dataframes) == 1:
+ merged_table = self._dataframes[0]
+ else:
+ # merge all dataframes together
+ merged_table = self._dataframes[0].reset_index() # TODO: just merge on the indicies rather than reset
+ for df in self._dataframes[1:]:
+ try:
+ merged_table = merged_table.merge(df.reset_index(), how='outer')
+ except ValueError as ve:
+ # There is a potential issue when merging if one dtype differs from another (e.g., if all
+ # model_template's are NONE, pandas will load the column as float64). The workaround is to find
+ # columns that differ and upcast them to object dtype (TODO: look for a better solution)
+ right_df = df.reset_index()
+ for col in set(merged_table.columns) & set(right_df.columns):
+ # find all shared columns whose dtype differs
+ if merged_table[col].dtype != right_df[col].dtype:
+ # change column(s) dtype to object
+ merged_table[col] = merged_table[col] if merged_table[col].dtype == object \
+ else merged_table[col].astype(object)
+ right_df[col] = right_df[col] if right_df[col].dtype == object \
+ else right_df[col].astype(object)
+
+ merged_table = merged_table.merge(right_df, how='outer')
+
+ merged_table.set_index(self.index_column_name, inplace=True)
+
+ if cache:
+ self._df_cache = merged_table
+
+ return merged_table
+
+ def __iter__(self):
+ self._itr_indx = 0
+ self._itr_end = len(self.type_ids)
+ return self
+
+ def next(self):
+ return self.__next__()
+
+ def __next__(self):
+ if self._itr_indx >= self._itr_end:
+ raise StopIteration
+
+ ntid = self.type_ids[self._itr_indx]
+ self._itr_indx += 1
+ return self[ntid]
+
+ def __getitem__(self, type_id):
+ if isinstance(type_id, tuple):
+ return [self[ntid] for ntid in type_id]
+
+ elif isinstance(type_id, numbers.Integral):
+ if type_id not in self._index_typeid2df:
+ raise Exception('{} {} not found'.format(self.index_column_name, type_id))
+
+ if type_id in self._cached_node_types:
+ return self._cached_node_types[type_id]
+ else:
+ nt_dict = self._index_typeid2df[type_id].loc[type_id].to_dict()
+ # TODO: consider just removing key from dict if value is None/NaN
+ remove_nans(nt_dict) # pd turns None into np.nan's. Temp soln is to just convert them back.
+ self._cached_node_types[type_id] = nt_dict
+ self._cached_node_types[type_id][self.index_column_name] = type_id # include node/edge_type_id
+ return nt_dict
+ else:
+ raise Exception('Unsupported search on node-type-id')
+
+ def __contains__(self, type_id):
+ return type_id in self._index_typeid2df
+
+ def __repr__(self):
+ return repr(self.to_dataframe())
+
+
+class NodeTypesTable(TypesTable):
+ def __init__(self, parent=None):
+ super(NodeTypesTable, self).__init__(parent)
+
+ @property
+ def index_column_name(self):
+ return 'node_type_id'
+
+ @property
+ def node_type_ids(self):
+ return self.type_ids
+
+
+class EdgeTypesTable(TypesTable):
+ def __init__(self, parent=None):
+ super(EdgeTypesTable, self).__init__(parent)
+
+ @property
+ def index_column_name(self):
+ return 'edge_type_id'
+
+ @property
+ def edge_type_ids(self):
+ return self.type_ids
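A minimal sketch of driving the types table directly; the csv path, the presence of a model_type column, and node_type_id 100 are assumptions (the space-separated, NONE-as-NaN layout mirrors load_csv in utils.py below).

```python
import pandas as pd

nt_df = pd.read_csv('network/v1_node_types.csv', sep=' ', na_values='NONE')
types = NodeTypesTable()
types.add_table(nt_df)

props = types[100]                                  # property dict for node_type_id 100
matching = types.find('model_type', 'biophysical')  # list of matching node_type_ids
assert all(types[ntid]['model_type'] == 'biophysical' for ntid in matching)
```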
diff --git a/bmtk-vb/bmtk/utils/sonata/types_table.pyc b/bmtk-vb/bmtk/utils/sonata/types_table.pyc
new file mode 100644
index 0000000..a242e6c
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/types_table.pyc differ
diff --git a/bmtk-vb/bmtk/utils/sonata/utils.py b/bmtk-vb/bmtk/utils/sonata/utils.py
new file mode 100644
index 0000000..953572d
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/sonata/utils.py
@@ -0,0 +1,116 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import sys
+
+import h5py
+import pandas as pd
+import numpy as np
+
+MAGIC_ATTR = 'magic'
+MAGIC_VAL = 0x0A7A
+VERSION_ATTR = 'version'
+VERSION_NA = 'NA'
+VERSION_CURRENT = '0.1'
+
+try:
+ ver_split = VERSION_CURRENT.split('.')
+ VERSION_MAJOR = int(ver_split[0])
+ VERSION_MINOR = int(ver_split[1])
+except (IndexError, AttributeError, ValueError):
+ VERSION_MAJOR = 0
+ VERSION_MINOR = 1
+
+
+def listify(files):
+ # TODO: change this to include any iterable datastructures (sets, panda sequences, etc)
+ if not isinstance(files, (list, tuple)):
+ return [files]
+ else:
+ return files
+
+
+def load_h5(h5file, mode='r'):
+ # TODO: Allow for h5py.Group also
+ if isinstance(h5file, h5py.File):
+ return h5file
+
+ return h5py.File(h5file, mode)
+
+
+def load_csv(csvfile):
+ # TODO: make the separator more flexible
+ if isinstance(csvfile, pd.DataFrame):
+ return csvfile
+
+ # TODO: check if it is csv object and convert to a pd dataframe
+ return pd.read_csv(csvfile, sep=' ', na_values='NONE')
+
+
+def get_attribute_h5(h5obj, attribute_name, default=None):
+ val = h5obj.attrs.get(attribute_name, default)
+ if using_py3 and isinstance(val, bytes):
+ # There is a bug in h5py where unicode/str attributes can be returned as bytes
+ val = val.decode()
+
+ return val
+
+
+def check_magic(hdf5_file):
+ """Check the magic attribute exists according to the sonata format"""
+ h5_file_obj = load_h5(hdf5_file)
+ if MAGIC_ATTR not in h5_file_obj.attrs:
+ raise Exception('File {} missing top-level \"{}\" attribute.'.format(h5_file_obj.filename, MAGIC_ATTR))
+ elif np.uint32(get_attribute_h5(h5_file_obj, MAGIC_ATTR)) != MAGIC_VAL:
+ raise Exception('File {} has unexpected magic value (expected {})'.format(h5_file_obj.filename, MAGIC_VAL))
+
+ return True
+
+
+def get_version(hdf5_file):
+ h5_file_obj = load_h5(hdf5_file)
+ if VERSION_ATTR not in h5_file_obj.attrs:
+ return VERSION_NA
+
+ else:
+ version_val = get_attribute_h5(h5_file_obj, VERSION_ATTR)
+ version_str = str(version_val[0])
+ for ver_sub in version_val[1:]:
+ version_str += '.{}'.format(ver_sub)
+ return version_str
+
+
+def add_hdf5_magic(hdf5_handle):
+ hdf5_handle['/'].attrs['magic'] = np.uint32(0x0A7A)
+
+
+def add_hdf5_version(hdf5_handle):
+ hdf5_handle['/'].attrs['version'] = [np.uint32(VERSION_MAJOR), np.uint32(VERSION_MINOR)]
+
+
+if sys.version_info[0] == 3:
+ using_py3 = True
+ range_itr = range
+else:
+ using_py3 = False
+ range_itr = xrange
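A small sketch exercising the magic/version helpers above (the file name is arbitrary): stamp a new file, then validate and read the attributes back. Both check_magic and get_version accept a filename or an open h5py.File via load_h5.

```python
import h5py

with h5py.File('example_network.h5', 'w') as h5:
    add_hdf5_magic(h5)       # writes the 0x0A7A magic attribute to the root
    add_hdf5_version(h5)     # writes [major, minor] as uint32

check_magic('example_network.h5')          # raises if the magic attr is missing/wrong
print(get_version('example_network.h5'))   # -> '0.1'
```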
diff --git a/bmtk-vb/bmtk/utils/sonata/utils.pyc b/bmtk-vb/bmtk/utils/sonata/utils.pyc
new file mode 100644
index 0000000..5a44079
Binary files /dev/null and b/bmtk-vb/bmtk/utils/sonata/utils.pyc differ
diff --git a/bmtk-vb/bmtk/utils/spike_trains/__init__.py b/bmtk-vb/bmtk/utils/spike_trains/__init__.py
new file mode 100644
index 0000000..7fdcfe6
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/spike_trains/__init__.py
@@ -0,0 +1,24 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .spikes_csv import SpikesGenerator
+from .spikes_file import SpikesFile
diff --git a/bmtk-vb/bmtk/utils/spike_trains/spikes_csv.py b/bmtk-vb/bmtk/utils/spike_trains/spikes_csv.py
new file mode 100644
index 0000000..64651d0
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/spike_trains/spikes_csv.py
@@ -0,0 +1,94 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import numpy as np
+import csv
+import h5py
+from six import string_types
+
+from bmtk.utils import sonata
+
+class Rates(object):
+ def __iter__(self):
+ return self
+
+ def next(self):
+ raise StopIteration
+
+ def __next__(self): # Python 3 iterator protocol
+ return self.next()
+
+
+class NormalRates(Rates):
+ def __init__(self, t_start, t_end, rate_mu, rate_sigma=5.0):
+ self.t_start = t_start
+ self.t_end = t_end
+ self.period_mu = 1.0/float(rate_mu)
+ self.period_sigma = 1.0/float(rate_mu + rate_sigma)
+
+ self._current_t = t_start
+
+ def next(self):
+ self._current_t += abs(np.random.normal(self.period_mu, self.period_sigma))
+ if self._current_t > self.t_end:
+ self._current_t = self.t_start
+ raise StopIteration
+ else:
+ return self._current_t
+
+
+class SpikesGenerator(object):
+ def __init__(self, nodes, populations=None, t_min=0, t_max=1.0):
+ self._t_min = t_min
+ self._t_max = t_max
+
+ if isinstance(nodes, string_types):
+ nodes_h5 = h5py.File(nodes, 'r')
+ nodes_grp = nodes_h5['/nodes']
+ if populations is None:
+ populations = nodes_grp.keys()
+
+ # TODO: Need a way to Use sonata library without having to use node-types
+ nodes = []
+ for node_pop in populations:
+ nodes.extend(nodes_grp[node_pop]['node_id'])
+
+ self._nodes = {n: Rates() for n in nodes}
+
+ def set_rate(self, firing_rate, gids=None, t_start=None, t_end=None):
+ t_start = t_start or self._t_min
+ assert(t_start >= self._t_min)
+
+ t_end = t_end or self._t_max
+ assert(t_end <= self._t_max)
+
+ gids = gids or self._nodes.keys()
+ for gid in gids:
+ self._nodes[gid] = NormalRates(t_start, t_end, firing_rate)
+
+ def save_csv(self, csv_file_name, in_ms=False):
+ conv = 1000.0 if in_ms else 1.0
+
+ with open(csv_file_name, 'w') as csv_file:
+ csv_writer = csv.writer(csv_file, delimiter=' ')
+ csv_writer.writerow(['gid', 'spike-times'])
+ for gid, rate_gen in self._nodes.items():
+ csv_writer.writerow([gid, ','.join(str(r*conv) for r in rate_gen)])
+
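A hedged usage sketch for SpikesGenerator; the nodes file path is an assumption, and the file must contain a /nodes group with per-population node_id datasets as read in __init__ above.

```python
# Generate ~15 Hz normally-jittered spike trains for every node over 3 seconds
# and write them to a space-separated csv (times converted to milliseconds).
gen = SpikesGenerator('network/v1_nodes.h5', t_min=0.0, t_max=3.0)
gen.set_rate(15.0)                    # applies NormalRates to all gids
gen.save_csv('spikes_input.csv', in_ms=True)
```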
diff --git a/bmtk-vb/bmtk/utils/spike_trains/spikes_file.py b/bmtk-vb/bmtk/utils/spike_trains/spikes_file.py
new file mode 100644
index 0000000..fd4577a
--- /dev/null
+++ b/bmtk-vb/bmtk/utils/spike_trains/spikes_file.py
@@ -0,0 +1,174 @@
+import os
+from collections import Counter
+import numpy as np
+import pandas as pd
+import h5py
+
+
+class SpikesFile(object):
+ _file_adaptors = {}
+
+ def __init__(self, filename, mode='r', filetype=None, **params):
+ self._ftype = self._get_file_type(filename, filetype)
+ self._adaptor = SpikesFile._file_adaptors[self._ftype](filename, **params)
+
+ def _get_file_type(self, filename, filetype):
+ if filetype is not None:
+ if filetype not in self._file_adaptors:
+ raise Exception('Unknown spikes file type {}'.format(filetype))
+ else:
+ return filetype
+
+ else:
+ for ft, adaptor_cls in self._file_adaptors.items():
+ if adaptor_cls.is_type(filename):
+ return ft
+
+ raise Exception('Unable to determine file type for {}.'.format(filename))
+
+ def _get_spikes_sort(self, spikes_list, t_window=None):
+ if t_window is not None:
+ spikes_list.sort()
+ return [s for s in spikes_list if t_window[0] <= s <= t_window[1]]
+ else:
+ spikes_list.sort()
+ return spikes_list
+
+ @property
+ def gids(self):
+ """Return a list of all gids"""
+ return self._adaptor.gids
+
+ def to_dataframe(self):
+ return self._adaptor.to_dataframe()
+
+ def get_spikes(self, gid, time_window=None):
+ return self._adaptor.get_spikes(gid, time_window=time_window)
+
+ def __eq__(self, other):
+ return self.is_equal(other)
+
+ def is_equal(self, other, err=0.00001, time_window=None):
+ # check that gids matches
+ if set(self.gids) != set(other.gids):
+ return False
+
+ for gid in self.gids:
+ spikes_self = self._get_spikes_sort(self.get_spikes(gid), time_window)
+ spikes_other = self._get_spikes_sort(other.get_spikes(gid), time_window)
+
+ if len(spikes_other) != len(spikes_self):
+ return False
+
+ for s0, s1 in zip(spikes_self, spikes_other):
+ if abs(s0 - s1) > err:
+ return False
+ return True
+
+ @classmethod
+ def register_adaptor(cls, adaptor_cls):
+ cls._file_adaptors[adaptor_cls.ext_name()] = adaptor_cls
+ return adaptor_cls
+
+
+class SpikesFileAdaptor(object):
+ def __init__(self, filename):
+ self._filename = filename
+
+ @property
+ def gids(self):
+ raise NotImplementedError
+
+ def to_dataframe(self):
+ raise NotImplementedError
+
+ def get_spikes(self, gid, time_window=None):
+ raise NotImplementedError
+
+ @staticmethod
+ def is_type(filename):
+ raise NotImplementedError
+
+ @staticmethod
+ def ext_name():
+ raise NotImplementedError
+
+
+@SpikesFile.register_adaptor
+class SpikesFileH5(SpikesFileAdaptor):
+ def __init__(self, filename, **params):
+ super(SpikesFileH5, self).__init__(filename)
+ self._h5_handle = h5py.File(self._filename, 'r')
+ self._sort_order = self._h5_handle['/spikes'].attrs.get('sorting', None)
+ self._gid_ds = self._h5_handle['/spikes/gids']
+ self._timestamps_ds = self._h5_handle['/spikes/timestamps']
+
+ self._indexed = False
+ self._gid_indicies = {}
+ self._build_indicies()
+
+ def _build_indicies(self):
+ if self._sort_order == 'by_gid':
+ indx_beg = 0
+ c_gid = self._gid_ds[0]
+ for indx, gid in enumerate(self._gid_ds):
+ if gid != c_gid:
+ self._gid_indicies[c_gid] = slice(indx_beg, indx)
+ c_gid = gid
+ indx_beg = indx
+ self._gid_indicies[c_gid] = slice(indx_beg, indx+1)
+ self._indexed = True
+ else:
+ self._gid_indicies = {int(gid): [] for gid in np.unique(self._gid_ds)}
+ for indx, gid in enumerate(self._gid_ds):
+ self._gid_indicies[gid].append(indx)
+ self._indexed = True
+
+ @property
+ def gids(self):
+ return list(self._gid_indicies.keys())
+
+ def to_dataframe(self):
+ return pd.DataFrame({'timestamps': self._timestamps_ds, 'gids': self._gid_ds})
+
+ def get_spikes(self, gid, time_window=None):
+ return self._timestamps_ds[self._gid_indicies[gid]]
+
+ @staticmethod
+ def is_type(filename):
+ _, fext = os.path.splitext(filename)
+ fext = fext.lower()
+ return fext == '.h5' or fext == '.hdf' or fext == '.hdf5'
+
+ @staticmethod
+ def ext_name():
+ return 'h5'
+
+
+@SpikesFile.register_adaptor
+class SpikesFileCSV(SpikesFileAdaptor):
+ def __init__(self, filename, **params):
+ super(SpikesFileCSV, self).__init__(filename)
+ self._spikes_df = pd.read_csv(self._filename, names=['timestamps', 'gids'], sep=' ')
+
+ @property
+ def gids(self):
+ return list(self._spikes_df.gids.unique())
+
+ def to_dataframe(self):
+ return self._spikes_df
+
+ def get_spikes(self, gid, time_window=None):
+ return np.array(self._spikes_df[self._spikes_df.gids == gid].timestamps)
+
+ @staticmethod
+ def is_type(filename):
+ _, fext = os.path.splitext(filename)
+ fext = fext.lower()
+ return fext == '.csv' or fext == '.txt'
+
+ @staticmethod
+ def ext_name():
+ return 'csv'
+
+
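A sketch of the adaptor-based loading above; the output paths are assumptions. The file type is inferred from the extension via each adaptor's is_type(), and is_equal() compares spike trains within a tolerance.

```python
ref = SpikesFile('output/spikes.h5')     # resolved to the registered h5 adaptor
new = SpikesFile('output/spikes.csv')    # resolved to the registered csv adaptor

print(ref.gids[:10])                     # gids present in the reference file
df = ref.to_dataframe()                  # columns: timestamps, gids

if ref.is_equal(new, err=1e-5):
    print('spike trains match')
```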
diff --git a/bmtk-vb/build/lib/bmtk/__init__.py b/bmtk-vb/build/lib/bmtk/__init__.py
new file mode 100644
index 0000000..f4f772b
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/__init__.py
@@ -0,0 +1,23 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+__version__ = '0.0.6'
diff --git a/bmtk-vb/build/lib/bmtk/analyzer/__init__.py b/bmtk-vb/build/lib/bmtk/analyzer/__init__.py
new file mode 100644
index 0000000..7b04c40
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/analyzer/__init__.py
@@ -0,0 +1,189 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+from six import string_types
+import h5py
+import pandas as pd
+import matplotlib.pyplot as plt
+import numpy as np
+
+import bmtk.simulator.utils.config as cfg
+
+
+def _get_config(config):
+ if isinstance(config, string_types):
+ return cfg.from_json(config)
+ elif isinstance(config, dict):
+ return config
+ else:
+ raise Exception('Could not convert {} (type "{}") to json.'.format(config, type(config)))
+
+def plot_potential(cell_vars_h5=None, config_file=None, gids=None, show_plot=True, save=False):
+ if cell_vars_h5 is None and config_file is None:
+ raise Exception('Please specify a cell_vars hdf5 file or a simulation config.')
+
+ if cell_vars_h5 is not None:
+ plot_potential_hdf5(cell_vars_h5, gids=gids, show_plot=show_plot,
+ save_as='sim_potential.jpg' if save else None)
+
+ else:
+ config = _get_config(config_file)
+ gid_list = gids or config['node_id_selections']['save_cell_vars']
+ for gid in gid_list:
+ save_as = '{}_v.jpg'.format(gid) if save else None
+ title = 'cell gid {}'.format(gid)
+ var_h5 = os.path.join(config['output']['cell_vars_dir'], '{}.h5'.format(gid))
+ plot_potential_hdf5(var_h5, gids=[gid], title=title, show_plot=show_plot, save_as=save_as)
+
+
+def plot_potential_hdf5(cell_vars_h5, gids, title='membrane potential', show_plot=True, save_as=None):
+ data_h5 = h5py.File(cell_vars_h5, 'r')
+ membrane_trace = data_h5['data']
+
+ time_ds = data_h5['/mapping/time']
+ tstart = time_ds[0]
+ tstop = time_ds[1]
+ x_axis = np.linspace(tstart, tstop, len(membrane_trace), endpoint=True)
+
+ gids_ds = data_h5['/mapping/gids']
+ index_ds = data_h5['/mapping/index_pointer']
+ index_lookup = {gids_ds[i]: (index_ds[i], index_ds[i+1]) for i in range(len(gids_ds))}
+ gids = list(gids_ds) if gids is None else gids
+ for gid in gids:
+ var_indx = index_lookup[gid][0]
+ plt.plot(x_axis, membrane_trace[:, var_indx], label=gid)
+
+ plt.xlabel('time (ms)')
+ plt.ylabel('membrane (mV)')
+ plt.title(title)
+ plt.legend(markerscale=2, scatterpoints=1)
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show_plot:
+ plt.show()
+
+
+def plot_calcium(cell_vars_h5=None, config_file=None, gids=None, show_plot=True, save=False):
+ if cell_vars_h5 is None and config_file is None:
+ raise Exception('Please specify a cell_vars hdf5 file or a simulation config.')
+
+ if cell_vars_h5 is not None:
+ plot_calcium_hdf5(cell_vars_h5, gids, show_plot=show_plot, save_as='sim_ca.jpg' if save else None)
+
+ else:
+ config = _get_config(config_file)
+ gid_list = gids or config['node_id_selections']['save_cell_vars']
+ for gid in gid_list:
+ save_as = '{}_ca.jpg'.format(gid) if save else None
+ title = 'cell gid {}'.format(gid)
+ var_h5 = os.path.join(config['output']['cell_vars_dir'], '{}.h5'.format(gid))
+ plot_calcium_hdf5(var_h5, gids=[gid], title=title, show_plot=show_plot, save_as=save_as)
+
+
+def plot_calcium_hdf5(cell_vars_h5, gids, title='Ca2+ influx', show_plot=True, save_as=None):
+ data_h5 = h5py.File(cell_vars_h5, 'r')
+ cai_trace = data_h5['cai/data']
+
+ time_ds = data_h5['/mapping/time']
+ tstart = time_ds[0]
+ tstop = time_ds[1]
+ x_axis = np.linspace(tstart, tstop, len(cai_trace), endpoint=True)
+
+ gids_ds = data_h5['/mapping/gids']
+ index_ds = data_h5['/mapping/index_pointer']
+ index_lookup = {gids_ds[i]: (index_ds[i], index_ds[i+1]) for i in range(len(gids_ds))}
+ gids = list(gids_ds) if gids is None else gids
+ for gid in gids:
+ var_indx = index_lookup[gid][0]
+ plt.plot(x_axis, cai_trace[:, var_indx], label=gid)
+
+ #plt.plot(x_axis, cai_trace)
+ plt.xlabel('time (ms)')
+ plt.ylabel('calcium [Ca2+]')
+ plt.title(title)
+ plt.legend(markerscale=2, scatterpoints=1)
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show_plot:
+ plt.show()
+
+
+def spikes_table(config_file, spikes_file=None):
+ config = _get_config(config_file)
+ if spikes_file is None:
+ spikes_file = config['output']['spikes_file']
+ spikes_h5 = h5py.File(spikes_file, 'r')
+ gids = np.array(spikes_h5['/spikes/gids'], dtype=np.uint64)
+ times = np.array(spikes_h5['/spikes/timestamps'], dtype=np.float64)
+ return pd.DataFrame(data={'gid': gids, 'spike time (ms)': times})
+
+
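+# nodes_table() below assumes a SONATA-style HDF5 layout (group names here are
+# illustrative, not confirmed by this file):
+#
+#   /nodes/<population>/node_id           dataset of node ids
+#   /nodes/<population>/node_type_id      dataset of type ids
+#   /nodes/<population>/node_group_id     which group each node belongs to
+#   /nodes/<population>/node_group_index  row of the node within its group
+#   /nodes/<population>/<group_id>/...    per-group property datasets
+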
+def nodes_table(nodes_file, population):
+ # TODO: Integrate into sonata api
+ nodes_h5 = h5py.File(nodes_file, 'r')
+ nodes_pop = nodes_h5['/nodes'][population]
+ root_df = pd.DataFrame(data={'node_id': nodes_pop['node_id'], 'node_type_id': nodes_pop['node_type_id'],
+ 'node_group_id': nodes_pop['node_group_id'],
+ 'node_group_index': nodes_pop['node_group_index']}) #,
+ #index=[nodes_pop['node_group_id'], nodes_pop['node_group_index']])
+ root_df = root_df.set_index(['node_group_id', 'node_group_index'])
+
+ node_grps = np.unique(nodes_pop['node_group_id'])
+ for grp_id in node_grps:
+ sub_group = nodes_pop[str(grp_id)]
+ grp_df = pd.DataFrame()
+ for hf_key in sub_group:
+ hf_obj = sub_group[hf_key]
+ if isinstance(hf_obj, h5py.Dataset):
+ grp_df[hf_key] = hf_obj
+
+ subgrp_len = len(grp_df)
+ if subgrp_len > 0:
+ grp_df['node_group_id'] = [grp_id]*subgrp_len
+ grp_df['node_group_index'] = range(subgrp_len)
+ grp_df = grp_df.set_index(['node_group_id', 'node_group_index'])
+ root_df = root_df.join(other=grp_df, how='left')
+
+ return root_df.reset_index(drop=True)
+
+
+def node_types_table(node_types_file, population):
+ # TODO: filter by population once this is integrated into the sonata api
+ return pd.read_csv(node_types_file, sep=' ')
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/analyzer/cell_vars.py b/bmtk-vb/build/lib/bmtk/analyzer/cell_vars.py
new file mode 100644
index 0000000..da2e719
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/analyzer/cell_vars.py
@@ -0,0 +1,95 @@
+import os
+import matplotlib.pyplot as plt
+
+from .io_tools import load_config
+from .utils import listify
+from bmtk.utils.cell_vars import CellVarsFile
+
+# In the case reports are missing units, try to guess based on the variable name
+missing_units = {
+ 'V_m': 'mV',
+ 'cai': 'mM',
+ 'v': 'mV'
+}
+
+
+def _get_cell_report(config_file, report_name):
+ cfg = load_config(config_file)
+ if report_name is not None:
+ report = cfg.reports[report_name]
+ report_fname = report['file_name'] if 'file_name' in report else '{}.h5'.format(report_name)
+ return report_name, os.path.join(cfg.output_dir, report_fname)
+
+ else:
+ cell_var_reports = [(r_name, r_dict) for r_name, r_dict in cfg.reports.items()
+ if r_dict['module'] == 'membrane_report']
+ if len(cell_var_reports) == 0:
+ raise Exception('Could not find any membrane_reports in {}'.format(config_file))
+
+ elif len(cell_var_reports) > 1:
+ raise Exception('Found more than one membrane_report, please specify report_name')
+
+ else:
+ report_name = cell_var_reports[0][0]
+ report = cell_var_reports[0][1]
+ report_fname = report['file_name'] if 'file_name' in report else '{}.h5'.format(report_name)
+ return report_name, os.path.join(cfg.output_dir, report_fname)
+
+
+def plot_report(config_file=None, report_file=None, report_name=None, variables=None, gids=None):
+ if report_file is None:
+ report_name, report_file = _get_cell_report(config_file, report_name)
+
+ var_report = CellVarsFile(report_file)
+ variables = listify(variables) if variables is not None else var_report.variables
+ gids = listify(gids) if gids is not None else var_report.gids
+ time_steps = var_report.time_trace
+
+ def __units_str(var):
+ units = var_report.units(var)
+ if units == CellVarsFile.UNITS_UNKNOWN:
+ units = missing_units.get(var, '')
+ return '({})'.format(units) if units else ''
+
+ n_plots = len(variables)
+ if n_plots > 1:
+ # If more than one variable to plot, do so in different subplots
+ f, axarr = plt.subplots(n_plots, 1)
+ for i, var in enumerate(variables):
+ for gid in gids:
+ axarr[i].plot(time_steps, var_report.data(gid=gid, var_name=var), label='gid {}'.format(gid))
+
+ axarr[i].legend()
+ axarr[i].set_ylabel('{} {}'.format(var, __units_str(var)))
+ if i < n_plots - 1:
+ axarr[i].set_xticklabels([])
+
+ axarr[i].set_xlabel('time (ms)')
+
+ elif n_plots == 1:
+ # For plotting a single variable
+ plt.figure()
+ for gid in gids:
+ plt.plot(time_steps, var_report.data(gid=gid, var_name=variables[0]), label='gid {}'.format(gid))
+ plt.ylabel('{} {}'.format(variables[0], __units_str(variables[0])))
+ plt.xlabel('time (ms)')
+
+ else:
+ return
+
+ plt.show()
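+
+# Illustrative usage (hypothetical config path and report name):
+#
+#   plot_report('simulation_config.json', report_name='membrane_report',
+#               variables=['v'], gids=[0, 1, 2])
+#
+# With report_file=None the report path is resolved from the config via
+# _get_cell_report(); passing report_file directly skips the config lookup.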
diff --git a/bmtk-vb/build/lib/bmtk/analyzer/firing_rates.py b/bmtk-vb/build/lib/bmtk/analyzer/firing_rates.py
new file mode 100644
index 0000000..bca785c
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/analyzer/firing_rates.py
@@ -0,0 +1,55 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import numpy as np
+
+def convert_rates(rates_file):
+ rates_df = pd.read_csv(rates_file, sep=' ', names=['gid', 'time', 'rate'])
+ rates_sorted_df = rates_df.sort_values(['gid', 'time'])
+ rates_dict = {}
+ for gid, rates in rates_sorted_df.groupby('gid'):
+ start = rates['time'].iloc[0]
+ end = rates['time'].iloc[-1]
+ dt = float(end - start)/len(rates)
+ rates_dict[gid] = {'start': start, 'end': end, 'dt': dt, 'rates': np.array(rates['rate'])}
+
+ return rates_dict
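+
+# The rates file is expected to hold space-separated "gid time rate" rows, e.g.:
+#
+#   0 0.0 5.2
+#   0 1.0 5.4
+#   1 0.0 9.8
+#
+# which convert_rates() turns into
+#   {0: {'start': 0.0, 'end': 1.0, 'dt': 0.5, 'rates': array([5.2, 5.4])}, ...}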
+
+
+def firing_rates_equal(rates_file1, rates_file2, err=0.0001):
+ trial_1 = convert_rates(rates_file1)
+ trial_2 = convert_rates(rates_file2)
+ if set(trial_1.keys()) != set(trial_2.keys()):
+ return False
+
+ for gid, rates_data1 in trial_1.items():
+ rates_data2 = trial_2[gid]
+ if rates_data1['dt'] != rates_data2['dt'] or rates_data1['start'] != rates_data2['start'] or rates_data1['end'] != rates_data2['end']:
+ return False
+
+ for r1, r2 in zip(rates_data1['rates'], rates_data2['rates']):
+ if abs(r1 - r2) > err:
+ return False
+
+ return True
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/analyzer/io_tools.py b/bmtk-vb/build/lib/bmtk/analyzer/io_tools.py
new file mode 100644
index 0000000..326389b
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/analyzer/io_tools.py
@@ -0,0 +1,11 @@
+from six import string_types
+from bmtk.simulator.utils.config import ConfigDict
+
+
+def load_config(config):
+ if isinstance(config, string_types):
+ return ConfigDict.from_json(config)
+ elif isinstance(config, dict):
+ return ConfigDict.from_dict(config)
+ else:
+ raise Exception('Could not convert {} (type "{}") to json.'.format(config, type(config)))
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/analyzer/spike_trains.py b/bmtk-vb/build/lib/bmtk/analyzer/spike_trains.py
new file mode 100644
index 0000000..a7f6c8d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/analyzer/spike_trains.py
@@ -0,0 +1,16 @@
+import numpy as np
+import pandas as pd
+import h5py
+
+
+from bmtk.analyzer.visualization.spikes import plot_spikes as raster_plot
+from bmtk.analyzer.visualization.spikes import plot_rates as rates_plot
+from .io_tools import load_config
+from bmtk.utils.spike_trains import SpikesFile
+
+
+def to_dataframe(config_file, spikes_file=None):
+ config = load_config(config_file)
+ spikes = SpikesFile(spikes_file if spikes_file is not None else config.spikes_file)
+ return spikes.to_dataframe()
+
diff --git a/bmtk-vb/build/lib/bmtk/analyzer/spikes_analyzer.py b/bmtk-vb/build/lib/bmtk/analyzer/spikes_analyzer.py
new file mode 100644
index 0000000..af77187
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/analyzer/spikes_analyzer.py
@@ -0,0 +1,127 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import numpy as np
+
+try:
+ from distutils.version import LooseVersion
+ use_sort_values = LooseVersion(pd.__version__) >= LooseVersion('0.19.0')
+
+except:
+ use_sort_values = False
+
+
+def spikes2dict(spikes_file):
+ spikes_df = pd.read_csv(spikes_file, sep=' ', names=['time', 'gid'])
+
+ if use_sort_values:
+ spikes_sorted = spikes_df.sort_values(['gid', 'time'])
+ else:
+ spikes_sorted = spikes_df.sort(['gid', 'time'])
+
+ spike_dict = {}
+ for gid, spike_train in spikes_sorted.groupby('gid'):
+ spike_dict[gid] = np.array(spike_train['time'])
+ return spike_dict
+
+
+def spike_files_equal(spikes_txt_1, spikes_txt_2, err=0.0001):
+ trial_1 = spikes2dict(spikes_txt_1)
+ trial_2 = spikes2dict(spikes_txt_2)
+ if set(trial_1.keys()) != set(trial_2.keys()):
+ return False
+
+ for gid, spike_train1 in trial_1.items():
+ spike_train2 = trial_2[gid]
+ if len(spike_train1) != len(spike_train2):
+ return False
+
+ for s1, s2 in zip(spike_train1, spike_train2):
+ if abs(s1 - s2) > err:
+ return False
+
+ return True
+
+
+def get_mean_firing_rates(spike_gids, node_ids, tstop_msec):
+ """
+ Compute mean firing rate over the duration of the simulation
+
+ :param spike_gids: gids of cells which spiked
+ :param node_ids: np.array of node_ids
+ :param tstop_msec: duration of the simulation (ms)
+
+ :return mean_firing_rate: np.array of mean firing rates (Hz)
+ """
+
+ min_gid = np.min(node_ids)
+ max_gid = np.max(node_ids)
+
+ gid_bins = np.arange(min_gid - 0.5, max_gid + 1.5, 1.0)
+ hist, _ = np.histogram(spike_gids, bins=gid_bins)
+
+ tstop_sec = tstop_msec * 1e-3
+ mean_firing_rates = hist / tstop_sec
+
+ return mean_firing_rates
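+
+# Worked example: node_ids 0..2, a 2000 ms run, and spikes from gids [0, 0, 2].
+# The per-gid histogram is [2, 0, 1]; dividing by 2.0 s gives [1.0, 0.0, 0.5] Hz:
+#
+#   get_mean_firing_rates(np.array([0, 0, 2]), np.arange(3), 2000.0)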
+
+
+
+def spikes_equal_in_window(spikes1,spikes2,twindow):
+ """
+ Compare spikes within a time window
+ :param spikes1: dict with "time" and "gid" arrays for raster 1
+ :param spikes2: dict with "time" and "gid" arrays for raster 2
+ :param twindow: [tstart,tend] time window
+
+ :return boolean: True if equal, False if different
+ """
+
+ ix1_window0 = np.where(spikes1["time"] > twindow[0])
+ ix1_window1 = np.where(spikes1["time"] < twindow[1])
+ ix1_window = np.intersect1d(ix1_window0, ix1_window1)
+
+ ix2_window0 = np.where(spikes2["time"] > twindow[0])
+ ix2_window1 = np.where(spikes2["time"] < twindow[1])
+ ix2_window = np.intersect1d(ix2_window0, ix2_window1)
+
+ if len(ix1_window) != len(ix2_window):
+ return False
+
+ return (np.array_equal(spikes1["gid"][ix1_window], spikes2["gid"][ix2_window]) and
+ np.allclose(spikes1["time"][ix1_window], spikes2["time"][ix2_window], atol=0.0001))
+
+
+def plot_raster_query(ax, spikes, nodes_df, cmap, tstart=0.0, tend=3000.0, marker='o', s=10, lw=0):
+ """Scatter a raster of (times, gids) onto ax, one color per nodes_df query in cmap.
+
+ NOTE: the original header of this function was lost to garbling; the signature
+ is reconstructed from the parameters used in the body below.
+ """
+ ix_t = np.where((spikes[0] > tstart) & (spikes[0] < tend))
+
+ spike_times = spikes[0][ix_t]
+ spike_gids = spikes[1][ix_t]
+
+ for query, col in cmap.items():
+ query_df = nodes_df.query(query)
+ gids_query = query_df.index
+ print("{} ncells: {} {}".format(query, len(gids_query), col))
+
+ ix_g = np.in1d(spike_gids, gids_query)
+ ax.scatter(spike_times[ix_g], spike_gids[ix_g],
+ marker=marker,
+ # facecolors='none',
+ facecolors=col,
+ # edgecolors=col,
+ s=s,
+ label=query,
+ lw=lw)
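+
+
+# Illustrative usage (hypothetical queries/colors; nodes_df indexed by gid, and
+# matplotlib.pyplot assumed imported as plt by the caller):
+#
+#   fig, ax = plt.subplots()
+#   cmap = {'pop_name == "exc"': 'red', 'pop_name == "inh"': 'blue'}
+#   plot_raster_query(ax, (spike_times, spike_gids), nodes_df, cmap, tend=2500.0)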
diff --git a/bmtk-vb/build/lib/bmtk/analyzer/visualization/spikes.py b/bmtk-vb/build/lib/bmtk/analyzer/visualization/spikes.py
new file mode 100644
index 0000000..e7b34e9
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/analyzer/visualization/spikes.py
@@ -0,0 +1,499 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import csv
+import h5py
+from six import string_types
+import pandas as pd
+import numpy as np
+import matplotlib.pyplot as plt
+import matplotlib.cm as cmx
+import matplotlib.colors as colors
+import matplotlib.gridspec as gridspec
+
+import bmtk.simulator.utils.config as config
+
+from mpl_toolkits.axes_grid1 import make_axes_locatable
+
+def _create_node_table(node_file, node_type_file, group_key=None, exclude=[]):
+ """Creates a merged nodes.csv and node_types.csv dataframe with excluded items removed. Returns a dataframe."""
+ node_types_df = pd.read_csv(node_type_file, sep=' ', index_col='node_type_id')
+ nodes_h5 = h5py.File(node_file, 'r')
+ # TODO: Use utils.spikesReader
+ node_pop_name = list(nodes_h5['/nodes'].keys())[0]
+
+ nodes_grp = nodes_h5['/nodes'][node_pop_name]
+ # TODO: Need to be able to handle gid or node_id
+ nodes_df = pd.DataFrame({'node_id': nodes_grp['node_id'], 'node_type_id': nodes_grp['node_type_id']})
+ #nodes_df = pd.DataFrame({'node_id': nodes_h5['/nodes/node_gid'], 'node_type_id': nodes_h5['/nodes/node_type_id']})
+ nodes_df.set_index('node_id', inplace=True)
+
+ # nodes_df = pd.read_csv(node_file, sep=' ', index_col='node_id')
+ full_df = pd.merge(left=nodes_df, right=node_types_df, how='left', left_on='node_type_id', right_index=True)
+
+ if group_key is not None and len(exclude) > 0:
+ # make sure the group_key exists as a column
+ if group_key not in full_df:
+ raise Exception('Could not find column {}'.format(group_key))
+
+ # remove any rows whose group_key value is in the exclude list
+ for cond in exclude:
+ full_df = full_df[full_df[group_key] != cond]
+
+ return full_df
+
+def _count_spikes(spikes_file, max_gid, interval=None):
+ def parse_line(line):
+ ts, gid = line.strip().split(' ')
+ return float(ts), int(gid)
+
+ if interval is None:
+ t_max = t_bounds_low = -1.0
+ t_min = t_bounds_high = 1e16
+ elif hasattr(interval, "__getitem__") and len(interval) == 2:
+ t_min = t_bounds_low = interval[0]
+ t_max = t_bounds_high = interval[1]
+ elif isinstance(interval, float):
+ t_max = t_min = t_bounds_low = interval
+ t_bounds_high = 1e16
+ else:
+ raise Exception("Unable to determine interval.")
+
+ max_gid = int(max_gid) # strange bug where max_gid was being returned as a float.
+ spikes = [[] for _ in range(max_gid+1)]
+ spike_sums = np.zeros(max_gid+1)
+ # TODO: Use utils.spikesReader
+ spikes_h5 = h5py.File(spikes_file, 'r')
+ gid_ds = spikes_h5['/spikes/gids']
+ ts_ds = spikes_h5['/spikes/timestamps']
+
+ for i in range(len(gid_ds)):
+ ts = ts_ds[i]
+ gid = gid_ds[i]
+
+ if gid <= max_gid and t_bounds_low <= ts <= t_bounds_high:
+ spikes[gid].append(ts)
+ spike_sums[gid] += 1
+ t_min = ts if ts < t_min else t_min
+ t_max = ts if ts > t_max else t_max
+
+ """
+ with open(spikes_file, 'r') as fspikes:
+ for line in fspikes:
+ ts, gid = parse_line(line)
+ if gid <= max_gid and t_bounds_low <= ts <= t_bounds_high:
+ spikes[gid].append(ts)
+ spike_sums[gid] += 1
+ t_min = ts if ts < t_min else t_min
+ t_max = ts if ts > t_max else t_max
+ """
+ return spikes, spike_sums/(float(t_max-t_min)*1e-3)
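+
+# interval may be None (count over the whole run), a (t_start, t_end) pair, or a
+# single float start time, e.g. (hypothetical path):
+#   _count_spikes('output/spikes.h5', max_gid=99, interval=(500.0, 3000.0))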
+
+
+
+def plot_spikes_config(configure, group_key=None, exclude=[], save_as=None, show_plot=True):
+ if isinstance(configure, string_types):
+ conf = config.from_json(configure)
+ elif isinstance(configure, dict):
+ conf = configure
+ else:
+ raise Exception("configure variable must be either a json dictionary or json file name.")
+
+ cells_file_name = conf['internal']['nodes']
+ cell_models_file_name = conf['internal']['node_types']
+ spikes_file = conf['output']['spikes_ascii']
+
+ plot_spikes(cells_file_name, cell_models_file_name, spikes_file, group_key, exclude, save_as, show_plot)
+
+
+def plot_spikes(cells_file, cell_models_file, spikes_file, population=None, group_key=None, exclude=[], save_as=None,
+ show=True, title=None):
+ # check if can be shown and/or saved
+ #if save_as is not None:
+ # if os.path.exists(save_as):
+ # raise Exception('file {} already exists. Cannot save.'.format(save_as))
+
+ cm_df = pd.read_csv(cell_models_file, sep=' ')
+ cm_df.set_index('node_type_id', inplace=True)
+
+ cells_h5 = h5py.File(cells_file, 'r')
+ # TODO: Use sonata api
+ if population is None:
+ if len(cells_h5['/nodes']) > 1:
+ raise Exception('Multiple populations in nodes file. Please specify one to plot using population param')
+ else:
+ population = list(cells_h5['/nodes'].keys())[0]
+
+ nodes_grp = cells_h5['/nodes'][population]
+ c_df = pd.DataFrame({'node_id': nodes_grp['node_id'], 'node_type_id': nodes_grp['node_type_id']})
+ # c_df = pd.read_csv(cells_file, sep=' ')
+ c_df.set_index('node_id', inplace=True)
+ nodes_df = pd.merge(left=c_df,
+ right=cm_df,
+ how='left',
+ left_on='node_type_id',
+ right_index=True) # use 'model_id' key to merge, for right table the "model_id" is an index
+
+ # TODO: Uses utils.SpikesReader to open
+ spikes_h5 = h5py.File(spikes_file, 'r')
+ spike_gids = np.array(spikes_h5['/spikes/gids'], dtype=np.uint64)
+ spike_times = np.array(spikes_h5['/spikes/timestamps'], dtype=np.float64)
+ # spike_times, spike_gids = np.loadtxt(spikes_file, dtype='float32,int', unpack=True)
+ # spike_gids, spike_times = np.loadtxt(spikes_file, dtype='int,float32', unpack=True)
+
+ spike_times = spike_times * 1.0e-3
+
+ if group_key is not None:
+ if group_key not in nodes_df:
+ raise Exception('Could not find column {}'.format(group_key))
+ groupings = nodes_df.groupby(group_key)
+
+ n_colors = nodes_df[group_key].nunique()
+ color_norm = colors.Normalize(vmin=0, vmax=(n_colors-1))
+ scalar_map = cmx.ScalarMappable(norm=color_norm, cmap='hsv')
+ color_map = [scalar_map.to_rgba(i) for i in range(0, n_colors)]
+ else:
+ groupings = [(None, nodes_df)]
+ color_map = ['blue']
+
+ #marker = '.' if len(nodes_df) > 1000 else 'o'
+ marker = 'o'
+
+ # Create plot
+ gs = gridspec.GridSpec(2, 1, height_ratios=[7, 1])
+ ax1 = plt.subplot(gs[0])
+ gid_min = 10**10
+ gid_max = -1
+ for color, (group_name, group_df) in zip(color_map, groupings):
+ if group_name in exclude:
+ continue
+ group_min_gid = min(group_df.index.tolist())
+ group_max_gid = max(group_df.index.tolist())
+ gid_min = group_min_gid if group_min_gid <= gid_min else gid_min
+ gid_max = group_max_gid if group_max_gid > gid_max else gid_max
+
+ gids_group = group_df.index
+ indexes = np.in1d(spike_gids, gids_group)
+ ax1.scatter(spike_times[indexes], spike_gids[indexes], marker=marker, facecolors=color, label=group_name, lw=0, s=5)
+
+ #ax1.set_xlabel('time (s)')
+ ax1.axes.get_xaxis().set_visible(False)
+ ax1.set_ylabel('cell_id')
+ ax1.set_xlim([0, max(spike_times)])
+ ax1.set_ylim([gid_min, gid_max])
+ plt.legend(markerscale=2, scatterpoints=1)
+
+ ax2 = plt.subplot(gs[1])
+ plt.hist(spike_times, 100)
+ ax2.set_xlabel('time (s)')
+ ax2.set_xlim([0, max(spike_times)])
+ ax2.axes.get_yaxis().set_visible(False)
+ if title is not None:
+ ax1.set_title(title)
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show:
+ plt.show()
+
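+# Illustrative usage (hypothetical SONATA file names):
+#
+#   plot_spikes('network/v1_nodes.h5', 'network/v1_node_types.csv',
+#               'output/spikes.h5', group_key='pop_name', save_as='raster.jpg')
+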
+
+def plot_ratess(cells_file, cell_models_file, spikes_file, group_key='pop_name', exclude=['LIF_inh', 'LIF_exc'], save_as=None, show_plot=True):
+ cm_df = pd.read_csv(cell_models_file, sep=' ')
+ cm_df.set_index('node_type_id', inplace=True)
+
+ c_df = pd.read_csv(cells_file, sep=' ')
+ c_df.set_index('node_id', inplace=True)
+ nodes_df = pd.merge(left=c_df,
+ right=cm_df,
+ how='left',
+ left_on='node_type_id',
+ right_index=True) # use 'model_id' key to merge, for right table the "model_id" is an index
+
+ # drop any excluded populations before grouping
+ for cond in exclude:
+ nodes_df = nodes_df[nodes_df[group_key] != cond]
+
+ groupings = nodes_df.groupby(group_key)
+ n_colors = nodes_df[group_key].nunique()
+ color_norm = colors.Normalize(vmin=0, vmax=(n_colors - 1))
+ scalar_map = cmx.ScalarMappable(norm=color_norm, cmap='hsv')
+ color_map = [scalar_map.to_rgba(i) for i in range(0, n_colors)]
+
+ # count spikes per gid, skipping the first 500 ms to let the network settle
+ spike_times, spike_gids = np.loadtxt(spikes_file, dtype='float32,int', unpack=True)
+ rates = np.zeros(max(spike_gids) + 1)
+ for ts, gid in zip(spike_times, spike_gids):
+ if ts < 500.0:
+ continue
+ rates[gid] += 1
+
+ for color, (group_name, group_df) in zip(color_map, groupings):
+ plt.plot(group_df.index, rates[group_df.index], '.', color=color, label=group_name)
+
+ plt.xlabel('gid')
+ plt.ylabel('spike count (t > 500 ms)')
+ plt.legend(markerscale=2, scatterpoints=1)
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show_plot:
+ plt.show()
+
+
+def plot_rates(cells_file, cell_models_file, spikes_file, group_key=None, exclude=[], interval=None, show=True,
+ title=None, save_as=None, smoothed=False):
+ def smooth(data, window=100):
+ h = int(window/2)
+ x_max = len(data)
+ return [np.mean(data[max(0, x-h):min(x_max, x+h)]) for x in range(0, x_max)]
+
+ nodes_df = _create_node_table(cells_file, cell_models_file, group_key, exclude)
+ _, spike_rates = _count_spikes(spikes_file, max(nodes_df.index), interval)
+
+ if group_key is not None:
+ groupings = nodes_df.groupby(group_key)
+ group_order = {k: i for i, k in enumerate(nodes_df[group_key].unique())}
+
+ n_colors = len(group_order)
+ color_norm = colors.Normalize(vmin=0, vmax=(n_colors-1))
+ scalar_map = cmx.ScalarMappable(norm=color_norm, cmap='hsv')
+ color_map = [scalar_map.to_rgba(i) for i in range(0, n_colors)]
+ ordered_groupings = [(group_order[name], c, name, df) for c, (name, df) in zip(color_map, groupings)]
+
+ else:
+ ordered_groupings = [(0, 'blue', None, nodes_df)]
+
+ n_groups = len(ordered_groupings)
+ keys = ['' for _ in range(n_groups)]
+ means = [0 for _ in range(n_groups)]
+ stds = [0 for _ in range(n_groups)]
+ fig = plt.figure()
+ ax1 = fig.add_subplot(111)
+ for indx, color, group_name, group_df in ordered_groupings:
+ keys[indx] = group_name
+ means[indx] = np.mean(spike_rates[group_df.index])
+ stds[indx] = np.std(spike_rates[group_df.index])
+ y = smooth(spike_rates[group_df.index]) if smoothed else spike_rates[group_df.index]
+ ax1.plot(group_df.index, y, '.', color=color, label=group_name)
+
+ max_rate = np.max(spike_rates)
+ ax1.set_ylim(0, max_rate*1.3)
+ ax1.set_ylabel('Hz')
+ ax1.set_xlabel('gid')
+ ax1.legend(fontsize='x-small')
+ if title is not None:
+ ax1.set_title(title)
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ plt.figure()
+ plt.errorbar(range(len(means)), means, stds, linestyle='None', marker='o')
+ plt.xlim(-0.5, len(means) - 0.5)
+ plt.ylim(0, max_rate*1.3)
+ plt.xticks(range(len(means)), keys)
+ if title is not None:
+ plt.title(title)
+ if save_as is not None:
+ if save_as.endswith('.jpg'):
+ base = save_as[0:-4]
+ elif save_as.endswith('.jpeg'):
+ base = save_as[0:-5]
+ else:
+ base = save_as
+
+ plt.savefig('{}.summary.jpg'.format(base))
+ with open('{}.summary.csv'.format(base), 'w') as f:
+ f.write('population mean stddev\n')
+ for i, key in enumerate(keys):
+ f.write('{} {} {}\n'.format(key, means[i], stds[i]))
+
+ if show:
+ plt.show()
+
+def plot_rates_popnet(cell_models_file, rates_file, model_keys=None, save_as=None, show_plot=True):
+ """Initial method for plotting popnet output
+
+ :param cell_models_file:
+ :param rates_file:
+ :param model_keys:
+ :param save_as:
+ :param show_plot:
+ :return:
+ """
+
+ pops_df = pd.read_csv(cell_models_file, sep=' ')
+ lookup_col = model_keys if model_keys is not None else 'node_type_id'
+ pop_keys = {str(r['node_type_id']): r[lookup_col] for _, r in pops_df.iterrows()}
+
+ # organize the rates file by population
+ # rates = {pop_name: ([], []) for pop_name in pop_keys.keys()}
+ rates_df = pd.read_csv(rates_file, sep=' ', names=['id', 'times', 'rates'])
+ for grp_key, grp_df in rates_df.groupby('id'):
+ grp_label = pop_keys[str(grp_key)]
+ plt.plot(grp_df['times'], grp_df['rates'], label=grp_label)
+
+ plt.legend(fontsize='x-small')
+ plt.xlabel('time (s)')
+ plt.ylabel('firing rates (Hz)')
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show_plot:
+ plt.show()
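+
+# The popnet rates file is read as space-separated "id time rate" rows;
+# model_keys picks the node_types column used for legend labels, e.g.
+# (hypothetical paths):
+#
+#   plot_rates_popnet('network/pop_node_types.csv', 'output/spike_rates.csv',
+#                     model_keys='pop_name')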
+
+def plot_avg_rates(cell_models_file, rates_file, model_keys=None, save_as=None, show_plot=True):
+ pops_df = pd.read_csv(cell_models_file, sep=' ')
+ lookup_col = model_keys if model_keys is not None else 'node_type_id'
+ pop_keys = {str(r['node_type_id']): r[lookup_col] for _, r in pops_df.iterrows()}
+
+ # organize the rates file by population
+ rates = {pop_name: [] for pop_name in pop_keys.keys()}
+ with open(rates_file, 'r') as f:
+ reader = csv.reader(f, delimiter=' ')
+ for row in reader:
+ if row[0] in rates:
+ #rates[row[0]][0].append(row[1])
+ rates[row[0]].append(float(row[2]))
+
+ labels = []
+ means = []
+ stds = []
+ for pop_name in pops_df['node_type_id'].unique():
+ r = rates[str(pop_name)]
+ if len(r) == 0:
+ continue
+
+ labels.append(pop_keys.get(str(pop_name), str(pop_name)))
+ means.append(np.mean(r))
+ stds.append(np.std(r))
+
+ plt.figure()
+ plt.errorbar(range(len(means)), means, stds, linestyle='None', marker='o')
+ plt.xlim(-0.5, len(means) - 0.5)
+ plt.xticks(range(len(means)), labels)
+ plt.ylabel('firing rates (Hz)')
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show_plot:
+ plt.show()
+
+
+def plot_tuning(sg_analysis, node, band, Freq=0, show=True, save_as=None):
+ def index_for_node(node, band):
+ if node == 's4':
+ mask = sg_analysis.node_table.node == node
+ else:
+ mask = (sg_analysis.node_table.node == node) & (sg_analysis.node_table.band == band)
+ return str(sg_analysis.node_table[mask].index[0])
+
+ index = index_for_node(node, band)
+
+ key = index + '/sg/tuning'
+ analysis_file = sg_analysis.get_tunings_file()
+
+ tuning_matrix = analysis_file[key][()][:, :, :, Freq]
+
+ n_or, n_sf, n_ph = tuning_matrix.shape
+
+ vmax = np.max(tuning_matrix[:, :, :])
+ vmin = np.min(tuning_matrix[:, :, :])
+
+ #fig, ax = plt.subplots(1, n_ph, figsize=(12, 16), sharex=True, sharey=True)
+ fig, ax = plt.subplots(1, n_ph, figsize=(13.9, 4.3), sharex=False, sharey=True)
+
+ print(sg_analysis.orientations)
+ for phase in range(n_ph):
+ tuning_to_plot = tuning_matrix[:, :, phase]
+
+ im = ax[phase].imshow(tuning_to_plot, interpolation='nearest', vmax=vmax, vmin=vmin)
+ ax[phase].set_xticklabels([0] + list(sg_analysis.spatial_frequencies))
+ ax[phase].set_yticklabels([0] + list(sg_analysis.orientations))
+
+ ax[phase].set_title('phase = {}'.format(sg_analysis.phases[phase]))
+ ax[phase].set_xlabel('spatial_frequency')
+ if phase == 0:
+ ax[phase].set_ylabel('orientation')
+
+ fig.subplots_adjust(right=0.90)
+ cbar_ax = fig.add_axes([0.92, 0.10, 0.02, 0.75])
+ cbar = fig.colorbar(im, cax=cbar_ax, ticks=[vmin, 0.0, vmax])
+
+ if save_as is not None:
+ plt.savefig(save_as)
+
+ if show:
+ plt.show()
+
+
diff --git a/bmtk-vb/build/lib/bmtk/analyzer/visualization/widgets.py b/bmtk-vb/build/lib/bmtk/analyzer/visualization/widgets.py
new file mode 100644
index 0000000..bb9c909
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/analyzer/visualization/widgets.py
@@ -0,0 +1,114 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import matplotlib.pyplot as plt
+import scipy.interpolate as spinterp
+import numpy as np
+
+class PlotWidget(object):
+
+ def __init__(self, t_range, y_range, rate_ax=None, position_ax=None, metadata={}, location_markersize=5):
+
+ if rate_ax is None:
+ self.fig = plt.figure()
+ self.ax = self.fig.add_subplot(111)
+ else:
+ self.ax = rate_ax
+ self.position_ax = position_ax
+
+ self.t_range = t_range
+ self.y_range = y_range
+ self.interp_fcn = spinterp.interp1d(self.t_range, self.y_range)
+ self._t = None
+ self.metadata=metadata
+ self.artist_list = []
+ self.location_markersize = location_markersize
+
+ @property
+ def y(self):
+ return self.interp_fcn(self._t)
+
+ def initialize(self, t0, **kwargs):
+
+ self._t = t0
+ self.plot_data, = self.ax.plot(self.t_range,self.y_range,**kwargs)
+ self.vertical_rule_data, = self.ax.plot([self._t, self._t],self.ax.get_ylim(),'--r')
+ self.point_data, = self.ax.plot([self._t],[self.y],'*r')
+
+ self.artist_list = [self.plot_data, self.vertical_rule_data, self.point_data]
+
+ if self.position_ax is not None and 'position' in self.metadata:
+ x = self.metadata['position'][0]
+ y = self.metadata['position'][1]
+ self.location_point_data, = self.position_ax.plot([x],[y],'*r', markersize=self.location_markersize)
+ self.artist_list.append(self.location_point_data)
+
+
+ def update(self, t):
+
+ self._t = t
+ self.point_data.set_xdata([self._t])
+ self.vertical_rule_data.set_xdata([self._t, self._t])
+ self.vertical_rule_data.set_ydata(self.ax.get_ylim())
+
+ for data in self.artist_list:
+ self.ax.figure.canvas.blit(data)
+
+ def set_visible(self, visible_or_not):
+
+ for data in self.artist_list:
+ data.set_visible(visible_or_not)
+ self.ax.figure.canvas.blit(data)
+
+
+class MovieWidget(object):
+
+ def __init__(self, t_range, data, ax=None, metadata={}):
+
+ if ax is None:
+ self.fig = plt.figure()
+ self.ax = self.fig.add_subplot(111)
+ else:
+ self.ax = ax
+
+ self.t_range = t_range
+ self.frame_rate = 1./np.mean(np.diff(t_range))
+ self.data = data
+ self.ax.get_xaxis().set_visible(False)
+ self.ax.get_yaxis().set_visible(False)
+ self.metadata=metadata
+
+ def initialize(self, t0, vmin=-1, vmax=1, cmap=plt.cm.gray):
+
+ data = self.data[self.ti(t0),:,:]
+ self.im = self.ax.imshow(data, vmin=vmin, vmax=vmax, cmap=cmap)
+
+ def update(self, t):
+
+ data = self.data[self.ti(t),:,:]
+ self.im.set_data(data)
+ self.ax.figure.canvas.draw()
+
+ def ti(self, t):
+ return int(t*self.frame_rate) - int(self.t_range[0]*self.frame_rate)
\ No newline at end of file
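+
+# Illustrative driver loop (hypothetical data; ties the two widgets together):
+#
+#   t_range = np.linspace(0.0, 1.0, 100)
+#   pw = PlotWidget(t_range, np.sin(2*np.pi*t_range))
+#   mw = MovieWidget(t_range, np.random.rand(100, 32, 32))
+#   pw.initialize(t_range[0]); mw.initialize(t_range[0])
+#   for t in t_range[1:]:
+#       pw.update(t); mw.update(t)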
diff --git a/bmtk-vb/build/lib/bmtk/builder/__init__.py b/bmtk-vb/build/lib/bmtk/builder/__init__.py
new file mode 100644
index 0000000..1f7a3ed
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/__init__.py
@@ -0,0 +1,23 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .networks import DenseNetwork, NetworkBuilder
diff --git a/bmtk-vb/build/lib/bmtk/builder/aux/__init__.py b/bmtk-vb/build/lib/bmtk/builder/aux/__init__.py
new file mode 100644
index 0000000..2d56a26
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/aux/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/builder/aux/edge_connectors.py b/bmtk-vb/build/lib/bmtk/builder/aux/edge_connectors.py
new file mode 100644
index 0000000..7abba26
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/aux/edge_connectors.py
@@ -0,0 +1,56 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import random
+
+
+def distance_connector(source, target, d_weight_min, d_weight_max, d_max, nsyn_min, nsyn_max):
+ # Avoid self-connections.
+ sid = source.node_id
+ tid = target.node_id
+ if sid == tid:
+ return None
+
+ # first create weights by euclidean distance between cells
+ r = np.linalg.norm(np.array(source['positions']) - np.array(target['positions']))
+ if r > d_max:
+ dw = 0.0
+ else:
+ t = r / d_max
+ dw = d_weight_max * (1.0 - t) + d_weight_min * t
+
+ # drop the connection if the weight is too low
+ if dw <= 0:
+ return None
+
+ # filter out nodes by treating the weight as a probability of connection
+ if random.random() > dw:
+ return None
+
+ # Add the number of synapses for every connection.
+ tmp_nsyn = random.randint(nsyn_min, nsyn_max)
+ return tmp_nsyn
+
+
+def connect_random(source, target, nsyn_min=0, nsyn_max=10, distribution=None):
+ return np.random.randint(nsyn_min, nsyn_max)
\ No newline at end of file
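+
+# Worked example of the distance rule above: with d_weight_min=0.0,
+# d_weight_max=0.3 and d_max=300.0, two cells 100 um apart get
+# dw = 0.3*(1 - 100/300) + 0.0*(100/300) = 0.2, i.e. a 20% chance of being
+# connected; if connected, nsyn is drawn uniformly from [nsyn_min, nsyn_max].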
diff --git a/bmtk-vb/build/lib/bmtk/builder/aux/node_params.py b/bmtk-vb/build/lib/bmtk/builder/aux/node_params.py
new file mode 100644
index 0000000..0ce1f4f
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/aux/node_params.py
@@ -0,0 +1,38 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import math
+
+
+def positions_columinar(N=1, center=[0.0, 50.0, 0.0], height=100.0, min_radius=0.0, max_radius=1.0, distribution='uniform'):
+ phi = 2.0 * math.pi * np.random.random([N])
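+ # inverse-CDF sampling of r: points are uniform over the annulus area between
+ # min_radius and max_radius (a plain uniform r would over-sample small radii)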
+ r = np.sqrt((min_radius**2 - max_radius**2) * np.random.random([N]) + max_radius**2)
+ x = center[0] + r * np.cos(phi)
+ z = center[2] + r * np.sin(phi)
+ y = center[1] + height * (np.random.random([N]) - 0.5)
+
+ return np.column_stack((x, y, z))
+
+
+def xiter_random(N=1, min_x=0.0, max_x=1.0):
+ return np.random.uniform(low=min_x, high=max_x, size=(N,))
\ No newline at end of file
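+
+# Illustrative usage (hypothetical counts and bounds):
+#
+#   positions = positions_columinar(N=100, center=[0.0, 50.0, 0.0],
+#                                   height=100.0, max_radius=50.0)
+#   xs = xiter_random(N=100, min_x=-100.0, max_x=0.0)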
diff --git a/bmtk-vb/build/lib/bmtk/builder/bionet/__init__.py b/bmtk-vb/build/lib/bmtk/builder/bionet/__init__.py
new file mode 100644
index 0000000..324aace
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/bionet/__init__.py
@@ -0,0 +1 @@
+from .swc_reader import SWCReader
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/builder/bionet/swc_reader.py b/bmtk-vb/build/lib/bmtk/builder/bionet/swc_reader.py
new file mode 100644
index 0000000..4833a1d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/bionet/swc_reader.py
@@ -0,0 +1,81 @@
+import numpy as np
+from neuron import h
+
+from bmtk.simulator.bionet import nrn
+from bmtk.simulator.bionet.morphology import Morphology
+
+
+class SWCReader(object):
+ def __init__(self, swc_file, random_seed=10, fix_axon=True):
+ nrn.load_neuron_modules(None, None)
+ self._swc_file = swc_file
+ self._hobj = h.Biophys1(swc_file)
+ if fix_axon:
+ self._fix_axon()
+
+ self._morphology = Morphology(self._hobj)
+ self._morphology.set_seg_props()
+ self._morphology.calc_seg_coords()
+ self._prng = np.random.RandomState(random_seed)
+
+ self._secs = []
+ self._save_sections()
+
+ def _save_sections(self):
+ for sec in self._hobj.all:
+ for _ in sec:
+ self._secs.append(sec)
+
+ def _fix_axon(self):
+ """Removes and refixes axon"""
+ axon_diams = [self._hobj.axon[0].diam, self._hobj.axon[0].diam]
+ for sec in self._hobj.all:
+ section_name = sec.name().split(".")[1][:4]
+ if section_name == 'axon':
+ axon_diams[1] = sec.diam
+
+ for sec in self._hobj.axon:
+ h.delete_section(sec=sec)
+
+ h.execute('create axon[2]', self._hobj)
+ for index, sec in enumerate(self._hobj.axon):
+ sec.L = 30
+ sec.diam = 1
+
+ self._hobj.axonal.append(sec=sec)
+ self._hobj.all.append(sec=sec)
+
+ self._hobj.axon[0].connect(self._hobj.soma[0], 1.0, 0)
+ self._hobj.axon[1].connect(self._hobj.axon[0], 1.0, 0)
+
+ h.define_shape()
+
+ def find_sections(self, section_names, distance_range):
+ return self._morphology.find_sections(section_names, distance_range)
+
+ def choose_sections(self, section_names, distance_range, n_sections=1):
+ secs, probs = self.find_sections(section_names, distance_range)
+ secs_ix = self._prng.choice(secs, n_sections, p=probs)
+ return secs_ix, self._morphology.seg_prop['x'][secs_ix]
+
+ def get_coord(self, sec_ids, sec_xs, soma_center=(0.0, 0.0, 0.0), rotations=None):
+ adjusted = self._morphology.get_soma_pos() - np.array(soma_center)
+ absolute_coords = []
+ for sec_id, sec_x in zip(sec_ids, sec_xs):
+ sec = self._secs[sec_id]
+ n_coords = int(h.n3d(sec=sec))
+ coord_indx = int(sec_x*(n_coords - 1))
+ swc_coords = np.array([h.x3d(coord_indx, sec=sec), h.y3d(coord_indx, sec=sec), h.z3d(coord_indx, sec=sec)])
+ absolute_coords.append(swc_coords - adjusted)
+
+ if rotations is not None:
+ raise NotImplementedError
+
+ return absolute_coords
+
+ def get_dist(self, sec_ids):
+ return [self._morphology.seg_prop['dist'][sec_id] for sec_id in sec_ids]
+
+ def get_type(self, sec_ids):
+ return [self._morphology.seg_prop['type'][sec_id] for sec_id in sec_ids]
+
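+# Illustrative usage (hypothetical swc path; requires NEURON with the Biophys1
+# template and mechanisms loadable by nrn.load_neuron_modules):
+#
+#   reader = SWCReader('components/morphologies/cell.swc')
+#   sec_ids, sec_xs = reader.choose_sections(['dend', 'apic'], (50.0, 150.0), n_sections=5)
+#   coords = reader.get_coord(sec_ids, sec_xs, soma_center=(0.0, 0.0, 0.0))
+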
diff --git a/bmtk-vb/build/lib/bmtk/builder/connection_map.py b/bmtk-vb/build/lib/bmtk/builder/connection_map.py
new file mode 100644
index 0000000..863cf26
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/connection_map.py
@@ -0,0 +1,153 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from . import connector
+from . import iterator
+
+
+class ConnectionMap(object):
+ """Class for keeping track of connection rules.
+
+ For every connection from source --> target this keeps track of rules (functions, literals, lists) for
+ 1. the number of synapses between source and target
+ 2. User-defined parameters (syn-weight, synaptic-location) for every synapse.
+
+ The number of synapses rule (1) is stored as a connector. Individual synaptic parameters, if they exist, are
+ stored as ParamsRules.
+ """
+
+ class ParamsRules(object):
+ """A subclass to store indvidiual synpatic parameter rules"""
+ def __init__(self, names, rule, rule_params, dtypes):
+ self._names = names
+ self._rule = rule
+ self._rule_params = rule_params
+ self._dtypes = self.__create_dtype_dict(names, dtypes)
+
+ def __create_dtype_dict(self, names, dtypes):
+ if isinstance(names, list):
+ # TODO: compare size of names and dtypes
+ return {n: dt for n, dt in zip(names, dtypes)}
+ else:
+ return {names: dtypes}
+
+ @property
+ def names(self):
+ return self._names
+
+ @property
+ def rule(self):
+ return connector.create(self._rule, **(self._rule_params or {}))
+
+ @property
+ def dtypes(self):
+ return self._dtypes
+
+ def get_prop_dtype(self, prop_name):
+ return self._dtypes[prop_name]
+
+ def __init__(self, sources=None, targets=None, connector=None, connector_params=None, iterator='one_to_one',
+ edge_type_properties=None):
+ self._source_nodes = sources # source nodes
+ self._target_nodes = targets # target nodes
+ self._connector = connector # function, list or value that determines connection between sources and targets
+ self._connector_params = connector_params # parameters passed into connector
+ self._iterator = iterator # rule for iterating between sources and targets
+ self._edge_type_properties = edge_type_properties
+
+ self._params = []
+ self._param_keys = []
+
+ @property
+ def params(self):
+ return self._params
+
+ @property
+ def source_nodes(self):
+ return self._source_nodes
+
+ @property
+ def source_network_name(self):
+ return self._source_nodes.network_name
+
+ @property
+ def target_nodes(self):
+ return self._target_nodes
+
+ @property
+ def target_network_name(self):
+ return self._target_nodes.network_name
+
+ @property
+ def connector(self):
+ return self._connector
+
+ @property
+ def connector_params(self):
+ return self._connector_params
+
+ @property
+ def iterator(self):
+ return self._iterator
+
+ @property
+ def edge_type_properties(self):
+ return self._edge_type_properties or {}
+
+ @property
+ def edge_type_id(self):
+ # TODO: properly implement edge_type
+ return self._edge_type_properties['edge_type_id']
+
+ @property
+ def property_names(self):
+ if len(self._param_keys) == 0:
+ return ['nsyns']
+ else:
+ return self._param_keys
+
+ def properties_keys(self):
+ ordered_keys = sorted(self.property_names)
+ return str(ordered_keys)
+
+
+ def max_connections(self):
+ return len(self._source_nodes) * len(self._target_nodes)
+
+ def add_properties(self, names, rule, rule_params=None, dtypes=None):
+ """A a synaptic property
+
+ :param names: list, or single string, of the property
+ :param rule: function, list or value of property
+ :param rule_params: when rule is a function, rule_params will be passed into function when called.
+ :param dtypes: expected property type
+ """
+ self._params.append(self.ParamsRules(names, rule, rule_params, dtypes))
+ self._param_keys += names if isinstance(names, list) else [names]
+
+ def connection_itr(self):
+ """Returns a generator that will iterate through the source/target pairs (as specified by the iterator function,
+ and create a connection rule based on the connector.
+ """
+ conr = connector.create(self.connector, **(self.connector_params or {}))
+ itr = iterator.create(self.iterator, conr)
+ return itr(self.source_nodes, self.target_nodes, conr)
diff --git a/bmtk-vb/build/lib/bmtk/builder/connector.py b/bmtk-vb/build/lib/bmtk/builder/connector.py
new file mode 100644
index 0000000..0d2cfd6
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/connector.py
@@ -0,0 +1,35 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from . import functor_cache
+
+
+def create(connector, **params):
+ return CONNECTOR_CACHE.create(connector, **params)
+
+
+def register(name, func):
+ CONNECTOR_CACHE.register(name, func)
+
+
+CONNECTOR_CACHE = functor_cache.FunctorCache()
+register('passthrough', lambda *_: {})
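+
+# Example (illustration): register a custom connector by name, then build a
+# ready-to-call functor with bound parameters.
+# def fixed_nsyns(source, target, nsyns=1):
+#     return nsyns
+# register('fixed_nsyns', fixed_nsyns)
+# conn_fn = create('fixed_nsyns', nsyns=5)  # conn_fn(src, trg) -> 5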
diff --git a/bmtk-vb/build/lib/bmtk/builder/edge.py b/bmtk-vb/build/lib/bmtk/builder/edge.py
new file mode 100644
index 0000000..31265a9
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/edge.py
@@ -0,0 +1,66 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+
+class Edge(object):
+ def __init__(self, src_gid, trg_gid, edge_type_props, syn_props):
+ self.__src_gid = src_gid
+ self.__trg_gid = trg_gid
+ self.__edge_type_props = edge_type_props
+ self.__syn_props = syn_props
+
+ @property
+ def source_gid(self):
+ return self.__src_gid
+
+ @property
+ def target_gid(self):
+ return self.__trg_gid
+
+ @property
+ def edge_type_properties(self):
+ return self.__edge_type_props
+
+ @property
+ def edge_type_id(self):
+ return self.edge_type_properties['edge_type_id']
+
+ @property
+ def synaptic_properties(self):
+ return self.__syn_props
+
+ def __contains__(self, item):
+ return item in self.edge_type_properties or item in self.synaptic_properties
+
+ def __getitem__(self, item):
+ if item in self.edge_type_properties:
+ return self.edge_type_properties[item]
+ elif item in self.synaptic_properties:
+ return self.synaptic_properties[item]
+ else:
+ return None
+
+ def __repr__(self):
+ rstr = "{} --> {} ('edge_type_id': {}, ".format(self.source_gid, self.target_gid, self.edge_type_id)
+ rstr += "{}: {}" ', '.join("'{}': {}".format(k, v) for k, v in self.synaptic_properties.items())
+ return rstr + ")"
diff --git a/bmtk-vb/build/lib/bmtk/builder/formats/__init__.py b/bmtk-vb/build/lib/bmtk/builder/formats/__init__.py
new file mode 100644
index 0000000..6480e34
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/formats/__init__.py
@@ -0,0 +1,246 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+""" network2.format
+
+The XFormat classes are implemented within the Network class to allow network objects to handle different data types.
+Each class should be able to control both input and output file format (json, csv, h5, etc) and the expected parameters,
+including their corresponding order.
+
+Example:
+ net = Network(format=ISeeFormat)
+ ...
+ net.save(cells="cells.csv", models="cell_models.csv", connections="connections.h5")
+
+Todo:
+ * change network.load(cls) to be format specific.
+"""
+import csv
+import h5py
+import numpy as np
+import json
+import pandas as pd
+
+from ..node import Node
+
+from .iformats import IFormat
+
+
+class DefaultFormat(IFormat):
+ def save_nodes(self, file_name):
+ raise NotImplementedError()
+
+ def save_edges(self, file_name):
+ raise NotImplementedError()
+
+ def save(self, file_name):
+ raise NotImplementedError()
+
+
+class ISeeFormat(IFormat):
+ """Controls the output of networks that will be used in the isee_engine simulator.
+
+ The nodes are saved in cells and cell_models csv files with a predefined format. The edges/connections are
+ saved in a connections h5 file.
+ """
+ def save_cells(self, filename, columns, position_labels=None):
+ """Saves nodes/cell information and their model type metadata.
+
+ :param filename: name of csv file where cell information will be saved.
+ :param columns: list of node properties to save; 'position' is expanded using position_labels.
+ :param position_labels: labels for each position coordinate column (e.g. x, y, z).
+ """
+ # TODO: add checks and warnings if parameters are missing.
+ with open(filename, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=' ')
+ header = []
+ for col in columns:
+ if col == 'position':
+ for label in position_labels:
+ if label:
+ header.append(label)
+ else:
+ header.append(col)
+ csvw.writerow(header)
+ for nid, params in self._network.nodes():
+ row_array = []
+ for col in columns:
+ if col == 'position':
+ for i, label in enumerate(position_labels):
+ if label:
+ row_array.append(params['position'][i])
+ else:
+ row_array.append(params[col])
+
+ csvw.writerow(row_array)
+
+ def save_types(self, filename, columns, key=None):
+ seen_types = set()
+
+ with open(filename, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=' ')
+ csvw.writerow(columns)
+ #csvw.writerow(['model_id', 'electrophysiology' 'level_of_detail', 'morphology', 'rotation_angle_zaxis'])
+ for node_set in self._network._node_sets:
+ props = node_set.properties
+
+ if key is not None:
+ key_val = props.get(key, None)
+ if key_val is not None and key_val in seen_types:
+ continue
+ else:
+ seen_types.add(key_val)
+
+ row_array = []
+ for col in columns:
+ row_array.append(props.get(col, 'NA'))
+ csvw.writerow(row_array)
+
+ def save_edges(self, filename, include_nsyns=True):
+ """Saves connection information into h5 format
+
+ :param filename: Name of h5 file where connection information will be stored.
+ :param include_nsyns: setting to false will omit the nsyns table in the h5 file, default
+ true (nsyn table included).
+ """
+ print("save_edges")
+
+ n_nodes = self._network.nnodes
+ n_edges = self._network.nedges
+
+ # TODO: check the order of the node list
+
+ print("> building tables with %d nodes and %d edges" % (self._network.nnodes, self._network.nedges))
+ indptr_table = [0]
+ nsyns_table = []
+ src_gids_table = []
+ edge_types_table = []
+ for trg in self._network.nodes():
+ tid = trg[1]['id']
+ for edges in self._network.edges([tid], rank=1):
+ src_gids_table.append(edges[0])
+ nsyns_table.append(edges[2])
+ edge_types_table.append(edges[3])
+
+ #if len(src_gids_table) == indptr_table[-1]:
+ # print "node %d doesn't have any edges" % (tid)
+ indptr_table.append(len(src_gids_table))
+
+
+ print("> saving tables to %s" % (filename))
+
+ with h5py.File(filename, 'w') as hf:
+ hf.create_dataset('edge_ptr', data=indptr_table)
+ if include_nsyns:
+ hf.create_dataset('num_syns', data=nsyns_table)
+ hf.create_dataset('src_gids', data=src_gids_table)
+ hf.create_dataset('edge_types', data=edge_types_table)
+ hf.attrs["shape"] = (n_nodes, n_nodes)
+
+
+ """
+ temp = np.empty([n_edges, 3])
+ for i, edge in enumerate(self._network.edges()):
+ temp[i, 0] = edge[0]
+ temp[i, 1] = edge[1]
+ temp[i, 2] = edge[2]
+
+ src_gids_new = np.array([])
+ nsyns_new = np.array([])
+ indptr_new = []
+ counter = 0
+ indptr_new.append(counter)
+ print "Building database"
+ for i in range(n_nodes):
+ indicies = np.where(temp[:, 1] == i)
+
+ src_gids_new = np.concatenate([src_gids_new, np.array(temp[indicies[0], 0])])
+ nsyns_new = np.concatenate([nsyns_new, np.array(temp[indicies[0], 2])])
+
+ counter += np.size(indicies[0])
+ indptr_new.append(counter)
+
+ print "Writing to h5"
+
+ indptr_new = np.array(indptr_new)
+
+ src_gids_new = src_gids_new.astype(int)
+ print src_gids_new
+ exit()
+
+ nsyns_new = nsyns_new.astype(int)
+ indptr_new = indptr_new.astype(int)
+
+ with h5py.File(filename, 'w') as hf:
+ hf.create_dataset('indptr', data=indptr_new)
+ if include_nsyns:
+ hf.create_dataset('nsyns', data=nsyns_new)
+ hf.create_dataset('src_gids', data=src_gids_new)
+ hf.attrs["shape"] = (n_nodes, n_nodes)
+ """
+
+ def save(self, cells_fname, cell_models_fname, connections_fname, include_nsyns=True):
+ """Saves node (cells) and connection information to files.
+
+ :param cells_fname: name of csv file where cell information will be saved.
+ :param cell_models_fname: name of csv file where cell model information will be saved.
+ :param connections_fname: Name of h5 file where connection information will be stored.
+ :param include_nsyns: set to False to build h5 without nsyn table.
+ """
+ #self.save_nodes(cells_fname, cell_models_fname)
+ self.save_edges(connections_fname, include_nsyns)
+
+ def load(self, nodes, edge_types=None, node_types=None, edges=None, positions=None):
+ # TODO: check imported ids
+
+ df = pd.read_csv(nodes, sep=' ')
+ if node_types is not None:
+ types_df = pd.read_csv(node_types, sep=' ', index_col='node_type_id')
+ df = pd.merge(left=df, right=types_df, how='left', left_on='node_type_id', right_index=True)
+
+ gids_df = df['node_id'] if 'node_id' in df.columns else df['id']
+ #df = df.drop(['id'], axis=1)
+
+ positions_df = None
+ if positions:
+ positions_df = df[positions]
+ df = df.drop(positions, axis=1)
+
+ node_params = df.to_dict(orient='records')
+ node_tuples = [Node(gids_df[i], gids_df[i], None, array_params=node_params[i])
+ for i in range(df.shape[0])]
+
+
+ if positions:
+ # NOTE: position_set is assumed to come from the legacy network2 package; it is not imported in this module.
+ self._network.positions = position_set.PositionSet()
+ self._network.positions.add(positions_df.values, gids_df.tolist())
+
+ for i in range(df.shape[0]):
+ node_tuples[i]['position'] = np.array(positions_df.loc[i])
+
+ self._network.positions.finalize()
+
+ self._network._initialize()
+ self._network._add_nodes(node_tuples)
+ self._network.nodes_built = True
+
diff --git a/bmtk-vb/build/lib/bmtk/builder/formats/hdf5_format.py b/bmtk-vb/build/lib/bmtk/builder/formats/hdf5_format.py
new file mode 100644
index 0000000..a0227ca
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/formats/hdf5_format.py
@@ -0,0 +1,423 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import csv
+import json
+import math
+import h5py
+import pandas as pd
+from ast import literal_eval
+
+import bmtk
+from .iformats import IFormat
+from bmtk.builder.node_pool import NodePool
+from time import gmtime, strftime
+
+
+class HDF5Format(IFormat):
+ """
+ Format prior to Blue-brain project collaboration.
+ Saves as:
+ nodes (csv)
+ node_types (csv)
+ edge_types (csv)
+ edges (h5)
+ """
+
+ CSV_DELIMITER = ' '
+ COL_NODE_TYPE_ID = 'node_type_id'
+ COL_EDGE_TYPE_ID = 'edge_type_id'
+ COL_TARGET_QUERY = 'target_query'
+ COL_SOURCE_QUERY = 'source_query'
+ COL_NODE_ID = 'node_id'
+ BASE_DIR = 'network'
+
+ @property
+ def format(self):
+ return 'msdk.HDF5Format'
+
+ def save(self, directory, **kwargs):
+ """ saves nodes.csv, node_types.csv, edges.h5, edge_types.csv and .metadata.json. Will overwrite existing files.
+
+ :param directory: Directory where all the files will be saved, creating dir if it doesn't exists.
+ :param kwargs:
+ """
+ if directory is None:
+ base_path = os.path.join(self.BASE_DIR, self._network.name)
+ else:
+ base_path = directory
+
+ metadata = {
+ 'version': bmtk.__version__,
+ 'name': self._network.name,
+ 'date_created': strftime("%Y-%m-%d %H:%M:%S", gmtime()),
+ 'file_format': self.format,
+ 'network_class': self._network.__class__.__name__
+ }
+
+ # save node-types.
+ node_types_path = os.path.join(base_path, 'node_types.csv')
+ self.save_node_types(node_types_path, **kwargs)
+ metadata['node_types_file'] = 'node_types.csv'
+
+ # save individual nodes.
+ if self._network.nodes_built:
+ # make sure nodes have been built
+ nodes_path = os.path.join(base_path, 'nodes.csv')
+ self.save_nodes(nodes_path, **kwargs)
+ metadata['nodes_file'] = 'nodes.csv'
+ else:
+ print('Nodes not built. Unable to save to nodes.csv.')
+
+ # save edge-types.
+ edge_types_path = os.path.join(base_path, 'edge_types.csv')
+ self.save_edge_types(edge_types_path, **kwargs)
+ metadata['edge_types_file'] = 'edge_types.csv'
+
+ # save edges if they have been built
+ if self._network.edges_built:
+ edges_path = os.path.join(base_path, 'edges.h5')
+ self.save_edges(edges_path, **kwargs)
+ metadata['edges_file'] = 'edges.h5'
+ else:
+ print('Edges not built. Unable to save to edges.h5.')
+
+ # save the metadata file
+ metadata_path = os.path.join(base_path, '.metadata.json')
+ with open(metadata_path, 'w') as mdfile:
+ json.dump(metadata, mdfile, indent=2)
+
+ def save_node_types(self, file_name, columns=None, **kwargs):
+ """Write node_types to csv.
+
+ :param file_name: path to csv file. Will be overwritten if it exists
+ :param columns: optional columns (not incl. mandatory ones). If None then all node type properties are used.
+ :param kwargs: optional
+ """
+ self.__checkpath(file_name, **kwargs)
+
+ # csv should always start with node_type_id
+ mandatory_cols = [self.COL_NODE_TYPE_ID]
+
+ # Determine which columns are in the node_types file and their order
+ nt_properties = self._network.node_type_properties
+ opt_cols = []
+ if columns is None:
+ # use all node type properties
+ opt_cols = list(nt_properties)
+ else:
+ # check that columns specified by user exists
+ for col_name in columns:
+ if col_name not in nt_properties:
+ raise Exception('No node property {} found in network, cannot save {}.'.format(col_name, file_name))
+ else:
+ opt_cols.append(col_name)
+
+ # write to csv iteratively
+ cols = mandatory_cols + opt_cols
+ with open(file_name, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=self.CSV_DELIMITER)
+ csvw.writerow(cols)
+ for node_set in self._network._node_sets:
+ props = node_set.properties
+ row = []
+ for cname in cols:
+ # TODO: determine dtype of parameters so we can use the appropriate none value
+ row.append(props.get(cname, 'NA')) # get column value, or NA if it doesn't exist for this node
+ csvw.writerow(row)
+
+ def save_nodes(self, file_name, columns=None, **kwargs):
+ """Write nodes to csv.
+
+ :param file_name: path to csv file. Will be overwritten if it exists
+ :param columns: optional columns (not incl. mandatory ones). If None then all node params are used.
+ :param kwargs: optional
+ """
+ self.__checkpath(file_name, **kwargs)
+
+ # csv will start with node_id and node_type_id
+ mandatory_columns = [self.COL_NODE_ID, self.COL_NODE_TYPE_ID]
+
+ # optional columns from either node params or node-type properties
+ opt_columns = []
+ if columns is None:
+ opt_columns = list(self._network.node_params)
+ else:
+ all_cols = self._network.node_params | self._network.node_type_properties
+ for col_name in columns:
+ if col_name not in all_cols:
+ # verify params/properties exist
+ raise Exception('No node property {} found in network, cannot save {}.'.format(col_name, file_name))
+ else:
+ opt_columns.append(col_name)
+
+ # write to csv
+ with open(file_name, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=self.CSV_DELIMITER)
+ csvw.writerow(mandatory_columns + opt_columns)
+ for nid, node in self._network.nodes():
+ row = [node.node_id, node.node_type_id]
+ for cname in opt_columns:
+ row.append(node.get(cname, 'NA'))
+ csvw.writerow(row)
+
+ def save_edge_types(self, file_name, columns=None, **kwargs):
+ """Write edge-types to csv.
+
+ :param file_name: path to csv file. Will be overwritten if it exists
+ :param columns: optional columns (not incl. mandatory ones). If None then all edge-type properties are used.
+ :param kwargs: optional
+ """
+ self.__checkpath(file_name, **kwargs)
+
+ # start with edge_type_id, target_query and source_query
+ mandatory_cols = [self.COL_EDGE_TYPE_ID, self.COL_TARGET_QUERY, self.COL_SOURCE_QUERY]
+
+ # optional columns
+ edge_props = self._network.edge_type_properties
+ opt_cols = []
+ if columns is None:
+ opt_cols = list(edge_props)
+ else:
+ for col_name in columns:
+ if col_name not in edge_props:
+ raise Exception('No edge property {} found in network, cannot save {}.'.format(col_name, file_name))
+ else:
+ opt_cols.append(col_name)
+
+ # write to csv by iteratively going through all edge-types
+ with open(file_name, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=self.CSV_DELIMITER)
+ csvw.writerow(mandatory_cols + opt_cols)
+ for et in self._network._edge_sets:
+ edge = et['edge']
+ targetnodes = edge.targets # get targets as a NodePool to recover the target_query string
+ sourcenodes = edge.sources # same for sources
+ row_array = [edge.id, targetnodes.filter_str, sourcenodes.filter_str]
+ edge_params = edge.parameters
+ for col in opt_cols:
+ row_array.append(edge_params.get(col, 'NA'))
+ csvw.writerow(row_array)
+
+ def save_edges(self, file_name, **kwargs):
+ """Saves edges to edges.h5
+
+ :param file_name: path to hdf5 file. Will be overwritten if it exists
+ :param kwargs: optional
+ """
+ self.__checkpath(file_name, **kwargs)
+
+ # Get sources, targets, nsyns and edge_type_id for all edges.
+ print("> building tables with %d nodes and %d edges" % (self._network.nnodes, self._network.nedges))
+ indptr_table = [0]
+ nsyns_table = []
+ src_gids_table = []
+ edge_types_table = []
+ for trg in self._network.nodes():
+ # the targets have to be ordered.
+ tid = trg[1].node_id
+ for edges in self._network.edges([tid], rank=1):
+ src_gids_table.append(edges[0])
+ nsyns_table.append(edges[2])
+ edge_types_table.append(edges[3])
+
+ indptr_table.append(len(src_gids_table))
+
+ # save to h5
+ print("> saving tables to %s" % (file_name))
+ with h5py.File(file_name, 'w') as hf:
+ hf.create_dataset('edge_ptr', data=indptr_table)
+ hf.create_dataset('num_syns', data=nsyns_table)
+ hf.create_dataset('src_gids', data=src_gids_table)
+ hf.create_dataset('edge_types', data=edge_types_table)
+
+ def __checkpath(self, file_name, **kwargs):
+ """Makes sure file_name is a valid file path and can be written."""
+ dir_path = os.path.dirname(file_name)
+ if not os.path.exists(dir_path):
+ # create file's directory if it doesn't exist
+ os.makedirs(dir_path)
+
+ def __load_nodes(self, nodes_file, node_types_file):
+ """Loads nodes and node_types from exists files
+
+ :param nodes_file: path to nodes csv
+ :param node_types_file: path to node_types csv
+ """
+ def convert(val):
+ # Helper function that converts csv cells to an appropriate type. Helpful for cells of lists (positions, etc)
+ # Renamed from eval() to avoid shadowing the builtin; basestring replaced with str for Python 3.
+ # TODO: keep column dtypes in metadata and use that for converting each column
+ if isinstance(val, float) and math.isnan(val):
+ return None
+ elif isinstance(val, str):
+ try:
+ # helpful for turning strings into lists where appropriate, "(0, 1, 2)" --> (0, 1, 2)
+ return literal_eval(val)
+ except ValueError:
+ return val
+ return val
+
+ if nodes_file is None and node_types_file is None:
+ return None
+
+ elif nodes_file is not None and node_types_file is not None:
+ # Get the array_params from nodes_file and properties from nodes_types_file, combine them to call
+ # the add_nodes() function and rebuild the nodes.
+ nt_df = pd.read_csv(node_types_file, self.CSV_DELIMITER) #, index_col=self.COL_NODE_TYPE_ID)
+ n_df = pd.read_csv(nodes_file, self.CSV_DELIMITER)
+
+ for _, row in nt_df.iterrows():
+ # iterate through the node_types, find all nodes with matching node_type_id and get those node's
+ # parameters as a dictionary of lists
+ node_type_props = {l: convert(row[l]) for l in nt_df.columns if convert(row[l]) is not None}
+ selected_nodes = n_df[n_df[self.COL_NODE_TYPE_ID] == row[self.COL_NODE_TYPE_ID]]
+ N = len(selected_nodes.axes[0])
+ array_params = {l: list(selected_nodes[l]) for l in selected_nodes.columns
+ if l not in ['node_type_id', 'position']}
+
+ # Special function for position_params
+ position = None
+ position_params = None
+ if 'position' in selected_nodes.columns:
+ position_params = {'location': [convert(p) for p in selected_nodes['position']]}
+ position = 'points'
+
+ self._network.add_nodes(N, position=position, position_params=position_params,
+ array_params=array_params, **node_type_props)
+
+ self._network._build_nodes()
+
+ elif node_types_file is not None:
+ # nodes_types exists but nodes doesn't. We convert each row (node_type) in the csv to a collection
+ # of nodes with N=1, no array_params.
+ nt_df = pd.read_csv(node_types_file, self.CSV_DELIMITER)
+ for _, row in nt_df.iterrows():
+ node_type_props = {l: convert(row[l]) for l in nt_df.columns if convert(row[l]) is not None}
+ self._network.add_nodes(N=1, **node_type_props)
+ self._network._build_nodes()
+
+ elif nodes_file is not None:
+ # nodes exists but node_types doesn't. In this case group together all nodes by node_type_id and add them
+ # as a single population (with no node_params)
+ n_df = pd.read_csv(nodes_file, self.CSV_DELIMITER)
+ for nt_id, df in n_df.groupby(self.COL_NODE_TYPE_ID):
+ N = len(df.axes[0])
+ array_params = {l: list(df[l]) for l in df.columns
+ if l not in ['node_type_id', 'position']}
+
+ position = None
+ position_params = None
+ if 'position' in df.columns:
+ position_params = {'location': [convert(p) for p in df['position']]}
+ position = 'points'
+
+ self._network.add_nodes(N, position=position, position_params=position_params,
+ array_params=array_params, node_type_id=nt_id)
+ self._network._build_nodes()
+
+ def __load_edge_types(self, edges_file, edge_types_file):
+ """Loads edges and edge_types
+
+ :param edges_file: path to edges hdf5
+ :param edge_types_file: path to edge_types csv
+ """
+ if edge_types_file is None and edges_file is None:
+ return
+
+ if edge_types_file is not None:
+ # load in the edge-types. iterate through all the rows of edge_types.csv and call connect() function.
+ et_pd = pd.read_csv(edge_types_file, self.CSV_DELIMITER)
+ prop_cols = [label for label in et_pd.columns
+ if label not in [self.COL_SOURCE_QUERY, self.COL_TARGET_QUERY]]
+
+ for _, row in et_pd.iterrows():
+ # the connect function requires a Pool of nodes (like net.nodes()) or a dictionary filter.
+ source_nodes = NodePool.from_filter(self._network, row[self.COL_SOURCE_QUERY])
+ target_nodes = NodePool.from_filter(self._network, row[self.COL_TARGET_QUERY])
+ # TODO: evaluate edge-properties and exclude any that are None.
+ edge_params = {label: row[label] for label in prop_cols}
+
+ # don't try to guess connection rule
+ self._network.connect(source=source_nodes, target=target_nodes, edge_params=edge_params)
+
+ if edges_file is not None:
+ # Create edges from h5.
+ if not self._network.nodes_built:
+ print('The nodes have not been built. Cannot load edges file.')
+ return
+
+ # load h5 tables
+ edges_h5 = h5py.File(edges_file, 'r')
+ edge_types_ds = edges_h5['edge_types']
+ num_syns_ds = edges_h5['num_syns']
+ src_gids_ds = edges_h5['src_gids']
+ edge_ptr_ds = edges_h5['edge_ptr']
+ n_edge_ptr = len(edge_ptr_ds)
+
+ # the network needs edge-types objects while building the edges. If the edge_types_file exists then they
+ # would have been added in the previous section of code. If edge_types_file is missing we will create
+ # filler edge types based on the edge_type_id's found in the edge_types dataset
+ if edge_types_file is None:
+ for et_id in set(edges_h5['edge_types'][:]):
+ self._network.connect(edge_params={self.COL_EDGE_TYPE_ID: et_id})
+
+ # TODO: if edge_types.csv does exists we should check it has matching edge_type_ids with edges.h5/edge_ptr
+
+ def itr_fnc(et):
+ # Creates a generator that will iteratively go through h5 file and return (source_gid, target_gid,
+ # nsyn) values for connections with matching edge_type.edge_type_id
+ edge_type_id = et.id
+ for ep_indx in range(n_edge_ptr - 1):
+ trg_gid = ep_indx
+ for syn_indx in range(edge_ptr_ds[ep_indx], edge_ptr_ds[ep_indx + 1]):
+ if edge_types_ds[syn_indx] == edge_type_id:
+ src_gid = src_gids_ds[syn_indx]
+ n_syn = num_syns_ds[syn_indx]
+ yield (src_gid, trg_gid, n_syn)
+
+ for edge in self._network.edge_types():
+ # create iterator and directly add edges
+ itr = itr_fnc(edge)
+ self._network._add_edges(edge, itr)
+
+ self.edges_built = True
+
+ def load_dir(self, directory, metadata):
+ def get_path(f):
+ if f not in metadata:
+ return None
+ file_name = metadata[f]
+ if directory is None or os.path.isabs(file_name):
+ return file_name
+ return os.path.join(directory, file_name)
+
+ nodes_file = get_path('nodes_file')
+ node_types_file = get_path('node_types_file')
+ self.__load_nodes(nodes_file, node_types_file)
+
+ edge_types_file = get_path('edge_types_file')
+ edges_file = get_path('edges_file')
+ self.__load_edge_types(edges_file, edge_types_file)
+
+ def load(self, nodes_file=None, node_types_file=None, edges_file=None, edge_types_file=None):
+ self.__load_nodes(nodes_file, node_types_file)
+ self.__load_edge_types(edges_file, edge_types_file)
diff --git a/bmtk-vb/build/lib/bmtk/builder/formats/iformats.py b/bmtk-vb/build/lib/bmtk/builder/formats/iformats.py
new file mode 100644
index 0000000..a29261e
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/formats/iformats.py
@@ -0,0 +1,29 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+class IFormat(object):
+ def __init__(self, network):
+ self._network = network
+
+ @property
+ def format(self):
+ raise NotImplementedError()
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/builder/functor_cache.py b/bmtk-vb/build/lib/bmtk/builder/functor_cache.py
new file mode 100644
index 0000000..0da8fc1
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/functor_cache.py
@@ -0,0 +1,55 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from six import string_types
+import functools
+
+
+class FunctorCache(object):
+ def __init__(self):
+ self.cache = {}
+
+ def create(self, connector, **params):
+ if params is None:
+ params = {}
+
+ if isinstance(connector, string_types):
+ # TODO: don't do this, a user may want to return a string in connection_map params
+ func = self.cache[connector]
+ return functools.partial(func, **params)
+
+ elif isinstance(connector, dict):
+ return lambda *args: connector
+
+ elif isinstance(connector, list):
+ # for the iterator we want to pass back lists as they are
+ return connector
+
+ elif callable(connector):
+ return functools.partial(connector, **params)
+
+ else:
+ # should include all numericals, non-callable objects and tuples
+ return lambda *args: connector
+
+ def register(self, name, func):
+ self.cache[name] = func
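+
+# Example (illustration): FunctorCache resolves names/values into callables.
+# fc = FunctorCache()
+# fc.register('always_two', lambda s, t: 2)
+# f = fc.create('always_two')  # functools.partial around the registered functor
+# g = fc.create(7)             # non-callable value -> lambda returning 7
+# f(src, trg) == 2; g(src, trg) == 7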
diff --git a/bmtk-vb/build/lib/bmtk/builder/id_generator.py b/bmtk-vb/build/lib/bmtk/builder/id_generator.py
new file mode 100644
index 0000000..9d7b798
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/id_generator.py
@@ -0,0 +1,71 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import threading
+import numpy as np
+import six
+
+
+class IDGenerator(object):
+ """ A simple class for fetching global ids. To get a unqiue global ID class next(), which should be thread-safe. It
+ Also has a remove_id(gid) in which case next() will never return the gid. The remove_id function is used for cases
+ when using imported networks and we want to elimnate previously created id.
+
+ TODO:
+ * Implement a bit array to keep track of already existing gids
+ * It might be necessary to implement with MPI support?
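+
+ Example (illustration):
+ gen = IDGenerator(init_val=100)
+ gen.next() # -> 100
+ gen.remove_id(102) # 102 will never be handed out
+ [gen.next() for _ in range(2)] # -> [101, 103]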
+ """
+ def __init__(self, init_val=0):
+ self.__counter = init_val
+ self.__taken = set()
+ self.__lock = threading.Lock()
+
+ def remove_id(self, gid):
+ assert(np.issubdtype(type(gid), np.integer))
+ if gid >= self.__counter:
+ self.__taken.add(gid)
+
+ def next(self):
+ self.__lock.acquire()
+ while self.__counter in self.__taken:
+ self.__taken.remove(self.__counter)
+ self.__counter += 1
+
+ nid = self.__counter
+ self.__counter += 1
+ self.__lock.release()
+
+ return nid
+
+ def __contains__(self, gid):
+ return gid < self.__counter
+
+ def __call__(self, *args, **kwargs):
+ if len(args) == 1:
+ N = args[0]
+ elif 'N' in kwargs:
+ N = kwargs['N']
+
+ assert(isinstance(N, six.integer_types))
+ return [self.next() for _ in six.moves.range(N)]
+
diff --git a/bmtk-vb/build/lib/bmtk/builder/io/__init__.py b/bmtk-vb/build/lib/bmtk/builder/io/__init__.py
new file mode 100644
index 0000000..00a458f
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/io/__init__.py
@@ -0,0 +1,66 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import h5py
+from ..network import Network
+
+def write_edges_to_h5(network, filename, synapse_key=None, verbose=True):
+ assert(isinstance(network, Network))
+
+ # The network edges may either be a raw value, dictionary or list
+ if synapse_key is None:
+ lookup = lambda x: x
+
+ elif isinstance(synapse_key, (str, int)):
+ lookup = lambda x: x[synapse_key]
+
+ else:
+ raise Exception("Unable to resolve the synapse_key type.")
+
+ # Create the tables for indptr, nsyns and src_gids
+ if verbose:
+ print("> building tables with {} nodes and {} edges.".format(network.nnodes, network.nedges))
+ indptr_table = [0]
+ nsyns_table = []
+ src_gids_table = []
+ for trg in network.nodes():
+ # TODO: check the order of the node list
+ tid = trg[1]['id']
+ for edges in network.edges([tid], rank=1):
+ src_gids_table.append(edges[0])
+ nsyns_table.append(lookup(edges[2]))
+
+ if len(src_gids_table) == indptr_table[-1]:
+ print("node %d doesn't have any edges {}".format(tid))
+ indptr_table.append(len(src_gids_table))
+
+ # Save the tables in h5 format
+ if verbose:
+ print("> Saving table to {}.".format(filename))
+ with h5py.File(filename, 'w') as hf:
+ hf.create_dataset('indptr', data=indptr_table)
+ hf.create_dataset('nsyns', data=nsyns_table)
+ hf.create_dataset('src_gids', data=src_gids_table, dtype='int32')
+ hf.attrs["shape"] = (network.nnodes, network.nnodes)
diff --git a/bmtk-vb/build/lib/bmtk/builder/iterator.py b/bmtk-vb/build/lib/bmtk/builder/iterator.py
new file mode 100644
index 0000000..1469cfa
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/iterator.py
@@ -0,0 +1,124 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import itertools
+import functools
+import types
+
+
+class IteratorCache(object):
+ def __init__(self):
+ self.cache = {}
+
+ def create(self, itr_name, itr_type, **params):
+ if params is None:
+ params = {}
+
+ if (itr_name, itr_type) in self.cache:
+ func = self.cache[(itr_name, itr_type)]
+ return functools.partial(func, **params)
+
+ else:
+ raise Exception("Couldn't find iterator for ({}, {}).".format(itr_name, itr_type))
+
+ def register(self, name, itr_type, func):
+ self.cache[(name, itr_type)] = func
+
+
+def create(iterator, connector, **params):
+ return ITERATOR_CACHE.create(iterator, type(connector), **params)
+
+
+def register(name, dtype, func):
+ ITERATOR_CACHE.register(name, dtype, func)
+
+
+########################################################################
+# Pre-defined iterators
+########################################################################
+def one_to_all_iterator(source_nodes, target_nodes, connector):
+ """Calls the connector function with (1 source, all targets), iterated for each source"""
+ target_list = list(target_nodes) # list of all targets
+ target_node_ids = [t.node_id for t in target_list] # slightly cheaper than calling node_id S*T times
+ for source in source_nodes:
+ source_node_id = source.node_id
+ edge_vals = connector(source, target_list)
+ for i, target in enumerate(target_list):
+ yield (source_node_id, target_node_ids[i], edge_vals[i])
+
+
+def all_to_one_iterator(source_nodes, target_nodes, connector):
+ """Iterate through all the target nodes and return target node + list of all sources"""
+ source_list = list(source_nodes)
+ for target in target_nodes:
+ val = connector(source_list, target)
+ for i, source in enumerate(source_list):
+ yield (source.node_id, target.node_id, val[i])
+
+
+def one_to_one_iterator(source_nodes, target_nodes, connector):
+ # TODO: may be faster to pull out the node_ids, don't use itertools
+ for source, target in itertools.product(source_nodes, target_nodes):
+ val = connector(source, target)
+ yield (source.node_id, target.node_id, val)
+
+
+def one_to_one_list_iterator(source_nodes, target_nodes, vals):
+ assert(len(vals) == len(source_nodes)*len(target_nodes))
+ for i, (source, target) in enumerate(itertools.product(source_nodes, target_nodes)):
+ yield (source.node_id, target.node_id, vals[i])
+
+
+def one_to_all_list_iterator(source_nodes, target_nodes, vals):
+ assert(len(vals) == len(target_nodes))
+ source_ids = [s.node_id for s in list(source_nodes)]
+ target_ids = [t.node_id for t in list(target_nodes)]
+ for src_id in source_ids:
+ for i, trg_id in enumerate(target_ids):
+ yield (src_id, trg_id, vals[i])
+
+
+def all_to_one_list_iterator(source_nodes, target_nodes, vals):
+ assert(len(vals) == len(source_nodes))
+ source_ids = [s.node_id for s in list(source_nodes)]
+ target_ids = [t.node_id for t in list(target_nodes)]
+ for trg_id in target_ids:
+ for i, src_id in enumerate(source_ids):
+ yield (src_id, trg_id, vals[i])
+
+
+def lambda_iterator(source_nodes, target_nodes, lambda_val):
+ for source, target in itertools.product(source_nodes, target_nodes):
+ yield (source.node_id, target.node_id, lambda_val())
+
+
+ITERATOR_CACHE = IteratorCache()
+register('one_to_one', functools.partial, one_to_one_iterator)
+register('all_to_one', functools.partial, all_to_one_iterator)
+register('one_to_all', functools.partial, one_to_all_iterator)
+
+register('one_to_one', list, one_to_one_list_iterator)
+register('one_to_all', list, one_to_all_list_iterator)
+register('all_to_one', list, all_to_one_list_iterator)
+
+
+register('one_to_one', types.FunctionType, lambda_iterator)
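+
+# Example (illustration): pair a 'one_to_all' iterator with a connector functor.
+# The connector f(source, target_list) returns one value per target; each item
+# yielded is (source_node_id, target_node_id, edge_val).
+# conr = connector.create(f)  # a functools.partial, so its type matches the registry
+# itr = create('one_to_all', conr)
+# for src_id, trg_id, val in itr(src_nodes, trg_nodes, conr): ...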
diff --git a/bmtk-vb/build/lib/bmtk/builder/network.py b/bmtk-vb/build/lib/bmtk/builder/network.py
new file mode 100644
index 0000000..90d3ac1
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/network.py
@@ -0,0 +1,478 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import numpy as np
+import types
+import csv
+import six
+
+from .node_pool import NodePool
+from .connection_map import ConnectionMap
+from .node_set import NodeSet
+from .id_generator import IDGenerator
+
+
+class Network (object):
+ def __init__(self, name, **network_props):
+ if len(name) == 0:
+ raise Exception('Network name missing.')
+
+ self._network_name = name
+
+ self._nnodes = 0
+ self._nodes_built = False
+ self._nedges = 0
+ self._edges_built = False
+
+ self._node_sets = []
+ self.__external_node_sets = []
+ self.__node_id_counter = 0
+
+ self._node_types_properties = {}
+ self._node_types_columns = set(['node_type_id'])
+ # self._edge_type_properties = {}
+ # self._edge_types_columns = set(['edge_type_id'])
+ self._connection_maps = []
+ #self._connection_maps = ConnectionTable()
+
+ self._node_id_gen = IDGenerator()
+ self._node_type_id_gen = IDGenerator(100)
+ self._edge_type_id_gen = IDGenerator(100)
+
+ #self._connection_table = []
+ #self._source_networks = []
+ #self._target_networks = []
+ self._network_conns = set()
+ self._connected_networks = {}
+
+ @property
+ def name(self):
+ return self._network_name
+
+ @property
+ def nodes_built(self):
+ return self._nodes_built
+
+ @property
+ def edges_built(self):
+ return self._edges_built
+
+ @property
+ def nnodes(self):
+ raise NotImplementedError
+
+ @property
+ def nedges(self):
+ raise NotImplementedError
+
+ def get_connections(self):
+ return self._connection_maps
+
+ def _add_node_type(self, props):
+ node_type_id = props.get('node_type_id', None)
+ if node_type_id is None:
+ node_type_id = self._node_type_id_gen.next()
+ else:
+ if node_type_id in self._node_types_properties:
+ raise Exception('node_type_id {} already exists.'.format(node_type_id))
+ self._node_type_id_gen.remove_id(node_type_id)
+
+ props['node_type_id'] = node_type_id
+ self._node_types_properties[node_type_id] = props
+
+ def add_nodes(self, N=1, **properties):
+ self._clear()
+
+ # categorize properties as either a node-params (for nodes file) or node-type-property (for node_types files)
+ node_params = {}
+ node_properties = {}
+ for prop_name, prop_value in properties.items():
+ if isinstance(prop_value, (list, np.ndarray)): # TODO: what about pandas series
+ n_props = len(prop_value)
+ if n_props != N:
+ raise Exception('Trying to pass in array of length {} into N={} nodes'.format(n_props, N))
+ node_params[prop_name] = prop_value
+
+ elif isinstance(prop_value, types.GeneratorType):
+ vals = list(prop_value)
+ assert(len(vals) == N)
+ node_params[prop_name] = vals
+
+ else:
+ node_properties[prop_name] = prop_value
+ self._node_types_columns.add(prop_name)
+
+ # If node-type-id exists, make sure there is no clash, otherwise generate a new id.
+ if 'node_type_id' in node_params:
+ raise Exception('There can be only one "node_type_id" per set of nodes.')
+
+ self._add_node_type(node_properties)
+ self._node_sets.append(NodeSet(N, node_params, node_properties))
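+
+ # Example (hypothetical): ten nodes sharing one node-type, with a per-node
+ # array property (array length must equal N):
+ # net.add_nodes(N=10, ei='e', model_type='biophysical',
+ # positions=np.random.rand(10, 3))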
+
+ def add_edges(self, source=None, target=None, connection_rule=1, connection_params=None, iterator='one_to_one',
+ **edge_type_properties):
+ # TODO: check edge_type_properties for 'edge_type_id' and make sure there isn't a collision. Otherwise create
+ # a new id.
+ if not isinstance(source, NodePool):
+ source = NodePool(self, **source or {})
+
+ if not isinstance(target, NodePool):
+ target = NodePool(self, **target or {})
+
+ self._network_conns.add((source.network_name, target.network_name))
+ self._connected_networks[source.network_name] = source.network
+ self._connected_networks[target.network_name] = target.network
+
+ # TODO: make sure that they don't add a dictionary or some other weird property type.
+ edge_type_id = edge_type_properties.get('edge_type_id', None)
+ if edge_type_id is None:
+ edge_type_id = self._edge_type_id_gen.next()
+ edge_type_properties['edge_type_id'] = edge_type_id
+ elif edge_type_id in self._edge_type_id_gen:
+ raise Exception('edge_type_id {} already exists.'.format(edge_type_id))
+ else:
+ self._edge_type_id_gen.remove_id(edge_type_id)
+
+ edge_type_properties['source_query'] = source.filter_str
+ edge_type_properties['target_query'] = target.filter_str
+
+ if 'nsyns' in edge_type_properties:
+ connection_rule = edge_type_properties['nsyns']
+ del edge_type_properties['nsyns']
+
+ # self._edge_types_columns.update(edge_type_properties.keys())
+ connection = ConnectionMap(source, target, connection_rule, connection_params, iterator, edge_type_properties)
+ self._connection_maps.append(connection)
+ # self._connection_maps.add(source.network_name, target.network_name, connection)
+ return connection
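+
+ # Example (hypothetical): connect excitatory to inhibitory cells with five
+ # synapses per pair and an extra edge-type property:
+ # net.add_edges(source={'ei': 'e'}, target={'ei': 'i'},
+ # connection_rule=5, syn_type='AMPA_Exc2Inh')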
+
+ def nodes(self, **properties):
+ if not self.nodes_built:
+ self._build_nodes()
+
+ return NodePool(self, **properties)
+
+ def nodes_iter(self, nids=None):
+ raise NotImplementedError
+
+ def edges(self, target_nodes=None, source_nodes=None, target_network=None, source_network=None, **properties):
+ """Returns a list of dictionary-like Edge objects, given filter parameters.
+
+ To get all edges from a network
+ edges = net.edges()
+
+ To specify the target and/or source node-set
+ edges = net.edges(target_nodes=net.nodes(type='biophysical'), source_nodes=net.nodes(ei='i'))
+
+ To only get edges with a given edge_property
+ edges = net.edges(weight=100, syn_type='AMPA_Exc2Exc')
+
+ :param target_nodes: gid, list of gid, dict or node-pool. Set of target nodes for a given edge.
+ :param source_nodes: gid, list of gid, dict or node-pool. Set of source nodes for a given edge.
+ :param target_network: name of network containing target nodes.
+ :param source_network: name of network containing source nodes.
+ :param properties: edge-properties used to filter out only certain edges.
+ :return: list of bmtk.builder.edge.Edge properties.
+ """
+ def nodes2gids(nodes, network):
+ """helper function for converting target and source nodes into list of gids"""
+ if nodes is None or isinstance(nodes, list):
+ return nodes, network
+ if isinstance(nodes, int):
+ return [nodes], network
+ if isinstance(nodes, dict):
+ network = network or self._network_name
+ nodes = self._connected_networks[network].nodes(**nodes)
+ if isinstance(nodes, NodePool):
+ if network is not None and nodes.network_name != network:
+ print('Warning: nodes and network do not match')
+ return [n.node_id for n in nodes], nodes.network_name
+ else:
+ raise Exception("Couldn't convert nodes")
+
+ def filter_edges(e):
+ """Returns true only if all the properities match for a given edge"""
+ for k, v in properties.items():
+ if k not in e:
+ return False
+ if e[k] != v:
+ return False
+ return True
+
+ if not self.edges_built:
+ self.build()
+
+        # trg_gids can't be None for edges_iter. If target_nodes is not explicitly stated, get all target gids
+        # that synapse onto or from the current network.
+ if target_nodes is None:
+ trg_gid_set = set(n.node_id for cm in self._connection_maps for n in cm.target_nodes)
+ target_nodes = sorted(trg_gid_set)
+
+ # convert target/source nodes into a list of their gids
+ trg_gids, trg_net = nodes2gids(target_nodes, target_network)
+ src_gids, src_net = nodes2gids(source_nodes, source_network)
+
+ # use the iterator to get edges and return as a list
+        if not properties:
+ edges = list(self.edges_iter(trg_gids=trg_gids, trg_network=trg_net, src_network=src_net))
+ else:
+ # filter out certain edges using the properties parameters
+ edges = [e for e in self.edges_iter(trg_gids=trg_gids, trg_network=trg_net, src_network=src_net)
+ if filter_edges(e)]
+
+ if src_gids is not None:
+ # if src_gids are set filter out edges some more
+ edges = [e for e in edges if e.source_gid in src_gids]
+
+ return edges
+
+ def edges_iter(self, trg_gids, src_network=None, trg_network=None):
+ """Given a list of target gids, returns a generator for iteratoring over all possible edges.
+
+ It is preferable to use edges() method instead, it allows more flexibibility in the input and can better
+ indicate if their is a problem.
+
+ The order of the edges returned will be in the same order as the trg_gids list, but does not guarentee any
+ secondary ordering by source-nodes and/or edge-type. If their isn't a edge with a matching target-id then
+ it will skip that gid in the list, the size of the generator can 0 to arbitrarly large.
+
+ :param trg_gids: list of gids to match with an edge's target.
+ :param src_network: str, only returns edges coming from the specified source network.
+        :param trg_network: str, only returns edges going to the specified target network.
+        :return: iterator of bmtk.builder.edge.Edge objects representing the matching edges.
+ """
+ raise NotImplementedError
+
+ def clear(self):
+ self._nodes_built = False
+ self._edges_built = False
+ self._clear()
+
+ def _node_id(self, N):
+ for i in six.moves.range(N):
+ yield self.__node_id_counter
+ self.__node_id_counter += 1
+
+ def _build_nodes(self):
+ """Builds or rebuilds all the nodes, clear out both node and edge sets."""
+ # print 'build_nodes'
+ self._clear()
+ self._initialize()
+
+ for ns in self._node_sets:
+ nodes = ns.build(nid_generator=self._node_id)
+ self._add_nodes(nodes)
+ self._nodes_built = True
+
+ def __build_edges(self):
+ """Builds network edges"""
+ if not self.nodes_built:
+ # only rebuild nodes if necessary.
+ self._build_nodes()
+
+ for i, conn_map in enumerate(self._connection_maps):
+ # print conn_map
+ self._add_edges(conn_map, i)
+
+ self._edges_built = True
+
+ def build(self, force=False):
+ """ Builds nodes (assigns gids) and edges.
+
+ Args:
+            force (bool): set True to force a complete rebuild of nodes and edges. If nodes() or save_nodes() has
+                been called before, forcing a rebuild may change the gid of each node.
+ """
+
+ # if nodes() or save_nodes() is called by user prior to calling build() - make sure the nodes
+ # are completely rebuilt (unless a node set has been added).
+ if force:
+ self._clear()
+ self._initialize()
+ self._build_nodes()
+
+ # always build the edges.
+ self.__build_edges()
+
+ def __get_path(self, filename, path_dir, ftype):
+ if filename is None:
+ fname = '{}_{}'.format(self.name, ftype)
+ return os.path.join(path_dir, fname)
+ elif os.path.isabs(filename):
+ return filename
+ else:
+ return os.path.join(path_dir, filename)
+
+ def save(self, output_dir='.'):
+ self.save_nodes(output_dir=output_dir)
+ self.save_edges(output_dir=output_dir)
+
+ def save_nodes(self, nodes_file_name=None, node_types_file_name=None, output_dir='.', force_overwrite=True):
+ nodes_file = self.__get_path(nodes_file_name, output_dir, 'nodes.h5')
+ if not force_overwrite and os.path.exists(nodes_file):
+ raise Exception('File {} exists. Please use different name or use force_overwrite'.format(nodes_file))
+ nf_dir = os.path.dirname(nodes_file)
+ if not os.path.exists(nf_dir):
+ os.makedirs(nf_dir)
+
+ node_types_file = self.__get_path(node_types_file_name, output_dir, 'node_types.csv')
+ if not force_overwrite and os.path.exists(node_types_file):
+ raise Exception('File {} exists. Please use different name or use force_overwrite'.format(node_types_file))
+ ntf_dir = os.path.dirname(node_types_file)
+ if not os.path.exists(ntf_dir):
+ os.makedirs(ntf_dir)
+
+ self._save_nodes(nodes_file)
+ self._save_node_types(node_types_file)
+
+ def _save_nodes(self, nodes_file_name):
+ raise NotImplementedError
+
+ def _save_node_types(self, node_types_file_name):
+ node_types_cols = ['node_type_id'] + [col for col in self._node_types_columns if col != 'node_type_id']
+ with open(node_types_file_name, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=' ')
+ csvw.writerow(node_types_cols)
+ for node_type in self._node_types_properties.values():
+ csvw.writerow([node_type.get(cname, 'NULL') for cname in node_types_cols])
+
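+    # For illustration, the node-types file written above is a space-delimited
+    # table with 'NULL' for missing values, e.g.:
+    #
+    #   node_type_id ei model_type
+    #   100 e biophysical
+    #   101 i NULL
+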
+ def import_nodes(self, nodes_file_name, node_types_file_name):
+ raise NotImplementedError
+
+ def save_edges(self, edges_file_name=None, edge_types_file_name=None, output_dir='.', src_network=None,
+ trg_network=None, name=None, force_build=True, force_overwrite=False):
+ # Make sure edges exists and are built
+ if len(self._connection_maps) == 0:
+ print("Warning: no edges have been made for this network, skipping saving.")
+ return
+
+ if self._edges_built is False:
+ if force_build:
+ print("Message: building edges")
+ self.__build_edges()
+ else:
+ print("Warning: Edges are not built. Either call build() or use force_build parameter. Skip saving.")
+ return
+
+ network_params = [(s, t, s+'_'+t+'_edges.h5', s+'_'+t+'_edge_types.csv') for s, t in list(self._network_conns)]
+ if src_network is not None:
+ network_params = [p for p in network_params if p[0] == src_network]
+
+ if trg_network is not None:
+ network_params = [p for p in network_params if p[1] == trg_network]
+
+ if len(network_params) == 0:
+ print("Warning: couldn't find connections. Skip saving.")
+ return
+
+        if edges_file_name is not None or edge_types_file_name is not None:
+ network_params = [(network_params[0][0], network_params[0][1], edges_file_name, edge_types_file_name)]
+
+ if not os.path.exists(output_dir):
+ os.mkdir(output_dir)
+
+ for p in network_params:
+ if p[3] is not None:
+ self._save_edge_types(os.path.join(output_dir, p[3]), p[0], p[1])
+
+ if p[2] is not None:
+ self._save_edges(os.path.join(output_dir, p[2]), p[0], p[1], name)
+
+ def _save_edge_types(self, edge_types_file_name, src_network, trg_network):
+
+ # Get edge-type properties for connections with matching source/target networks
+ matching_et = [c.edge_type_properties for c in self._connection_maps
+ if c.source_network_name == src_network and c.target_network_name == trg_network]
+
+ # Get edge-type properties that are only relevant for this source-target network pair
+        cols = ['edge_type_id', 'target_query', 'source_query'] # mandatory and should come first
+ merged_keys = [k for et in matching_et for k in et.keys() if k not in cols]
+ cols += list(set(merged_keys))
+
+ # Write to csv
+ with open(edge_types_file_name, 'w') as csvfile:
+ csvw = csv.writer(csvfile, delimiter=' ')
+ csvw.writerow(cols)
+ for edge_type in matching_et:
+ csvw.writerow([edge_type.get(cname, 'NULL') if edge_type.get(cname, 'NULL') is not None else 'NULL'
+ for cname in cols])
+
+ def _save_edges(self, edges_file_name, src_network, trg_network):
+ raise NotImplementedError
+
+ def _initialize(self):
+ raise NotImplementedError
+
+ def _add_nodes(self, node_tuples):
+ raise NotImplementedError
+
+ def _add_edges(self, edge_tuples, i):
+ raise NotImplementedError
+
+ def _clear(self):
+ raise NotImplementedError
+
+ """
+ def _edges_iter(targets=None, sources=None):
+ raise NotImplementedError
+ """
+
+"""
+class ConnectionTable(object):
+ def __init__(self):
+ self.__targets = {}
+ self.__sources = {}
+ self.__connections = []
+
+ def add(self, source_network, target_network, connection_map):
+ # TODO: If the source/target are network objects we can get the network_name
+ assert(isinstance(source_network, basestring))
+ assert(isinstance(target_network, basestring))
+ assert(isinstance(connection_map, ConnectionMap))
+
+ if source_network not in self.__sources:
+ self.__sources[source_network] = []
+ if target_network not in self.__targets:
+ self.__targets[target_network] = []
+
+ cm_index = len(self.__connections)
+ self.__connections.append(connection_map)
+ self.__sources[source_network].append(cm_index)
+ self.__targets[target_network].append(cm_index)
+
+ def get(self, source_network=None, target_network=None):
+ # TODO: Add warning if source/target network is not found
+ cm_indicies = set(range(len(self.__connections)))
+ if source_network is not None:
+ cm_indicies &= set(self.__sources.get(source_network, []))
+
+ if target_network is not None:
+ cm_indicies &= set(self.__targets.get(target_network, []))
+
+ return self.__connections[cm_indicies]
+"""
+
+
+
+
+
diff --git a/bmtk-vb/build/lib/bmtk/builder/networks/__init__.py b/bmtk-vb/build/lib/bmtk/builder/networks/__init__.py
new file mode 100644
index 0000000..45b0922
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/networks/__init__.py
@@ -0,0 +1,30 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .dm_network import DenseNetwork
+NetworkBuilder = DenseNetwork
+
+try:
+ # If mpi4py is installed let users access MPIBuilder for parallel building networks
+ from .mpi_network import MPINetwork, MPINetwork as MPIBuilder
+except ImportError:
+ pass
diff --git a/bmtk-vb/build/lib/bmtk/builder/networks/dm_network.py b/bmtk-vb/build/lib/bmtk/builder/networks/dm_network.py
new file mode 100644
index 0000000..b6547dc
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/networks/dm_network.py
@@ -0,0 +1,487 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import numpy as np
+import h5py
+import six
+import csv
+
+from ..network import Network
+from bmtk.builder.node import Node
+from bmtk.builder.edge import Edge
+from bmtk.utils import sonata
+
+
+class DenseNetwork(Network):
+ def __init__(self, name, **network_props):
+ super(DenseNetwork, self).__init__(name, **network_props or {})
+
+ self.__edges_types = {}
+ self.__src_mapping = {}
+
+ self.__networks = {}
+ self.__node_count = 0
+ self._nodes = []
+
+ self.__edges_tables = []
+ self._target_networks = {}
+
+ def _initialize(self):
+ self.__id_map = []
+ self.__lookup = []
+
+ def _add_nodes(self, nodes):
+ self._nodes.extend(nodes)
+ self._nnodes = len(self._nodes)
+
+ """
+ id_label = 'node_id' if 'node_id' in nodes[0].keys() else 'id'
+
+ start_idx = len(self.__id_map) #
+ self.__id_map += [n[id_label] for n in nodes]
+ self.__nodes += [(interal_id, nodes[node_idx])
+ for node_idx, interal_id in enumerate(xrange(start_idx, len(self.__id_map)))]
+
+ assert(len(self.__id_map) == len(self.__nodes))
+ """
+
+ def edges_table(self):
+ return self.__edges_tables
+
+ def _save_nodes(self, nodes_file_name):
+ if not self._nodes_built:
+ self._build_nodes()
+
+        # save the nodes file
+ # TODO: how do we add attributes to the h5
+ group_indx = 0
+ groups_lookup = {}
+ group_indicies = {}
+ group_props = {}
+ for ns in self._node_sets:
+ if ns.params_hash in groups_lookup:
+ continue
+ else:
+ groups_lookup[ns.params_hash] = group_indx
+ group_indicies[group_indx] = 0
+ group_props[group_indx] = {k: [] for k in ns.params_keys if k != 'node_id'}
+ group_indx += 1
+
+ node_gid_table = np.zeros(self._nnodes) # todo: set dtypes
+ node_type_id_table = np.zeros(self._nnodes)
+ node_group_table = np.zeros(self._nnodes)
+ node_group_index_tables = np.zeros(self._nnodes)
+
+ for i, node in enumerate(self.nodes()):
+ node_gid_table[i] = node.node_id
+ node_type_id_table[i] = node.node_type_id
+ group_id = groups_lookup[node.params_hash]
+ node_group_table[i] = group_id
+ node_group_index_tables[i] = group_indicies[group_id]
+ group_indicies[group_id] += 1
+
+ group_dict = group_props[group_id]
+ for key, prop_ds in group_dict.items():
+ prop_ds.append(node.params[key])
+
+ # TODO: open in append mode
+ with h5py.File(nodes_file_name, 'w') as hf:
+ # Add magic and version attribute
+ add_hdf5_attrs(hf)
+
+ pop_grp = hf.create_group('/nodes/{}'.format(self.name))
+ pop_grp.create_dataset('node_id', data=node_gid_table, dtype='uint64')
+ pop_grp.create_dataset('node_type_id', data=node_type_id_table, dtype='uint64')
+ pop_grp.create_dataset('node_group_id', data=node_group_table, dtype='uint32')
+ pop_grp.create_dataset('node_group_index', data=node_group_index_tables, dtype='uint64')
+
+ for grp_id, props in group_props.items():
+ model_grp = pop_grp.create_group('{}'.format(grp_id))
+
+ for key, dataset in props.items():
+ # ds_path = 'nodes/{}/{}'.format(grp_id, key)
+                    try:
+                        model_grp.create_dataset(key, data=dataset)
+                    except TypeError:
+                        # h5py couldn't infer a dtype (e.g. mixed/object values); fall back to strings.
+                        # Write into model_grp, not the file root, so keys can't collide across groups.
+                        str_list = [str(d) for d in dataset]
+                        model_grp.create_dataset(key, data=str_list)
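+        # Resulting HDF5 layout (for illustration):
+        #   /nodes/<name>/node_id, node_type_id, node_group_id, node_group_index
+        #   /nodes/<name>/<group_id>/<param>   (one column per group property)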
+
+ def nodes_iter(self, node_ids=None):
+ if node_ids is not None:
+ return [n for n in self._nodes if n.node_id in node_ids]
+ else:
+ return self._nodes
+
+ def _process_nodepool(self, nodepool):
+ return nodepool
+
+ def import_nodes(self, nodes_file_name, node_types_file_name, population=None):
+ sonata_file = sonata.File(data_files=nodes_file_name, data_type_files=node_types_file_name)
+ if sonata_file.nodes is None:
+ raise Exception('nodes file {} does not have any nodes.'.format(nodes_file_name))
+
+ populations = sonata_file.nodes.populations
+ if len(populations) == 1:
+ node_pop = populations[0]
+ elif population is None:
+            raise Exception('The nodes file {} contains multiple populations. '.format(nodes_file_name) +
+                            'Please specify the population parameter.')
+ else:
+ for pop in populations:
+ if pop.name == population:
+ node_pop = pop
+ break
+ else:
+ raise Exception('Nodes file {} does not contain population {}.'.format(nodes_file_name, population))
+
+ # print node_pop.node_types_table
+ for node_type_props in node_pop.node_types_table:
+ self._add_node_type(node_type_props)
+
+ for node in node_pop:
+ self._node_id_gen.remove_id(node.node_id)
+ self._nodes.append(Node(node.node_id, node.group_props, node.node_type_properties))
+
+ def _add_edges(self, connection_map, i):
+ syn_table = self.EdgeTable(connection_map)
+ connections = connection_map.connection_itr()
+ for con in connections:
+ if con[2] is not None:
+ syn_table[con[0], con[1]] = con[2]
+
+ target_net = connection_map.target_nodes
+ self._target_networks[target_net.network_name] = target_net.network
+
+ nsyns = np.sum(syn_table.nsyn_table)
+ self._nedges += int(nsyns)
+ edge_table = {'syn_table': syn_table,
+ 'nsyns': nsyns,
+ 'edge_types': connection_map.edge_type_properties,
+ 'edge_type_id': connection_map.edge_type_properties['edge_type_id'],
+ 'source_network': connection_map.source_nodes.network_name,
+ 'target_network': connection_map.target_nodes.network_name,
+ 'params': {},
+ 'params_dtypes': {},
+ 'source_query': connection_map.source_nodes.filter_str,
+ 'target_query': connection_map.target_nodes.filter_str}
+
+ for param in connection_map.params:
+ rule = param.rule
+ param_names = param.names
+ edge_table['params_dtypes'].update(param.dtypes)
+ if isinstance(param_names, list) or isinstance(param_names, tuple):
+ tmp_tables = [self.PropertyTable(nsyns) for _ in range(len(param_names))]
+ for source in connection_map.source_nodes:
+ src_node_id = source.node_id
+ for target in connection_map.target_nodes:
+                    trg_node_id = target.node_id # TODO: pull this out and put in its own list
+ for _ in range(syn_table[src_node_id, trg_node_id]):
+ pvals = rule(source, target)
+ for i in range(len(param_names)):
+ tmp_tables[i][src_node_id, trg_node_id] = pvals[i]
+
+ for i, name in enumerate(param_names):
+ # TODO: I think a copy constructor might get called, move this out.
+ edge_table['params'][name] = tmp_tables[i]
+
+ else:
+ pt = self.PropertyTable(np.sum(nsyns))
+ for source in connection_map.source_nodes:
+ src_node_id = source.node_id
+ for target in connection_map.target_nodes:
+                    trg_node_id = target.node_id # TODO: pull this out and put in its own list
+ #print('{}, {}: {}'.format(src_node_id, trg_node_id, edge_table[src_node_id, trg_node_id]))
+ for _ in range(syn_table[src_node_id, trg_node_id]):
+ pt[src_node_id, trg_node_id] = rule(source, target)
+ edge_table['params'][param_names] = pt
+
+ self.__edges_tables.append(edge_table)
+
+ def _save_edges(self, edges_file_name, src_network, trg_network, name=None):
+ groups = {}
+ group_dtypes = {} # TODO: this should be stored in PropertyTable
+ grp_id_itr = 0
+ groups_lookup = {}
+ total_syns = 0
+
+ matching_edge_tables = [et for et in self.__edges_tables
+ if et['source_network'] == src_network and et['target_network'] == trg_network]
+
+ for ets in matching_edge_tables:
+ params_hash = str(ets['params'].keys())
+ group_id = groups_lookup.get(params_hash, None)
+ if group_id is None:
+ group_id = grp_id_itr
+ groups_lookup[params_hash] = group_id
+ grp_id_itr += 1
+
+ ets['group_id'] = group_id
+ groups[group_id] = {}
+ group_dtypes[group_id] = ets['params_dtypes']
+ for param_name in ets['params'].keys():
+ groups[group_id][param_name] = []
+
+ total_syns += int(ets['nsyns'])
+
+ group_index_itrs = [0 for _ in range(grp_id_itr)]
+ trg_gids = np.zeros(total_syns) # set dtype to uint64
+ src_gids = np.zeros(total_syns)
+ edge_groups = np.zeros(total_syns) # dtype uint16 or uint8
+ edge_group_index = np.zeros(total_syns) # uint32
+ edge_type_ids = np.zeros(total_syns) # uint32
+
+ # TODO: Another potential issue if node-ids don't start with 0
+ index_ptrs = np.zeros(len(self._target_networks[trg_network].nodes()) + 1)
+ #index_ptrs = np.zeros(len(self._nodes)+1) # TODO: issue when target nodes come from another network
+ index_ptr_itr = 0
+
+ gid_indx = 0
+ for trg_node in self._target_networks[trg_network].nodes():
+ index_ptrs[index_ptr_itr] = gid_indx
+ index_ptr_itr += 1
+
+ for ets in matching_edge_tables:
+ edge_group_id = ets['group_id']
+ group_table = groups[edge_group_id]
+
+ syn_table = ets['syn_table']
+ if syn_table.has_target(trg_node.node_id):
+ if ets['params']:
+ for src_id, nsyns in syn_table.trg_itr(trg_node.node_id):
+ # Add on to the edges index
+ indx_end = gid_indx+nsyns
+ while gid_indx < indx_end:
+ trg_gids[gid_indx] = trg_node.node_id
+ src_gids[gid_indx] = src_id
+ edge_type_ids[gid_indx] = ets['edge_type_id']
+ edge_groups[gid_indx] = edge_group_id
+ edge_group_index[gid_indx] = group_index_itrs[edge_group_id]
+ group_index_itrs[edge_group_id] += 1
+ gid_indx += 1
+
+ for param_name, param_table in ets['params'].items():
+ param_vals = group_table[param_name]
+ for val in param_table.itr_vals(src_id, trg_node.node_id):
+ param_vals.append(val)
+
+ else:
+ # If no properties just print nsyns table.
+ if 'nsyns' not in group_table:
+ group_table['nsyns'] = []
+ group_dtypes[edge_group_id]['nsyns'] = 'uint16'
+ for src_id, nsyns in syn_table.trg_itr(trg_node.node_id):
+ trg_gids[gid_indx] = trg_node.node_id
+ src_gids[gid_indx] = src_id
+ edge_type_ids[gid_indx] = ets['edge_type_id']
+ edge_groups[gid_indx] = edge_group_id
+ edge_group_index[gid_indx] = group_index_itrs[edge_group_id]
+ # group_dtypes
+ group_index_itrs[edge_group_id] += 1
+ gid_indx += 1
+
+ group_table['nsyns'].append(nsyns)
+
+ trg_gids = trg_gids[:gid_indx]
+ src_gids = src_gids[:gid_indx]
+ edge_groups = edge_groups[:gid_indx]
+ edge_group_index = edge_group_index[:gid_indx]
+ edge_type_ids = edge_type_ids[:gid_indx]
+
+ pop_name = '{}_to_{}'.format(src_network, trg_network) if name is None else name
+
+ index_ptrs[index_ptr_itr] = gid_indx
+ with h5py.File(edges_file_name, 'w') as hf:
+ add_hdf5_attrs(hf)
+ pop_grp = hf.create_group('/edges/{}'.format(pop_name))
+ pop_grp.create_dataset('target_node_id', data=trg_gids, dtype='uint64')
+ pop_grp['target_node_id'].attrs['node_population'] = trg_network
+ pop_grp.create_dataset('source_node_id', data=src_gids, dtype='uint64')
+ pop_grp['source_node_id'].attrs['node_population'] = src_network
+
+ pop_grp.create_dataset('edge_group_id', data=edge_groups, dtype='uint16')
+ pop_grp.create_dataset('edge_group_index', data=edge_group_index, dtype='uint32')
+ pop_grp.create_dataset('edge_type_id', data=edge_type_ids, dtype='uint32')
+ # pop_grp.create_dataset('edges/index_pointer', data=index_ptrs, dtype='uint32')
+
+ for group_id, params_dict in groups.items():
+ model_grp = pop_grp.create_group(str(group_id))
+ for params_key, params_vals in params_dict.items():
+ #group_path = 'edges/{}/{}'.format(group_id, params_key)
+ dtype = group_dtypes[group_id][params_key]
+ if dtype is not None:
+ model_grp.create_dataset(params_key, data=list(params_vals), dtype=dtype)
+ else:
+ model_grp.create_dataset(params_key, data=list(params_vals))
+
+ self._create_index(pop_grp['target_node_id'], pop_grp, index_type='target')
+ self._create_index(pop_grp['source_node_id'], pop_grp, index_type='source')
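+        # Resulting HDF5 layout (for illustration; the index group names use the
+        # literal spelling 'indicies' written by _create_index below):
+        #   /edges/<pop_name>/source_node_id, target_node_id
+        #   /edges/<pop_name>/edge_type_id, edge_group_id, edge_group_index
+        #   /edges/<pop_name>/<group_id>/<param>   (per-group property columns)
+        #   /edges/<pop_name>/indicies/{target_to_source,source_to_target}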
+
+ def _create_index(self, node_ids_ds, output_grp, index_type='target'):
+ if index_type == 'target':
+ edge_nodes = np.array(node_ids_ds, dtype=np.int64)
+ output_grp = output_grp.create_group('indicies/target_to_source')
+ elif index_type == 'source':
+ edge_nodes = np.array(node_ids_ds, dtype=np.int64)
+ output_grp = output_grp.create_group('indicies/source_to_target')
+
+ edge_nodes = np.append(edge_nodes, [-1])
+ n_targets = np.max(edge_nodes)
+ ranges_list = [[] for _ in six.moves.range(n_targets + 1)]
+
+ n_ranges = 0
+ begin_index = 0
+ cur_trg = edge_nodes[begin_index]
+ for end_index, trg_gid in enumerate(edge_nodes):
+ if cur_trg != trg_gid:
+ ranges_list[cur_trg].append((begin_index, end_index))
+ cur_trg = int(trg_gid)
+ begin_index = end_index
+ n_ranges += 1
+
+ node_id_to_range = np.zeros((n_targets + 1, 2))
+ range_to_edge_id = np.zeros((n_ranges, 2))
+ range_index = 0
+ for node_index, trg_ranges in enumerate(ranges_list):
+ if len(trg_ranges) > 0:
+ node_id_to_range[node_index, 0] = range_index
+ for r in trg_ranges:
+ range_to_edge_id[range_index, :] = r
+ range_index += 1
+ node_id_to_range[node_index, 1] = range_index
+
+ output_grp.create_dataset('range_to_edge_id', data=range_to_edge_id, dtype='uint64')
+ output_grp.create_dataset('node_id_to_range', data=node_id_to_range, dtype='uint64')
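+        # Worked example (illustrative): for a target_node_id column of
+        # [0, 0, 2, 2, 2], the contiguous runs give range_to_edge_id =
+        # [[0, 2], [2, 5]]; node_id_to_range maps node 0 -> rows [0, 1),
+        # node 1 -> an empty range, and node 2 -> rows [1, 2) of that table.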
+
+ def _clear(self):
+ self._nedges = 0
+ self._nnodes = 0
+
+ def edges_iter(self, trg_gids, src_network=None, trg_network=None):
+ matching_edge_tables = self.__edges_tables
+ if trg_network is not None:
+ matching_edge_tables = [et for et in self.__edges_tables if et['target_network'] == trg_network]
+
+ if src_network is not None:
+ matching_edge_tables = [et for et in matching_edge_tables if et['source_network'] == src_network]
+
+ for trg_gid in trg_gids:
+ for ets in matching_edge_tables:
+ syn_table = ets['syn_table']
+ if syn_table.has_target(trg_gid):
+ for src_id, nsyns in syn_table.trg_itr(trg_gid):
+ if ets['params']:
+ synapses = [{} for _ in range(nsyns)]
+ for param_name, param_table in ets['params'].items():
+ for i, val in enumerate(param_table[src_id, trg_gid]):
+ synapses[i][param_name] = val
+ for syn_prop in synapses:
+ yield Edge(src_gid=src_id, trg_gid=trg_gid, edge_type_props=ets['edge_types'],
+ syn_props=syn_prop)
+ else:
+ yield Edge(src_gid=src_id, trg_gid=trg_gid, edge_type_props=ets['edge_types'],
+ syn_props={'nsyns': nsyns})
+
+ @property
+ def nnodes(self):
+ if not self.nodes_built:
+ return 0
+ return self._nnodes
+
+ @property
+ def nedges(self):
+ return self._nedges
+
+ class EdgeTable(object):
+ def __init__(self, connection_map):
+ # TODO: save column and row lengths
+ # Create maps between source_node gids and their row in the matrix.
+ self.__idx2src = [n.node_id for n in connection_map.source_nodes]
+ self.__src2idx = {node_id: i for i, node_id in enumerate(self.__idx2src)}
+
+            # Create maps between target_node gids and their column in the matrix
+ self.__idx2trg = [n.node_id for n in connection_map.target_nodes]
+ self.__trg2idx = {node_id: i for i, node_id in enumerate(self.__idx2trg)}
+
+ self._nsyn_table = np.zeros((len(self.__idx2src), len(self.__idx2trg)), dtype=np.uint8)
+
+ def __getitem__(self, item):
+            # TODO: make sure matrix is column oriented, or switch trg and src.
+ indexed_pair = (self.__src2idx[item[0]], self.__trg2idx[item[1]])
+ return self._nsyn_table[indexed_pair]
+
+ def __setitem__(self, key, value):
+ assert(len(key) == 2)
+ indexed_pair = (self.__src2idx[key[0]], self.__trg2idx[key[1]])
+ self._nsyn_table[indexed_pair] = value
+
+ def has_target(self, node_id):
+ return node_id in self.__trg2idx
+
+ @property
+ def nsyn_table(self):
+ return self._nsyn_table
+
+ @property
+ def target_ids(self):
+ return self.__idx2trg
+
+ @property
+ def source_ids(self):
+ return self.__idx2src
+
+ def trg_itr(self, trg_id):
+ trg_i = self.__trg2idx[trg_id]
+ for src_j, src_id in enumerate(self.__idx2src):
+ nsyns = self._nsyn_table[src_j, trg_i]
+ if nsyns:
+ yield src_id, nsyns
+
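+    # EdgeTable usage sketch (illustrative): the table is addressed by
+    # (source_node_id, target_node_id) pairs rather than raw matrix indices,
+    # and the ids must belong to the connection map's node pools:
+    #
+    #   table = self.EdgeTable(connection_map)
+    #   table[src_id, trg_id] = 3          # record three synapses for the pair
+    #   assert table[src_id, trg_id] == 3
+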
+ class PropertyTable(object):
+ # TODO: add support for strings
+ def __init__(self, nvalues):
+ self._prop_array = np.zeros(nvalues)
+ # self._prop_table = np.zeros((nvalues, 1)) # TODO: set dtype
+ self._index = np.zeros((nvalues, 2), dtype=np.uint32)
+ self._itr_index = 0
+
+ def itr_vals(self, src_id, trg_id):
+            indices = np.where((self._index[:, 0] == src_id) & (self._index[:, 1] == trg_id))
+            for val in self._prop_array[indices]:
+                yield val
+
+ def __setitem__(self, key, value):
+ self._index[self._itr_index, 0] = key[0] # src_node_id
+ self._index[self._itr_index, 1] = key[1] # trg_node_id
+ self._prop_array[self._itr_index] = value
+ self._itr_index += 1
+
+ def __getitem__(self, item):
+            indices = np.where((self._index[:, 0] == item[0]) & (self._index[:, 1] == item[1]))
+            return self._prop_array[indices]
+
+
+def add_hdf5_attrs(hdf5_handle):
+ # TODO: move this as a utility function
+ hdf5_handle['/'].attrs['magic'] = np.uint32(0x0A7A)
+ hdf5_handle['/'].attrs['version'] = [np.uint32(0), np.uint32(1)]
diff --git a/bmtk-vb/build/lib/bmtk/builder/networks/input_network.py b/bmtk-vb/build/lib/bmtk/builder/networks/input_network.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/networks/input_network.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/build/lib/bmtk/builder/networks/mpi_network.py b/bmtk-vb/build/lib/bmtk/builder/networks/mpi_network.py
new file mode 100644
index 0000000..aa6a51e
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/networks/mpi_network.py
@@ -0,0 +1,171 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .dm_network import DenseNetwork
+from mpi4py import MPI
+from heapq import heappush, heappop
+import h5py
+
+comm = MPI.COMM_WORLD
+rank = comm.Get_rank()
+nprocs = comm.Get_size()
+
+
+class MPINetwork(DenseNetwork):
+ def __init__(self, name, **network_props):
+ super(MPINetwork, self).__init__(name, **network_props or {})
+ self._edge_assignment = None
+
+ def _add_edges(self, connection_map, i):
+ if self._assign_to_rank(i):
+ super(MPINetwork, self)._add_edges(connection_map, i)
+
+ def save_nodes(self, nodes_file_name, node_types_file_name):
+ if rank == 0:
+ super(MPINetwork, self).save_nodes(nodes_file_name, node_types_file_name)
+ comm.Barrier()
+
+ """
+ def save_edges(self, edges_file_name=None, edge_types_file_name=None, output_dir='.', src_network=None,
+ trg_network=None, force_build=True, force_overwrite=False):
+
+ if rank == 0:
+ # print rank, len(self.edges_table())
+ super(MPINetwork, self).save_edges(edges_file_name, edge_types_file_name, output_dir, src_network,
+ trg_network, force_build, force_overwrite)
+
+ comm.Barrier()
+ """
+
+ def edges_iter(self, trg_gids, src_network=None, trg_network=None):
+ for trg_gid in trg_gids:
+ edges = list(super(MPINetwork, self).edges_iter([trg_gid], src_network, trg_network))
+ collected_edges = comm.gather(edges, root=0)
+ if rank == 0:
+ for edge_list in collected_edges:
+ for edge in edge_list:
+ # print 'b'
+ yield edge
+ else:
+ yield None
+
+ comm.Barrier()
+
+ def _save_edges(self, edges_file_name, src_network, trg_network):
+ target_gids = [n.node_id for n in self._target_networks[trg_network].nodes()]
+ # TODO: make sure target_gids are sorted
+
+ trg_gids_ds = []
+ src_gids_ds = []
+ edge_type_id_ds = []
+ edge_group_ds = []
+ edge_group_index_ds = []
+
+ eg_collection = {}
+ eg_ids = 0
+ eg_lookup = {}
+ eg_table = {}
+ eg_indices = {}
+ for cm in self.get_connections():
+ col_key = cm.properties_keys()
+ if col_key in eg_collection:
+ group_id = eg_collection[col_key]
+ else:
+ group_id = eg_ids
+ eg_collection[col_key] = group_id
+ eg_ids += 1
+ eg_lookup[cm.edge_type_id] = group_id
+ eg_indices[group_id] = 0
+ eg_table[group_id] = {k: [] for k in cm.property_names}
+
+ for e in self.edges_iter(target_gids, src_network=src_network, trg_network=trg_network):
+ if rank == 0:
+ trg_gids_ds.append(e.target_gid)
+ src_gids_ds.append(e.source_gid)
+ edge_type_id_ds.append(e.edge_type_id)
+
+ group_id = eg_lookup[e.edge_type_id]
+ edge_group_ds.append(group_id)
+ group_id_index = eg_indices[group_id]
+ edge_group_index_ds.append(group_id_index)
+ eg_indices[group_id] += 1
+
+ for k, v in e.synaptic_properties.items():
+ eg_table[group_id][k].append(v)
+
+ if rank == 0:
+ # Create index from target_gids dataset
+ index_pointer_ds = []
+ cur_gid = 0
+ index = 0
+ while index < len(trg_gids_ds):
+ if trg_gids_ds[index] == cur_gid:
+ index += 1
+ else:
+ cur_gid += 1
+ index_pointer_ds.append(index)
+ index_pointer_ds.append(len(trg_gids_ds)+1)
+
+
+ with h5py.File(edges_file_name, 'w') as hf:
+ hf.create_dataset('edges/target_gid', data=trg_gids_ds, dtype='uint64')
+ hf['edges/target_gid'].attrs['network'] = trg_network
+ hf.create_dataset('edges/source_gid', data=src_gids_ds, dtype='uint64')
+ hf['edges/source_gid'].attrs['network'] = src_network
+
+ hf.create_dataset('edges/edge_group', data=edge_group_ds, dtype='uint16')
+ hf.create_dataset('edges/edge_group_index', data=edge_group_index_ds, dtype='uint32')
+ hf.create_dataset('edges/edge_type_id', data=edge_type_id_ds, dtype='uint32')
+ hf.create_dataset('edges/index_pointer', data=index_pointer_ds, dtype='uint32')
+
+ for gid, group in eg_table.items():
+ for col_key, col_ds in group.items():
+ ds_loc = 'edges/{}/{}'.format(gid, col_key)
+ hf.create_dataset(ds_loc, data=col_ds)
+
+ comm.Barrier()
+
+ def _assign_to_rank(self, i):
+ if self._edge_assignment is None:
+ self._build_rank_assignments()
+
+ return rank == self._edge_assignment[i]
+
+ def _build_rank_assignments(self):
+ """Builds the _edge_assignment array.
+
+        Division of connections is decided by the maximum possible number of edges (i.e. the number of source and
+        target nodes). In the end the assignment should balance the connection-matrix sizes needed by each rank.
+ """
+ rank_heap = [] # A heap of tuples (weight, rank #)
+ for a in range(nprocs):
+ heappush(rank_heap, (0, a))
+
+ # find the rank with the lowest weight, assign that rank to build the i'th connection matrix, update the rank's
+ # weight and re-add to the heap.
+ # TODO: sort connection_maps in descending order to get better balance
+ self._edge_assignment = []
+ for cm in self.get_connections():
+ r = heappop(rank_heap)
+ self._edge_assignment.append(r[1])
+ heappush(rank_heap, (r[0] + cm.max_connections(), r[1]))
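+    # Worked example (illustrative): with 2 ranks and connection maps whose
+    # max_connections() are [6, 4, 3], the greedy heap assigns map 0 to rank 0
+    # (weights 6/0), map 1 to rank 1 (weights 6/4) and map 2 to rank 1
+    # (weights 6/7), keeping the per-rank work roughly balanced.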
+
diff --git a/bmtk-vb/build/lib/bmtk/builder/networks/nxnetwork.py b/bmtk-vb/build/lib/bmtk/builder/networks/nxnetwork.py
new file mode 100644
index 0000000..3424fd6
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/networks/nxnetwork.py
@@ -0,0 +1,80 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import networkx as nx
+
+from bmtk.builder.network import Network
+from bmtk.builder.node import Node
+
+
+class NxNetwork(Network):
+ def __init__(self, name, **network_props):
+ super(NxNetwork, self).__init__(name, **network_props or {})
+
+ self.net = nx.MultiDiGraph()
+ self.__nodes = []
+
+
+ def _initialize(self):
+ self.net.clear()
+
+ def _add_nodes(self, nodes):
+ self.__nodes += nodes
+ self.net.add_nodes_from(nodes)
+
+ def _add_edges(self, edge, connections):
+ for src, trg, nsyns in connections:
+ self.net.add_edge(src, trg, nsyns=nsyns, edge_type_id=edge.edge_type_id)
+
+
+ def _clear(self):
+ self.net.clear()
+
+ def _nodes_iter(self, nids=None):
+ if nids is not None:
+            return ((nid, d) for nid, d in self.__nodes if nid in nids)
+        else:
+            return self.__nodes
+        # return self.net.nodes_iter(data=True)
+
+ def _edges_iter(self, nids=None, rank=0):
+        if nids is None or len(nids) == 0:
+ for e in self.net.edges(data=True):
+ yield (e[0], e[1], e[2]['nsyns'], e[2]['edge_type_id'])
+ #return self.net.edges(data=True)
+ elif rank == 0:
+ for e in self.net.out_edges(nids, data=True):
+ yield (e[0], e[1], e[2]['nsyns'], e[2]['edge_type_id'])
+ else:
+ for e in self.net.in_edges(nids, data=True):
+ yield (e[0], e[1], e[2]['nsyns'], e[2]['edge_type_id'])
+ #return self.net.in_edges(nids, data=True)
+
+ @property
+ def nnodes(self):
+ return nx.number_of_nodes(self.net)
+
+ @property
+ def nedges(self):
+ return nx.number_of_edges(self.net)
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/builder/networks/sparse_network.py b/bmtk-vb/build/lib/bmtk/builder/networks/sparse_network.py
new file mode 100644
index 0000000..035aaeb
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/networks/sparse_network.py
@@ -0,0 +1,26 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from bmtk.builder.network import Network
+
+class SparseNetwork(Network):
+ pass
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/builder/node.py b/bmtk-vb/build/lib/bmtk/builder/node.py
new file mode 100644
index 0000000..6d1b295
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/node.py
@@ -0,0 +1,76 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+class Node(dict):
+ def __init__(self, node_id, node_params, node_type_properties, params_hash=-1):
+ super(Node, self).__init__({})
+
+ self._node_params = node_params
+ self._node_params['node_id'] = node_id
+ self._node_type_properties = node_type_properties
+ self._params_hash = params_hash
+ self._node_id = node_id
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def node_type_id(self):
+ return self._node_type_properties['node_type_id']
+
+ @property
+ def params(self):
+ return self._node_params
+
+ @property
+ def node_type_properties(self):
+ return self._node_type_properties
+
+ @property
+ def params_hash(self):
+ return self._params_hash
+
+ def get(self, key, default=None):
+ if key in self._node_params:
+ return self._node_params[key]
+ elif key in self._node_type_properties:
+ return self._node_type_properties[key]
+ else:
+ return default
+
+ def __contains__(self, item):
+ return item in self._node_type_properties or item in self._node_params
+
+ def __getitem__(self, item):
+ if item in self._node_params:
+ return self._node_params[item]
+ else:
+ return self._node_type_properties[item]
+
+ def __hash__(self):
+ return hash(self.node_id)
+
+ def __repr__(self):
+ tmp_dict = dict(self._node_type_properties)
+ tmp_dict.update(self._node_params)
+ return tmp_dict.__repr__()
diff --git a/bmtk-vb/build/lib/bmtk/builder/node_pool.py b/bmtk-vb/build/lib/bmtk/builder/node_pool.py
new file mode 100644
index 0000000..2e1bb18
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/node_pool.py
@@ -0,0 +1,106 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from ast import literal_eval
+from six import string_types
+
+
+class NodePool(object):
+ """Stores a collection of nodes based off some query of the network.
+
+    Returns the results of a query of nodes from a network using the nodes() method. Nodes are still generated and
+    saved by the network; this class just stores the query information and provides iterator methods for accessing
+    different nodes.
+
+ TODO:
+ * Implement a collection-set algebra including | and not operators. ie.
+ nodes = net.nodes(type=1) | net.nodes(type=2)
+ * Implement operators on properties
+ nodes = net.nodes(val) > 100
+ nodes = 100 in net.nodes(val)
+ """
+
+ def __init__(self, network, **properties):
+ self.__network = network
+ self.__properties = properties
+ self.__filter_str = None
+
+ def __len__(self):
+ return sum(1 for _ in self)
+
+ def __iter__(self):
+ return (n for n in self.__network.nodes_iter() if self.__query_object_properties(n, self.__properties))
+
+ @property
+ def network(self):
+ return self.__network
+
+ @property
+ def network_name(self):
+ return self.__network.name
+
+ @property
+ def filter_str(self):
+ if self.__filter_str is None:
+ if len(self.__properties) == 0:
+ self.__filter_str = '*'
+ else:
+ self.__filter_str = ''
+ for k, v in self.__properties.items():
+ conditional = "{}=='{}'".format(k, v)
+ self.__filter_str += conditional + '&'
+ if self.__filter_str.endswith('&'):
+ self.__filter_str = self.__filter_str[0:-1]
+
+ return self.__filter_str
+
+ @classmethod
+ def from_filter(cls, network, filter_str):
+ assert(isinstance(filter_str, string_types))
+ if len(filter_str) == 0 or filter_str == '*':
+ return cls(network, position=None)
+
+ properties = {}
+        for conditional in filter_str.split('&'):
+            var, val = conditional.split('==')
+ properties[var] = literal_eval(val)
+ return cls(network, position=None, **properties)
+
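+    # Illustrative example: a pool created via net.nodes(ei='e', layer=4)
+    # reports filter_str "ei=='e'&layer=='4'" (an empty query reports '*');
+    # these strings are what add_edges() records as source/target queries.
+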
+ def __query_object_properties(self, obj, props):
+ if props is None:
+ return True
+
+ for k, v in props.items():
+ ov = obj.get(k, None)
+ if ov is None:
+ return False
+
+ if hasattr(v, '__call__'):
+ if not v(ov):
+ return False
+ elif isinstance(v, list):
+ if ov not in v:
+ return False
+ elif ov != v:
+ return False
+
+ return True
diff --git a/bmtk-vb/build/lib/bmtk/builder/node_set.py b/bmtk-vb/build/lib/bmtk/builder/node_set.py
new file mode 100644
index 0000000..59c1918
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/builder/node_set.py
@@ -0,0 +1,71 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import six
+from .node import Node
+
+
+class NodeSet(object):
+ def __init__(self, N, node_params, node_type_properties):
+ self.__N = N
+ self.__node_params = node_params
+ self.__node_type_properties = node_type_properties
+
+ assert('node_type_id' in node_type_properties)
+ self.__node_type_id = node_type_properties['node_type_id']
+
+ # Used for determining which node_sets share the same params columns
+ columns = list(self.__node_params.keys())
+ columns.sort()
+ self.__params_col_hash = hash(str(columns))
+
+ @property
+ def N(self):
+ return self.__N
+
+ @property
+ def node_type_id(self):
+ return self.__node_type_id
+
+ @property
+ def params_keys(self):
+ return self.__node_params.keys()
+
+ @property
+ def params_hash(self):
+ return self.__params_col_hash
+
+ def build(self, nid_generator):
+ # fetch existing node ids or create new ones
+ node_ids = self.__node_params.get('node_id', None)
+ if node_ids is None:
+ node_ids = [nid for nid in nid_generator(self.N)]
+
+ # turn node_params from dictionary of lists to a list of dictionaries.
+ ap_flat = [{} for _ in six.moves.range(self.N)]
+ for key, plist in self.__node_params.items():
+ for i, val in enumerate(plist):
+ ap_flat[i][key] = val
+
+ # create node objects
+ return [Node(nid, params, self.__node_type_properties, self.__params_col_hash)
+ for (nid, params) in zip(node_ids, ap_flat)]
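+    # Illustrative example: with N=2 and node_params {'x': [0.1, 0.2], 'y': [5, 6]},
+    # ap_flat becomes [{'x': 0.1, 'y': 5}, {'x': 0.2, 'y': 6}], and each dict is
+    # paired with a generated (or supplied) node_id to construct a Node.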
diff --git a/bmtk-vb/build/lib/bmtk/simulator/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/__init__.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/README.md b/bmtk-vb/build/lib/bmtk/simulator/bionet/README.md
new file mode 100644
index 0000000..5448a66
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/README.md
@@ -0,0 +1,4 @@
+## BioNet source code
+
+For instructions on how to install BioNet, please consult the [BioNet tutorial](https://alleninstitute.github.io/bmtk/bionet.html)
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/__init__.py
new file mode 100644
index 0000000..7c86d80
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/__init__.py
@@ -0,0 +1,31 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from bmtk.simulator.bionet.pyfunction_cache import synapse_model, synaptic_weight, cell_model
+from bmtk.simulator.bionet.config import Config
+from bmtk.simulator.bionet.bionetwork import BioNetwork
+from bmtk.simulator.bionet.biosimulator import BioSimulator
+#from bmtk.simulator.bionet.io_tools import io
+
+#io = NEURONIOUtils()
+
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/biocell.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/biocell.py
new file mode 100644
index 0000000..412730c
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/biocell.py
@@ -0,0 +1,323 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+from scipy.stats import norm
+from bmtk.simulator.bionet import utils, nrn
+from bmtk.simulator.bionet.cell import Cell
+import six
+
+from neuron import h
+
+pc = h.ParallelContext() # object to access MPI methods
+
+
+class BioCell(Cell):
+ """Implemntation of a morphologically and biophysically detailed type cell.
+
+ """
+ def __init__(self, node, bionetwork):
+ super(BioCell, self).__init__(node)
+
+ # Set up netcon object that can be used to detect and communicate cell spikes.
+ self.set_spike_detector(bionetwork.spike_threshold)
+
+ self._morph = None
+ self._seg_coords = {}
+
+ # Determine number of segments and store a list of all sections.
+ self._nseg = 0
+ self.set_nseg(bionetwork.dL)
+ self._secs = []
+ self._secs_by_id = []
+ self.set_sec_array()
+
+ self._save_conn = False # bionetwork.save_connection
+ self._synapses = []
+ self._syn_src_net = []
+ self._syn_src_gid = []
+ self._syn_seg_ix = []
+ self._syn_sec_x = []
+ self._edge_type_ids = []
+ self._segments = None
+
+ # potentially used by ecp module
+ self.im_ptr = None
+ self.imVec = None
+
+ # used by xstim module
+ self.ptr2e_extracellular = None
+
+ self.__extracellular_mech = False
+
+ def set_spike_detector(self, spike_threshold):
+ nc = h.NetCon(self.hobj.soma[0](0.5)._ref_v, None, sec=self.hobj.soma[0]) # attach spike detector to cell
+ nc.threshold = spike_threshold
+ pc.cell(self.gid, nc) # associate gid with spike detector
+
+ def set_nseg(self, dL):
+ """Define number of segments in a cell"""
+ self._nseg = 0
+ for sec in self.hobj.all:
+ sec.nseg = 1 + 2 * int(sec.L/(2*dL))
+ self._nseg += sec.nseg # get the total number of segments in the cell
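+        # Worked example (illustrative): with dL = 20, a section of L = 100 um
+        # gets nseg = 1 + 2*int(100/40) = 5. The formula always yields an odd
+        # nseg, so every section keeps a segment centered at x = 0.5.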
+
+ def calc_seg_coords(self, morph_seg_coords):
+ """Update the segment coordinates (after rotations) for individual cells"""
+ phi_y = self._node.rotation_angle_yaxis
+ phi_z = self._node.rotation_angle_zaxis
+ phi_x = self._node.rotation_angle_xaxis
+
+ # Rotate cell
+ # TODO: Rotations should follow as described in sonata (https://github.com/AllenInstitute/sonata/blob/master/docs/SONATA_DEVELOPER_GUIDE.md).
+ # Need someone with graphics experience to check they are being done correctly (I'm not sure atm).
+ RotX = utils.rotation_matrix([1, 0, 0], phi_x)
+ RotY = utils.rotation_matrix([0, 1, 0], phi_y) # rotate segments around yaxis normal to pia
+ RotZ = utils.rotation_matrix([0, 0, 1], -phi_z) # rotate segments around zaxis to get a proper orientation
+ RotXYZ = np.dot(RotX, RotY.dot(RotZ))
+
+ # rotated coordinates around z axis first then shift relative to the soma
+ self._seg_coords['p0'] = self._pos_soma + np.dot(RotXYZ, morph_seg_coords['p0'])
+ self._seg_coords['p1'] = self._pos_soma + np.dot(RotXYZ, morph_seg_coords['p1'])
+ self._seg_coords['p05'] = self._pos_soma + np.dot(RotXYZ, morph_seg_coords['p05'])
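+        # Composition note (illustrative): a morphology point p is mapped to
+        # soma_pos + RotX(phi_x) @ RotY(phi_y) @ RotZ(-phi_z) @ p, i.e. the
+        # z-rotation is applied to p first, then y, then x.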
+
+ def get_seg_coords(self):
+ return self._seg_coords
+
+ @property
+ def morphology_file(self):
+ # TODO: Get from self._node.morphology_file
+ return self._node.morphology_file
+
+ @property
+ def morphology(self):
+ return self._morph
+
+ @morphology.setter
+ def morphology(self, morphology_obj):
+ self.set_morphology(morphology_obj)
+
+ def set_morphology(self, morphology_obj):
+ self._morph = morphology_obj
+
+ def get_sections(self):
+ #return self._secs_by_id
+ return self._secs
+
+ def get_sections_id(self):
+ return self._secs_by_id
+
+ def get_section(self, sec_id):
+ return self._secs[sec_id]
+
+ def store_segments(self):
+ self._segments = []
+ for sec in self._secs:
+ for seg in sec:
+ self._segments.append(seg)
+
+ def get_segments(self):
+ return self._segments
+
+ def set_sec_array(self):
+ """Arrange sections in an array to be access by index"""
+ secs = [] # build ref to sections
+ self._secs_by_id = []
+ for sec in self.hobj.all:
+ self._secs_by_id.append(sec)
+ for _ in sec:
+ secs.append(sec) # section to which segments belongs
+
+ self._secs = np.array(secs)
+
+ def set_syn_connection(self, edge_prop, src_node, stim=None):
+ syn_weight = edge_prop.syn_weight(src_node=src_node, trg_node=self._node)
+
+ if edge_prop.preselected_targets:
+ return self._set_connection_preselected(edge_prop, src_node, syn_weight, stim)
+ else:
+ return self._set_connections(edge_prop, src_node, syn_weight, stim)
+
+ def _set_connection_preselected(self, edge_prop, src_node, syn_weight, stim=None):
+ # TODO: synapses should be loaded by edge_prop.load_synapse
+ sec_x = edge_prop['sec_x']
+ sec_id = edge_prop['sec_id']
+ section = self._secs_by_id[sec_id]
+ # section = self._secs[sec_id]
+ delay = edge_prop['delay']
+ synapse_fnc = nrn.py_modules.synapse_model(edge_prop['model_template'])
+ syn = synapse_fnc(edge_prop['dynamics_params'], sec_x, section)
+
+ if stim is not None:
+ nc = h.NetCon(stim.hobj, syn) # stim.hobj - source, syn - target
+ else:
+ nc = pc.gid_connect(src_node.node_id, syn)
+
+ nc.weight[0] = syn_weight
+ nc.delay = delay
+ self._netcons.append(nc)
+ self._synapses.append(syn)
+ if self._save_conn:
+ self._save_connection(src_gid=src_node.node_id, src_net=src_node.network, sec_x=sec_x, seg_ix=sec_id,
+ edge_type_id=edge_prop.edge_type_id)
+
+ return 1
+
+ def _set_connections(self, edge_prop, src_node, syn_weight, stim=None):
+ try:
+ # Compute probability based on proximity to the peak depths given at network build time
+ if edge_prop['prob_peaks']:
+ tar_seg_prob = np.zeros(len(self._secs))
+ prob_peaks = [float(x) for x in edge_prop['prob_peaks'].split(',')]
+ prob_peak_std = [float(x) for x in edge_prop['prob_peak_std'].split(',')]
+ _z = lambda idx: self._seg_coords['p05'][1, idx]
+ for mu, std in zip(prob_peaks, prob_peak_std):
+ tar_seg_prob += np.array([norm.pdf(_z(idx), mu, std) for idx in range(len(self._secs))])
+ tar_seg_prob = tar_seg_prob / sum(tar_seg_prob)
+ tar_seg_ix = range(len(self._secs))
+ else:
+ raise KeyError() # just to trigger the except block below...
+ except KeyError:
+ # Compute probability based on segment length
+ tar_seg_ix, tar_seg_prob = self._morph.get_target_segments(edge_prop)
+
+
+ src_gid = src_node.node_id
+ nsyns = edge_prop.nsyns
+
+ # choose nsyn elements from seg_ix with probability proportional to segment area
+ segs_ix = self.prng.choice(tar_seg_ix, nsyns, p=tar_seg_prob)
+        secs = self._secs[segs_ix]  # sections where synapses connect
+ xs = self._morph.seg_prop['x'][segs_ix] # distance along the section where synapse connects, i.e., seg_x
+
+        # DEBUG
+        try:
+            _z = lambda idx: self._seg_coords['p05'][1, idx]
+            edge_prop['prob_peaks']  # raises KeyError when depth targeting is off
+            print("DEPTH {}".format(','.join(str(_z(i)) for i in segs_ix)))
+            zs = np.array([_z(i) for i in tar_seg_ix])
+            idx = np.argsort(zs)
+            print('\n'.join(str(s) for s in zip(zs[idx], tar_seg_prob[idx])))
+        except Exception:
+            pass
+        # END DEBUG
+
+ # TODO: this should be done just once
+ synapses = [edge_prop.load_synapses(x, sec) for x, sec in zip(xs, secs)]
+
+ delay = edge_prop['delay']
+ self._synapses.extend(synapses)
+
+ # TODO: Don't save this if not needed
+ self._edge_type_ids.extend([edge_prop.edge_type_id]*len(synapses))
+
+ for syn in synapses:
+ # connect synapses
+ if stim:
+ nc = h.NetCon(stim.hobj, syn)
+ else:
+ nc = pc.gid_connect(src_gid, syn)
+
+ nc.weight[0] = syn_weight
+ nc.delay = delay
+ self.netcons.append(nc)
+
+ return nsyns
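+
+    # Depth-biased targeting sketch (illustrative, standalone numbers): with
+    # prob_peaks='-300' and prob_peak_std='50', a segment whose p05 y-coordinate
+    # is -300 um gets weight norm.pdf(-300, -300, 50) ~= 8.0e-3 while one at
+    # -450 um gets ~8.9e-5; after normalization, each of the nsyns draws is
+    # therefore ~90x more likely to land on the near-peak segment.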
+
+ def _save_connection(self, src_gid, src_net, sec_x, seg_ix, edge_type_id):
+ self._syn_src_gid.append(src_gid)
+ self._syn_src_net.append(src_net)
+ self._syn_sec_x.append(sec_x)
+ self._syn_seg_ix.append(seg_ix)
+        self._edge_type_ids.append(edge_type_id)
+
+ def get_connection_info(self):
+        # TODO: There should be a more efficient and robust way to return synapse information.
+        return [[self.gid, self._syn_src_gid[i], self.network_name, self._syn_src_net[i], self._syn_seg_ix[i],
+                 self._syn_sec_x[i], self.netcons[i].weight[0], self.netcons[i].delay, self._edge_type_ids[i], 0]
+ for i in range(len(self._synapses))]
+
+ def init_connections(self):
+ super(BioCell, self).init_connections()
+ self._synapses = []
+ self._syn_src_gid = []
+ self._syn_seg_ix = []
+ self._syn_sec_x = []
+
+ def __set_extracell_mechanism(self):
+ if not self.__extracellular_mech:
+ for sec in self.hobj.all:
+ sec.insert('extracellular')
+ self.__extracellular_mech = True
+
+ def setup_ecp(self):
+ self.im_ptr = h.PtrVector(self._nseg) # pointer vector
+ # used for gathering an array of i_membrane values from the pointer vector
+ self.im_ptr.ptr_update_callback(self.set_im_ptr)
+ self.imVec = h.Vector(self._nseg)
+
+ self.__set_extracell_mechanism()
+ #for sec in self.hobj.all:
+ # sec.insert('extracellular')
+
+ def setup_xstim(self, set_nrn_mechanism=True):
+ self.ptr2e_extracellular = h.PtrVector(self._nseg)
+ self.ptr2e_extracellular.ptr_update_callback(self.set_ptr2e_extracellular)
+
+ # Set the e_extracellular mechanism for all sections on this hoc object
+ if set_nrn_mechanism:
+ self.__set_extracell_mechanism()
+ #for sec in self.hobj.all:
+ # sec.insert('extracellular')
+
+ def set_im_ptr(self):
+ """Set PtrVector to point to the i_membrane_"""
+ jseg = 0
+ for sec in self.hobj.all:
+ for seg in sec:
+ self.im_ptr.pset(jseg, seg._ref_i_membrane_) # notice the underscore at the end
+ jseg += 1
+
+ def get_im(self):
+ """Gather membrane currents from PtrVector into imVec (does not need a loop!)"""
+ self.im_ptr.gather(self.imVec)
+ # Warning: as_numpy() seems to fail with in neuron 7.4 for python 3
+ # return self.imVec.as_numpy() # (nA)
+ return np.array(self.imVec)
+
+ def set_ptr2e_extracellular(self):
+ jseg = 0
+ for sec in self.hobj.all:
+ for seg in sec:
+ self.ptr2e_extracellular.pset(jseg, seg._ref_e_extracellular)
+ jseg += 1
+
+ def set_e_extracellular(self, vext):
+ self.ptr2e_extracellular.scatter(vext)
+
+ def print_synapses(self):
+ rstr = ''
+ for i in six.moves.range(len(self._syn_src_gid)):
+ rstr += '{}> <-- {} ({}, {}, {}, {})\n'.format(i, self._syn_src_gid[i], self.netcons[i].weight[0],
+ self.netcons[i].delay, self._syn_seg_ix[i],
+ self._syn_sec_x[i])
+ return rstr
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/bionetwork.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/bionetwork.py
new file mode 100644
index 0000000..78ec0ae
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/bionetwork.py
@@ -0,0 +1,262 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+from neuron import h
+
+from bmtk.simulator.core.simulator_network import SimNetwork
+from bmtk.simulator.bionet.biocell import BioCell
+from bmtk.simulator.bionet.pointprocesscell import PointProcessCell
+from bmtk.simulator.bionet.pointsomacell import PointSomaCell
+from bmtk.simulator.bionet.virtualcell import VirtualCell
+from bmtk.simulator.bionet.morphology import Morphology
+from bmtk.simulator.bionet.io_tools import io
+from bmtk.simulator.bionet import nrn
+from bmtk.simulator.bionet.sonata_adaptors import BioNodeAdaptor, BioEdgeAdaptor
+
+# TODO: leave this import, it will initialize some of the default functions for building neurons/synapses/weights.
+import bmtk.simulator.bionet.default_setters
+
+
+pc = h.ParallelContext() # object to access MPI methods
+MPI_size = int(pc.nhost())
+MPI_rank = int(pc.id())
+
+
+class BioNetwork(SimNetwork):
+ model_type_col = 'model_type'
+
+ def __init__(self):
+ # property_schema = property_schema if property_schema is not None else DefaultPropertySchema
+ super(BioNetwork, self).__init__()
+ self._io = io
+
+ # TODO: Find a better way that will allow users to register their own class
+ self._model_type_map = {
+ 'biophysical': BioCell,
+ 'point_process': PointProcessCell,
+ 'point_soma': PointSomaCell,
+ 'virtual': VirtualCell
+ }
+
+ self._morphologies_cache = {}
+ self._morphology_lookup = {}
+
+ self._rank_node_gids = {}
+ self._rank_node_ids = {}
+ self._rank_nodes_by_model = {m_type: {} for m_type in self._model_type_map.keys()}
+ self._remote_node_cache = {}
+ self._virtual_nodes = {}
+
+ self._cells_built = False
+ self._connections_initialized = False
+
+ @property
+ def py_function_caches(self):
+ return nrn
+
+ def get_node_id(self, population, node_id):
+ if node_id in self._rank_node_ids[population]:
+ return self._rank_node_ids[population][node_id].node
+
+ elif node_id in self._remote_node_cache[population]:
+ return self._remote_node_cache[population][node_id]
+
+ else:
+ node_pop = self.get_node_population(population)
+ node = node_pop.get_node(node_id)
+ self._remote_node_cache[population][node_id] = node
+ return node
+
+ def cell_type_maps(self, model_type):
+ return self._rank_nodes_by_model[model_type]
+
+ def get_cell_node_id(self, population, node_id):
+ return self._rank_node_ids[population].get(node_id, None)
+
+ def get_cell_gid(self, gid):
+ return self._rank_node_gids[gid]
+
+ def get_local_cells(self):
+ return self._rank_node_gids
+
+ @property
+ def local_gids(self):
+ return list(self._rank_node_gids.keys())
+
+ def get_virtual_cells(self, population, node_id, spike_trains):
+ if node_id in self._virtual_nodes[population]:
+ return self._virtual_nodes[population][node_id]
+ else:
+ node = self.get_node_id(population, node_id)
+ virt_cell = VirtualCell(node, spike_trains)
+ self._virtual_nodes[population][node_id] = virt_cell
+ return virt_cell
+
+ def _build_cell(self, bionode):
+ if bionode.model_type in self._model_type_map:
+ cell = self._model_type_map[bionode.model_type](bionode, self)
+ self._rank_nodes_by_model[bionode.model_type][cell.gid] = cell
+ return cell
+ else:
+ self.io.log_exception('Unrecognized model_type {}.'.format(bionode.model_type))
+
+ def _register_adaptors(self):
+ super(BioNetwork, self)._register_adaptors()
+ self._node_adaptors['sonata'] = BioNodeAdaptor
+ self._edge_adaptors['sonata'] = BioEdgeAdaptor
+
+ def build_nodes(self):
+ for node_pop in self.node_populations:
+ self._remote_node_cache[node_pop.name] = {}
+ node_ids_map = {}
+ if node_pop.internal_nodes_only:
+ for node in node_pop[MPI_rank::MPI_size]:
+ cell = self._build_cell(node)
+ node_ids_map[node.node_id] = cell
+ self._rank_node_gids[cell.gid] = cell
+
+ elif node_pop.mixed_nodes:
+ # node population contains both internal and virtual (external) nodes and the virtual nodes must be
+ # filtered out
+ self._virtual_nodes[node_pop.name] = {}
+ for node in node_pop[MPI_rank::MPI_size]:
+ if node.model_type == 'virtual':
+ continue
+ else:
+ cell = self._build_cell(node)
+ node_ids_map[node.node_id] = cell
+ self._rank_node_gids[cell.gid] = cell
+
+ elif node_pop.virtual_nodes_only:
+ self._virtual_nodes[node_pop.name] = {}
+
+ self._rank_node_ids[node_pop.name] = node_ids_map
+
+ self.make_morphologies()
+ self.set_seg_props() # set segment properties by creating Morphologies
+ self.calc_seg_coords() # use for computing the ECP
+ self._cells_built = True
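+
+        # Round-robin distribution note (illustrative): with MPI_size = 4 the
+        # slice node_pop[MPI_rank::MPI_size] gives rank 0 the nodes at indices
+        # 0, 4, 8, ..., rank 1 those at 1, 5, 9, ..., so each rank builds (and
+        # owns the gids of) roughly a quarter of the population.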
+
+ def set_seg_props(self):
+ """Set morphological properties for biophysically (morphologically) detailed cells"""
+ for _, morphology in self._morphologies_cache.items():
+ morphology.set_seg_props()
+
+ def calc_seg_coords(self):
+ """Needed for the ECP calculations"""
+ # TODO: Is there any reason this function can't be moved to make_morphologies()
+ for morphology_file, morphology in self._morphologies_cache.items():
+ morph_seg_coords = morphology.calc_seg_coords() # needed for ECP calculations
+
+ for gid in self._morphology_lookup[morphology_file]:
+ self.get_cell_gid(gid).calc_seg_coords(morph_seg_coords)
+
+ def make_morphologies(self):
+ """Creating a Morphology object for each biophysical model"""
+ # TODO: Let Morphology take care of the cache
+ # TODO: Let other types have morphologies
+ # TODO: Get all available morphologies from TypesTable or group
+ for gid, cell in self._rank_node_gids.items():
+ if not isinstance(cell, BioCell):
+ continue
+
+ morphology_file = cell.morphology_file
+ if morphology_file in self._morphologies_cache:
+ # create a single morphology object for each model_group which share that morphology
+ morph = self._morphologies_cache[morphology_file]
+
+ # associate morphology with a cell
+ cell.set_morphology(morph)
+ self._morphology_lookup[morphology_file].append(cell.gid)
+
+ else:
+                hobj = cell.hobj  # get hoc object (hobj) from the first cell with a new morphology
+ morph = Morphology(hobj)
+
+ # associate morphology with a cell
+ cell.set_morphology(morph)
+
+ # create a single morphology object for each model_group which share that morphology
+ self._morphologies_cache[morphology_file] = morph
+ self._morphology_lookup[morphology_file] = [cell.gid]
+
+ self.io.barrier()
+
+ def _init_connections(self):
+ if not self._connections_initialized:
+ for gid, cell in self._rank_node_gids.items():
+ cell.init_connections()
+ self._connections_initialized = True
+
+ def build_recurrent_edges(self):
+ recurrent_edge_pops = [ep for ep in self._edge_populations if not ep.virtual_connections]
+ if not recurrent_edge_pops:
+ return
+
+ self._init_connections()
+ for edge_pop in recurrent_edge_pops:
+ if edge_pop.recurrent_connections:
+ source_population = edge_pop.source_nodes
+ for trg_nid, trg_cell in self._rank_node_ids[edge_pop.target_nodes].items():
+ for edge in edge_pop.get_target(trg_nid):
+ src_node = self.get_node_id(source_population, edge.source_node_id)
+ trg_cell.set_syn_connection(edge, src_node)
+
+ elif edge_pop.mixed_connections:
+                # When dealing with edge populations that contain both virtual and recurrent edges we have to check
+                # every source node to see if it is virtual (because virtual nodes can't be built yet). This check can
+                # significantly slow down the build, so it lives in a separate loop that purely recurrent populations skip.
+ source_population = edge_pop.source_nodes
+ for trg_nid, trg_cell in self._rank_node_ids[edge_pop.target_nodes].items():
+ for edge in edge_pop.get_target(trg_nid):
+ src_node = self.get_node_id(source_population, edge.source_node_id)
+ if src_node.model_type == 'virtual':
+ continue
+ trg_cell.set_syn_connection(edge, src_node)
+
+ def find_edges(self, source_nodes=None, target_nodes=None):
+ selected_edges = self._edge_populations[:]
+
+ if source_nodes is not None:
+ selected_edges = [edge_pop for edge_pop in selected_edges if edge_pop.source_nodes == source_nodes]
+
+ if target_nodes is not None:
+ selected_edges = [edge_pop for edge_pop in selected_edges if edge_pop.target_nodes == target_nodes]
+
+ return selected_edges
+
+ def add_spike_trains(self, spike_trains, node_set):
+ self._init_connections()
+
+ src_nodes = [node_pop for node_pop in self.node_populations if node_pop.name in node_set.population_names()]
+ for src_node_pop in src_nodes:
+ source_population = src_node_pop.name
+ for edge_pop in self.find_edges(source_nodes=source_population):
+ if edge_pop.virtual_connections:
+ for trg_nid, trg_cell in self._rank_node_ids[edge_pop.target_nodes].items():
+ for edge in edge_pop.get_target(trg_nid):
+ src_cell = self.get_virtual_cells(source_population, edge.source_node_id, spike_trains)
+ trg_cell.set_syn_connection(edge, src_cell, src_cell)
+
+ elif edge_pop.mixed_connections:
+ raise NotImplementedError()
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/biosimulator.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/biosimulator.py
new file mode 100644
index 0000000..b1a7e56
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/biosimulator.py
@@ -0,0 +1,357 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import time
+from six import string_types
+from neuron import h
+from bmtk.simulator.core.simulator import Simulator
+from bmtk.simulator.bionet.io_tools import io
+from bmtk.simulator.bionet.iclamp import IClamp
+from bmtk.simulator.bionet import modules as mods
+from bmtk.simulator.core.node_sets import NodeSet
+import bmtk.simulator.utils.simulation_reports as reports
+import bmtk.simulator.utils.simulation_inputs as inputs
+from bmtk.utils.io import spike_trains
+
+
+pc = h.ParallelContext() # object to access MPI methods
+
+
+class BioSimulator(Simulator):
+ """Includes methods to run and control the simulation"""
+
+ def __init__(self, network, dt, tstop, v_init, celsius, cao0, nsteps_block, start_from_state=False):
+ self.net = network
+
+ self._start_from_state = start_from_state
+ self.dt = dt
+ self.tstop = tstop
+
+ self._v_init = v_init
+ self._celsius = celsius
+ self._cao0 = cao0
+ self._h = h
+
+ self.tstep = int(round(h.t / h.dt))
+ self.tstep_start_block = self.tstep
+ self.nsteps = int(round(h.tstop/h.dt))
+
+        # make sure the block size isn't larger than the total number of steps
+        # TODO: should we send a warning that the block-step size is being reset?
+ self._nsteps_block = nsteps_block if self.nsteps > nsteps_block else self.nsteps
+
+ self.__tstep_end_block = 0
+ self.__tstep_start_block = 0
+
+ h.runStopAt = h.tstop
+ h.steps_per_ms = 1/h.dt
+
+ self._set_init_conditions() # call to save state
+ h.cvode.cache_efficient(1)
+
+ h.pysim = self # use this objref to be able to call postFadvance from proc advance in advance.hoc
+ self._iclamps = []
+
+ self._output_dir = 'output'
+ self._log_file = 'output/log.txt'
+
+        self._spikes = {}  # for keeping track of different spike times, keyed by cell gid
+
+ self._cell_variables = [] # location of saved cell variables
+ self._cell_vars_dir = 'output/cellvars'
+
+ self._sim_mods = [] # list of modules.SimulatorMod's
+
+ @property
+ def dt(self):
+ return h.dt
+
+ @dt.setter
+ def dt(self, ms):
+ h.dt = ms
+
+ @property
+ def tstop(self):
+ return h.tstop
+
+ @tstop.setter
+ def tstop(self, ms):
+ h.tstop = ms
+
+ @property
+ def v_init(self):
+ return self._v_init
+
+ @v_init.setter
+ def v_init(self, voltage):
+ self._v_init = voltage
+
+ @property
+ def celsius(self):
+ return self._celsius
+
+ @celsius.setter
+ def celsius(self, c):
+ self._celsius = c
+
+ @property
+ def cao0(self):
+ return self._cao0
+
+ @cao0.setter
+ def cao0(self, cao):
+ self._cao0 = cao
+
+ @property
+ def n_steps(self):
+ return int(round(self.tstop/self.dt))
+
+ @property
+ def cell_variables(self):
+ return self._cell_variables
+
+ @property
+ def cell_var_output(self):
+ return self._cell_vars_dir
+
+ @property
+ def spikes_table(self):
+ return self._spikes
+
+ @property
+ def nsteps_block(self):
+ return self._nsteps_block
+
+ @property
+ def h(self):
+ return self._h
+
+ @property
+ def biophysical_gids(self):
+ return self.net.cell_type_maps('biophysical').keys()
+
+ @property
+ def local_gids(self):
+        return self.net.local_gids
+
+ def __elapsed_time(self, time_s):
+ if time_s < 120:
+ return '{:.4} seconds'.format(time_s)
+ elif time_s < 7200:
+ mins, secs = divmod(time_s, 60)
+ return '{} minutes, {:.4} seconds'.format(mins, secs)
+ else:
+ mins, secs = divmod(time_s, 60)
+ hours, mins = divmod(mins, 60)
+ return '{} hours, {} minutes and {:.4} seconds'.format(hours, mins, secs)
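+
+    # Example outputs (illustrative): __elapsed_time(42.0) -> '42.0 seconds';
+    # __elapsed_time(3700.0) -> '61.0 minutes, 40.0 seconds' (minutes are not
+    # rolled into hours below the 7200 s cutoff); __elapsed_time(7300.0) ->
+    # '2.0 hours, 1.0 minutes and 40.0 seconds'.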
+
+ def _set_init_conditions(self):
+ """Set up the initial conditions: either read from the h.SaveState or from config["condidtions"]"""
+ pc.set_maxstep(10)
+ h.stdinit()
+ self.tstep = int(round(h.t/h.dt))
+ self.tstep_start_block = self.tstep
+
+ if self._start_from_state:
+ # io.read_state()
+ io.log_info('Read the initial state saved at t_sim: {} ms'.format(h.t))
+ else:
+ h.v_init = self.v_init
+
+ h.celsius = self.celsius
+ h.cao0_ca_ion = self.cao0
+
+ def set_spikes_recording(self):
+ for gid, _ in self.net.get_local_cells().items():
+ tvec = self.h.Vector()
+ gidvec = self.h.Vector()
+ pc.spike_record(gid, tvec, gidvec)
+ self._spikes[gid] = tvec
+
+ def attach_current_clamp(self, amplitude, delay, duration, gids=None):
+        # TODO: verify current clamp works with MPI
+        # TODO: Create an appropriate module
+ if gids is None:
+            gids = self.biophysical_gids  # default to all biophysical cells on this rank
+ if isinstance(gids, int):
+ gids = [gids]
+ elif isinstance(gids, string_types):
+ gids = [int(gids)]
+ elif isinstance(gids, NodeSet):
+ gids = gids.gids()
+
+
+ gids = list(set(self.local_gids) & set(gids))
+ for gid in gids:
+ cell = self.net.get_cell_gid(gid)
+ Ic = IClamp(amplitude, delay, duration)
+ Ic.attach_current(cell)
+ self._iclamps.append(Ic)
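+
+        # Usage sketch (hypothetical values; units assumed to follow NEURON's
+        # IClamp convention of nA and ms): inject 0.12 nA for 1000 ms starting
+        # at 500 ms into two specific cells:
+        #
+        #   sim.attach_current_clamp(amplitude=0.12, delay=500.0,
+        #                            duration=1000.0, gids=[0, 1])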
+
+ def add_mod(self, module):
+ self._sim_mods.append(module)
+
+ def run(self):
+ """Run the simulation:
+        if beginning from a blank state, it will use h.run();
+        if continuing from a saved state, it will use h.continuerun().
+ """
+ for mod in self._sim_mods:
+ mod.initialize(self)
+
+ self.start_time = h.startsw()
+ s_time = time.time()
+ pc.timeout(0)
+
+ pc.barrier() # wait for all hosts to get to this point
+ io.log_info('Running simulation for {:.3f} ms with the time step {:.3f} ms'.format(self.tstop, self.dt))
+ io.log_info('Starting timestep: {} at t_sim: {:.3f} ms'.format(self.tstep, h.t))
+ io.log_info('Block save every {} steps'.format(self.nsteps_block))
+
+ if self._start_from_state:
+ h.continuerun(h.tstop)
+ else:
+            h.run(h.tstop)  # <- runs simulation; works in parallel
+
+ pc.barrier()
+
+ for mod in self._sim_mods:
+ mod.finalize(self)
+ pc.barrier()
+
+ end_time = time.time()
+
+ sim_time = self.__elapsed_time(end_time - s_time)
+ io.log_info('Simulation completed in {} '.format(sim_time))
+
+ def report_load_balance(self):
+ comptime = pc.step_time()
+ avgcomp = pc.allreduce(comptime, 1)/pc.nhost()
+ maxcomp = pc.allreduce(comptime, 2)
+ io.log_info('Maximum compute time is {} seconds.'.format(maxcomp))
+ io.log_info('Approximate exchange time is {} seconds.'.format(comptime - maxcomp))
+ if maxcomp != 0.0:
+ io.log_info('Load balance is {}.'.format(avgcomp/maxcomp))
+
+ def post_fadvance(self):
+ """
+ Runs after every execution of fadvance (see advance.hoc)
+ Called after every time step to perform computation and save data to memory block or to disk.
+ The initial condition tstep=0 is not being saved
+ """
+ for mod in self._sim_mods:
+ mod.step(self, self.tstep)
+
+ self.tstep += 1
+
+ if (self.tstep % self.nsteps_block == 0) or self.tstep == self.nsteps:
+ io.log_info(' step:{} t_sim:{:.2f} ms'.format(self.tstep, h.t))
+ self.__tstep_end_block = self.tstep
+ time_step_interval = (self.__tstep_start_block, self.__tstep_end_block)
+
+ for mod in self._sim_mods:
+ mod.block(self, time_step_interval)
+
+ self.__tstep_start_block = self.tstep # starting point for the next block
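+
+        # Block bookkeeping sketch (illustrative): with nsteps_block = 5000,
+        # mod.block() first fires with time_step_interval = (0, 5000), then
+        # (5000, 10000), and so on; a final, possibly shorter interval is
+        # emitted when tstep == nsteps even if it is not a multiple of 5000.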
+
+ @classmethod
+ def from_config(cls, config, network, set_recordings=True):
+ # TODO: convert from json to sonata config if necessary
+
+ sim = cls(network=network,
+ dt=config.dt,
+ tstop=config.tstop,
+ v_init=config.v_init,
+ celsius=config.celsius,
+ cao0=config.cao0,
+ nsteps_block=config.block_step)
+
+ network.io.log_info('Building cells.')
+ network.build_nodes()
+
+ network.io.log_info('Building recurrent connections')
+ network.build_recurrent_edges()
+
+ # TODO: Need to create a gid selector
+ for sim_input in inputs.from_config(config):
+ node_set = network.get_node_set(sim_input.node_set)
+ if sim_input.input_type == 'spikes':
+ spikes = spike_trains.SpikesInput.load(name=sim_input.name, module=sim_input.module,
+ input_type=sim_input.input_type, params=sim_input.params)
+ io.log_info('Build virtual cell stimulations for {}'.format(sim_input.name))
+ network.add_spike_trains(spikes, node_set)
+
+ elif sim_input.module == 'IClamp':
+ # TODO: Parse from csv file
+ amplitude = sim_input.params['amp']
+ delay = sim_input.params['delay']
+ duration = sim_input.params['duration']
+                # node_set (resolved above from sim_input.node_set) selects the target gids
+                sim.attach_current_clamp(amplitude, delay, duration, node_set)
+
+ elif sim_input.module == 'xstim':
+ sim.add_mod(mods.XStimMod(**sim_input.params))
+
+ else:
+            io.log_exception('Cannot parse input format {}'.format(sim_input.name))
+
+ if config.calc_ecp:
+ for gid, cell in network.cell_type_maps('biophysical').items():
+ cell.setup_ecp()
+ sim.h.cvode.use_fast_imem(1)
+
+ # Parse the "reports" section of the config and load an associated output module for each report
+ sim_reports = reports.from_config(config)
+ for report in sim_reports:
+ if isinstance(report, reports.SpikesReport):
+ mod = mods.SpikesMod(**report.params)
+
+ elif isinstance(report, reports.SectionReport):
+ mod = mods.SectionReport(**report.params)
+
+ elif isinstance(report, reports.MembraneReport):
+ if report.params['sections'] == 'soma':
+ mod = mods.SomaReport(**report.params)
+
+ else:
+ mod = mods.MembraneReport(**report.params)
+
+ elif isinstance(report, reports.ECPReport):
+ assert config.calc_ecp
+ mod = mods.EcpMod(**report.params)
+ # Set up the ability for ecp on all relevant cells
+ # TODO: According to spec we need to allow a different subset other than only biophysical cells
+ # for gid, cell in network.cell_type_maps('biophysical').items():
+ # cell.setup_ecp()
+
+ elif report.module == 'save_synapses':
+ mod = mods.SaveSynapses(**report.params)
+
+ else:
+ # TODO: Allow users to register customized modules using pymodules
+ io.log_warning('Unrecognized module {}, skipping.'.format(report.module))
+ continue
+
+ sim.add_mod(mod)
+
+ return sim
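+
+
+# Usage sketch (assumes the standard bmtk run-script pattern; loader names such
+# as Config.from_json may vary between bmtk versions):
+#
+#   from bmtk.simulator import bionet
+#
+#   conf = bionet.Config.from_json('simulation_config.json', validate=True)
+#   conf.build_env()
+#   net = bionet.BioNetwork.from_config(conf)
+#   sim = bionet.BioSimulator.from_config(conf, network=net)
+#   sim.run()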
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/cell.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/cell.py
new file mode 100644
index 0000000..190836a
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/cell.py
@@ -0,0 +1,104 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from neuron import h
+import numpy as np
+
+
+pc = h.ParallelContext() # object to access MPI methods
+MPI_RANK = int(pc.id())
+
+
+class Cell(object):
+ """A abstract base class for any cell object.
+
+ A base class for implementation of a cell-type objects like biophysical cells, LIF cells, etc. Do not instantiate
+ a Cell object directly. Cell classes act as wrapper around HOC cell object with extra functionality for setting
+ positions, synapses, and other parameters depending on the desired cell class.
+ """
+ def __init__(self, node):
+ self._node = node
+ self._gid = node.gid
+ self._node_id = node.node_id
+ self._props = node
+ self._netcons = [] # list of NEURON network connection object attached to this cell
+
+ self._pos_soma = []
+ self.set_soma_position()
+
+ # register the cell
+ pc.set_gid2node(self.gid, MPI_RANK)
+
+ # Load the NEURON HOC object
+ self._hobj = node.load_cell()
+
+ @property
+ def node(self):
+ return self._node
+
+ @property
+ def hobj(self):
+ return self._hobj
+
+ @property
+ def gid(self):
+ return self._gid
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def group_id(self):
+ return self._node.group_id
+
+ @property
+ def network_name(self):
+ return self._node.network
+
+ @property
+ def netcons(self):
+ return self._netcons
+
+ @property
+ def soma_position(self):
+ return self._pos_soma
+
+ def set_soma_position(self):
+ positions = self._node.position
+ if positions is not None:
+ self._pos_soma = positions.reshape(3, 1)
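+
+        # Shape note (illustrative): a SONATA position such as
+        # np.array([10., -350., 25.]) becomes a (3, 1) column vector, which
+        # broadcasts against the (3, nseg) coordinate arrays used by
+        # BioCell.calc_seg_coords().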
+
+ def init_connections(self):
+ self.rand_streams = []
+ self.prng = np.random.RandomState(self.gid) # generate random stream based on gid
+
+ def scale_weights(self, factor):
+ for nc in self.netcons:
+ weight = nc.weight[0]
+ nc.weight[0] = weight*factor
+
+ def get_connection_info(self):
+ return []
+
+ def set_syn_connections(self, edge_prop, src_node, stim=None):
+ raise NotImplementedError
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/config.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/config.py
new file mode 100644
index 0000000..7a43cd1
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/config.py
@@ -0,0 +1,84 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+
+from neuron import h
+
+#import bmtk.simulator.utils.config as msdk_config
+#from bmtk.utils.sonata.config import SonataConfig
+#from bmtk.simulator.core.config import ConfigDict
+from bmtk.simulator.utils.config import ConfigDict
+from bmtk.simulator.utils.sim_validator import SimConfigValidator
+from bmtk.simulator.bionet.io_tools import io
+from . import nrn
+
+pc = h.ParallelContext() # object to access MPI methods
+MPI_Rank = int(pc.id())
+
+
+# load the configuration schema
+schema_folder = os.path.join(os.path.dirname(__file__), 'schemas')
+config_schema_file = os.path.join(schema_folder, 'config_schema.json')
+
+# json schemas (but not real jsonschema) to describe the various input file formats
+file_formats = [
+ ("csv:nodes_internal", os.path.join(schema_folder, 'csv_nodes_internal.json')),
+ ("csv:node_types_internal", os.path.join(schema_folder, 'csv_node_types_internal.json')),
+ ("csv:edge_types", os.path.join(schema_folder, 'csv_edge_types.json')),
+ ("csv:nodes_external", os.path.join(schema_folder, 'csv_nodes_external.json')),
+ ("csv:node_types_external", os.path.join(schema_folder, 'csv_node_types_external.json'))
+]
+
+# Create a config and input file validator for Bionet
+with open(config_schema_file, 'r') as f:
+ config_schema = json.load(f)
+bionet_validator = SimConfigValidator(config_schema, file_formats=file_formats)
+
+
+class Config(ConfigDict):
+ @property
+ def cao0(self):
+ return self.conditions['cao0']
+
+ @staticmethod
+ def get_validator():
+ return bionet_validator
+
+ def create_output_dir(self):
+ io.setup_output_dir(self.output_dir, self.log_file)
+
+ def load_nrn_modules(self):
+ nrn.load_neuron_modules(self.mechanisms_dir, self.templates_dir)
+
+ def build_env(self):
+ if MPI_Rank == 0:
+ self.create_output_dir()
+ self.copy_to_output()
+
+ if io.mpi_size > 1:
+ # A friendly message requested by fb
+ io.log_info('Running NEURON with mpi ({} cores).'.format(io.mpi_size))
+
+ pc.barrier()
+ self.load_nrn_modules()
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/default_setters/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_setters/__init__.py
new file mode 100644
index 0000000..4ad0b56
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_setters/__init__.py
@@ -0,0 +1,25 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from . import cell_models
+from . import synapse_models
+from . import synaptic_weights
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/default_setters/cell_models.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_setters/cell_models.py
new file mode 100644
index 0000000..16d5bfb
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_setters/cell_models.py
@@ -0,0 +1,460 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import numpy as np
+from neuron import h
+try:
+ from sklearn.decomposition import PCA
+except Exception as e:
+ pass
+
+from bmtk.simulator.bionet.pyfunction_cache import add_cell_model, add_cell_processor
+from bmtk.simulator.bionet.io_tools import io
+from bmtk.simulator.bionet.nml_reader import NMLTree
+
+"""
+Functions for loading NEURON cell objects.
+
+Functions will be loaded by bionetwork and called when a new cell object is created. These are for standard models
+loaded with Cell-Types json files or their NeuroML equivalents, but they may be overridden by the user.
+"""
+
+
+def IntFire1(cell, template_name, dynamics_params):
+ """Loads a point integrate and fire neuron"""
+ hobj = h.IntFire1()
+ hobj.tau = dynamics_params['tau']*1000.0 # Convert from seconds to ms.
+ hobj.refrac = dynamics_params['refrac']*1000.0 # Convert from seconds to ms.
+ return hobj
+
+
+def Biophys1(cell, template_name, dynamic_params):
+ """Loads a biophysical NEURON hoc object using Cell-Types database objects."""
+ morphology_file = cell.morphology_file
+ hobj = h.Biophys1(str(morphology_file))
+ #fix_axon(hobj)
+ #set_params_peri(hobj, dynamic_params)
+ return hobj
+
+
+def Biophys1_nml(json_file):
+ # TODO: look at examples to see how to convert .nml files
+ raise NotImplementedError()
+
+
+def Biophys1_dict(cell):
+ """ Set parameters for cells from the Allen Cell Types database Prior to setting parameters will replace the
+ axon with the stub
+ """
+ morphology_file = cell['morphology']
+ hobj = h.Biophys1(str(morphology_file))
+ return hobj
+
+
+def aibs_perisomatic(hobj, cell, dynamics_params):
+ if dynamics_params is not None:
+ fix_axon_peri(hobj)
+ set_params_peri(hobj, dynamics_params)
+
+ return hobj
+
+
+def fix_axon_peri(hobj):
+ """Replace reconstructed axon with a stub
+
+ :param hobj: hoc object
+ """
+ for sec in hobj.axon:
+ h.delete_section(sec=sec)
+
+ h.execute('create axon[2]', hobj)
+
+ for sec in hobj.axon:
+ sec.L = 30
+ sec.diam = 1
+ hobj.axonal.append(sec=sec)
+        hobj.all.append(sec=sec)
+
+ hobj.axon[0].connect(hobj.soma[0], 0.5, 0)
+ hobj.axon[1].connect(hobj.axon[0], 1, 0)
+
+ h.define_shape()
+
+
+def set_params_peri(hobj, biophys_params):
+ """Set biophysical parameters for the cell
+
+ :param hobj: NEURON's cell object
+ :param biophys_params: name of json file with biophys params for cell's model which determine spiking behavior
+ :return:
+ """
+ passive = biophys_params['passive'][0]
+ conditions = biophys_params['conditions'][0]
+ genome = biophys_params['genome']
+
+ # Set passive properties
+ cm_dict = dict([(c['section'], c['cm']) for c in passive['cm']])
+ for sec in hobj.all:
+ sec.Ra = passive['ra']
+ sec.cm = cm_dict[sec.name().split(".")[1][:4]]
+ sec.insert('pas')
+
+ for seg in sec:
+ seg.pas.e = passive["e_pas"]
+
+ # Insert channels and set parameters
+ for p in genome:
+ sections = [s for s in hobj.all if s.name().split(".")[1][:4] == p["section"]]
+
+ for sec in sections:
+ if p["mechanism"] != "":
+ sec.insert(p["mechanism"])
+ setattr(sec, p["name"], p["value"])
+
+ # Set reversal potentials
+ for erev in conditions['erev']:
+ sections = [s for s in hobj.all if s.name().split(".")[1][:4] == erev["section"]]
+ for sec in sections:
+ sec.ena = erev["ena"]
+ sec.ek = erev["ek"]
+
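+# Shape of the biophys_params dict expected above (abridged sketch with
+# hypothetical values, inferred from the lookups in set_params_peri):
+#
+#   {
+#     "passive": [{"ra": 100.0, "e_pas": -85.0,
+#                  "cm": [{"section": "soma", "cm": 1.0},
+#                         {"section": "dend", "cm": 2.0}]}],
+#     "conditions": [{"erev": [{"section": "soma", "ena": 53.0, "ek": -107.0}]}],
+#     "genome": [{"section": "soma", "mechanism": "NaTs2_t",
+#                 "name": "gbar_NaTs2_t", "value": 0.5}]
+#   }
+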
+
+def aibs_allactive(hobj, cell, dynamics_params):
+ fix_axon_allactive(hobj)
+ set_params_allactive(hobj, dynamics_params)
+ return hobj
+
+
+def fix_axon_allactive(hobj):
+ """Replace reconstructed axon with a stub
+
+ Parameters
+ ----------
+ hobj: instance of a Biophysical template
+ NEURON's cell object
+ """
+    # Find the start and end diameters of the original axon; this differs from the perisomatic cell model,
+    # where diameter == 1.
+ axon_diams = [hobj.axon[0].diam, hobj.axon[0].diam]
+ for sec in hobj.all:
+ section_name = sec.name().split(".")[1][:4]
+ if section_name == 'axon':
+ axon_diams[1] = sec.diam
+
+ for sec in hobj.axon:
+ h.delete_section(sec=sec)
+
+ h.execute('create axon[2]', hobj)
+ for index, sec in enumerate(hobj.axon):
+ sec.L = 30
+ sec.diam = axon_diams[index] # 1
+
+ hobj.axonal.append(sec=sec)
+        hobj.all.append(sec=sec)
+
+ hobj.axon[0].connect(hobj.soma[0], 1.0, 0)
+ hobj.axon[1].connect(hobj.axon[0], 1.0, 0)
+
+ h.define_shape()
+
+
+def set_params_allactive(hobj, params_dict):
+ # params_dict = json.load(open(params_file_name, 'r'))
+ passive = params_dict['passive'][0]
+ genome = params_dict['genome']
+ conditions = params_dict['conditions'][0]
+
+ section_map = {}
+ for sec in hobj.all:
+ section_name = sec.name().split(".")[1][:4]
+ if section_name in section_map:
+ section_map[section_name].append(sec)
+ else:
+ section_map[section_name] = [sec]
+
+ for sec in hobj.all:
+ sec.insert('pas')
+ # sec.insert('extracellular')
+
+ if 'e_pas' in passive:
+ e_pas_val = passive['e_pas']
+ for sec in hobj.all:
+ for seg in sec:
+ seg.pas.e = e_pas_val
+
+ if 'ra' in passive:
+ ra_val = passive['ra']
+ for sec in hobj.all:
+ sec.Ra = ra_val
+
+ if 'cm' in passive:
+ # print('Setting cm')
+ for cm_dict in passive['cm']:
+ cm = cm_dict['cm']
+ for sec in section_map.get(cm_dict['section'], []):
+ sec.cm = cm
+
+ for genome_dict in genome:
+ g_section = genome_dict['section']
+ if genome_dict['section'] == 'glob':
+ io.log_warning("There is a section called glob, probably old json file")
+ continue
+
+ g_value = float(genome_dict['value'])
+ g_name = genome_dict['name']
+ g_mechanism = genome_dict.get("mechanism", "")
+ for sec in section_map.get(g_section, []):
+ if g_mechanism != "":
+ sec.insert(g_mechanism)
+ setattr(sec, g_name, g_value)
+
+ for erev in conditions['erev']:
+ erev_section = erev['section']
+ erev_ena = erev['ena']
+ erev_ek = erev['ek']
+
+ if erev_section in section_map:
+ for sec in section_map.get(erev_section, []):
+ if h.ismembrane('k_ion', sec=sec) == 1:
+ setattr(sec, 'ek', erev_ek)
+ if h.ismembrane('na_ion', sec=sec) == 1:
+ setattr(sec, 'ena', erev_ena)
+ else:
+ io.log_warning("Can't set erev for {}, section array doesn't exist".format(erev_section))
+
+
+def aibs_perisomatic_directed(hobj, cell, dynamics_params):
+ fix_axon_perisomatic_directed(hobj)
+ set_params_peri(hobj, dynamics_params)
+ return hobj
+
+
+def aibs_allactive_directed(hobj, cell, dynamics_params):
+ fix_axon_allactive_directed(hobj)
+ set_params_allactive(hobj, dynamics_params)
+ return hobj
+
+
+def fix_axon_perisomatic_directed(hobj):
+ # io.log_info('Fixing Axon like perisomatic')
+ all_sec_names = []
+ for sec in hobj.all:
+ all_sec_names.append(sec.name().split(".")[1][:4])
+
+ if 'axon' not in all_sec_names:
+        io.log_exception('There is no axonal reconstruction in the swc file.')
+ else:
+ beg1, end1, beg2, end2 = get_axon_direction(hobj)
+
+ for sec in hobj.axon:
+ h.delete_section(sec=sec)
+ h.execute('create axon[2]', hobj)
+
+ h.pt3dadd(beg1[0], beg1[1], beg1[2], 1, sec=hobj.axon[0])
+ h.pt3dadd(end1[0], end1[1], end1[2], 1, sec=hobj.axon[0])
+ hobj.all.append(sec=hobj.axon[0])
+ h.pt3dadd(beg2[0], beg2[1], beg2[2], 1, sec=hobj.axon[1])
+ h.pt3dadd(end2[0], end2[1], end2[2], 1, sec=hobj.axon[1])
+ hobj.all.append(sec=hobj.axon[1])
+
+ hobj.axon[0].connect(hobj.soma[0], 0.5, 0)
+ hobj.axon[1].connect(hobj.axon[0], 1.0, 0)
+
+ hobj.axon[0].L = 30.0
+ hobj.axon[1].L = 30.0
+
+ h.define_shape()
+
+ for sec in hobj.axon:
+ # print "sec.L:", sec.L
+ if np.abs(30-sec.L) > 0.0001:
+            io.log_exception('Axon stub L is not 30')
+
+
+def fix_axon_allactive_directed(hobj):
+ all_sec_names = []
+ for sec in hobj.all:
+ all_sec_names.append(sec.name().split(".")[1][:4])
+
+ if 'axon' not in all_sec_names:
+        io.log_exception('There is no axonal reconstruction in the swc file.')
+ else:
+ beg1, end1, beg2, end2 = get_axon_direction(hobj)
+
+ axon_diams = [hobj.axon[0].diam, hobj.axon[0].diam]
+ for sec in hobj.all:
+ section_name = sec.name().split(".")[1][:4]
+ if section_name == 'axon':
+ axon_diams[1] = sec.diam
+
+ for sec in hobj.axon:
+ h.delete_section(sec=sec)
+ h.execute('create axon[2]', hobj)
+ hobj.axon[0].connect(hobj.soma[0], 1.0, 0)
+ hobj.axon[1].connect(hobj.axon[0], 1.0, 0)
+
+ h.pt3dadd(beg1[0], beg1[1], beg1[2], axon_diams[0], sec=hobj.axon[0])
+ h.pt3dadd(end1[0], end1[1], end1[2], axon_diams[0], sec=hobj.axon[0])
+ hobj.all.append(sec=hobj.axon[0])
+ h.pt3dadd(beg2[0], beg2[1], beg2[2], axon_diams[1], sec=hobj.axon[1])
+ h.pt3dadd(end2[0], end2[1], end2[2], axon_diams[1], sec=hobj.axon[1])
+ hobj.all.append(sec=hobj.axon[1])
+
+ hobj.axon[0].L = 30.0
+ hobj.axon[1].L = 30.0
+
+ h.define_shape()
+
+ for sec in hobj.axon:
+ # io.log_info('sec.L: {}'.format(sec.L))
+ if np.abs(30 - sec.L) > 0.0001:
+            io.log_exception('Axon stub L is not 30')
+
+
+def get_axon_direction(hobj):
+ for sec in hobj.somatic:
+ n3d = int(h.n3d()) # get number of n3d points in each section
+ soma_end = np.asarray([h.x3d(n3d - 1), h.y3d(n3d - 1), h.z3d(n3d - 1)])
+ mid_point = int(n3d / 2)
+ soma_mid = np.asarray([h.x3d(mid_point), h.y3d(mid_point), h.z3d(mid_point)])
+
+ for sec in hobj.all:
+ section_name = sec.name().split(".")[1][:4]
+ if section_name == 'axon':
+ n3d = int(h.n3d()) # get number of n3d points in each section
+ axon_p3d = np.zeros((n3d, 3)) # to hold locations of 3D morphology for the current section
+ for i in range(n3d):
+ axon_p3d[i, 0] = h.x3d(i)
+                axon_p3d[i, 1] = h.y3d(i)
+ axon_p3d[i, 2] = h.z3d(i)
+
+ # Add soma coordinates to the list
+ p3d = np.concatenate(([soma_mid], axon_p3d), axis=0)
+
+ # Compute PCA
+ pca = PCA(n_components=3)
+ pca.fit(p3d)
+ unit_v = pca.components_[0]
+
+ mag_v = np.sqrt(pow(unit_v[0], 2) + pow(unit_v[1], 2) + pow(unit_v[2], 2))
+ unit_v[0] = unit_v[0] / mag_v
+ unit_v[1] = unit_v[1] / mag_v
+ unit_v[2] = unit_v[2] / mag_v
+
+ # Find the direction
+ axon_end = axon_p3d[-1] - soma_mid
+ if np.dot(unit_v, axon_end) < 0:
+ unit_v *= -1
+
+ axon_seg_coor = np.zeros((4, 3))
+ # unit_v = np.asarray([0,1,0])
+ axon_seg_coor[0] = soma_end
+ axon_seg_coor[1] = soma_end + (unit_v * 30.)
+ axon_seg_coor[2] = soma_end + (unit_v * 30.)
+ axon_seg_coor[3] = soma_end + (unit_v * 60.)
+
+ return axon_seg_coor
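+
+
+# Geometry note (illustrative): the first principal component of the soma
+# midpoint plus the axon's 3D points gives the dominant axon direction. The
+# returned 4x3 array holds two collinear 30-um stubs, [beg1, end1, beg2, end2]
+# with end1 == beg2, which fix_axon_*_directed() unpacks to rebuild axon[0]
+# and axon[1].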
+
+
+nml_files = {} # For caching neuroml file trees
+def NMLLoad(cell, template_name, dynamic_params):
+ """Convert a NEUROML file to a NEURON hoc cell object.
+
+ Current limitations:
+ * Ignores nml morphology section. You must pass in a swc file
+ * Only for biophysically detailed cell biophysical components. All properties must be assigned to a segment group.
+
+ :param cell:
+ :param template_name:
+ :param dynamic_params:
+ :return:
+ """
+    # Last I checked there is no built-in way to load an NML file directly into NEURON through the API; instead we
+    # have to manually parse the nml file and build the NEURON cell object section-by-section.
+ morphology_file = cell.morphology_file
+ hobj = h.Biophys1(str(morphology_file))
+    # Whether the axon is cut before or after setting cell channels and mechanisms can produce drastically
+    # different results. Currently NML files don't produce the same results if you use model_processing directives.
+    # TODO: Find a way to specify a model_processing directive with an NML file
+ fix_axon_peri(hobj)
+
+    # Load the NML parameter tree for this template, parsing and caching the file on first use
+ if template_name in nml_files:
+ nml_params = nml_files[template_name]
+ else:
+ # Parse the NML parameters file xml tree and cache.
+ biophys_dirs = cell.network.get_component('biophysical_neuron_models_dir')
+ nml_path = os.path.join(biophys_dirs, template_name)
+ nml_params = NMLTree(nml_path)
+ nml_files[template_name] = nml_params
+
+ # Iterate through the NML tree by section and use the properties to manually create cell mechanisms
+ section_lists = [(sec, sec.name().split(".")[1][:4]) for sec in hobj.all]
+ for sec, sec_name in section_lists:
+ for prop_name, prop_obj in nml_params[sec_name].items():
+ if prop_obj.element_tag() == 'resistivity':
+ sec.Ra = prop_obj.value
+
+ elif prop_obj.element_tag() == 'specificCapacitance':
+ sec.cm = prop_obj.value
+
+ elif prop_obj.element_tag() == 'channelDensity' and prop_obj.ion_channel == 'pas':
+ sec.insert('pas')
+ setattr(sec, 'g_pas', prop_obj.cond_density)
+ for seg in sec:
+ seg.pas.e = prop_obj.erev
+
+ elif prop_obj.element_tag() == 'channelDensity' or prop_obj.element_tag() == 'channelDensityNernst':
+ sec.insert(prop_obj.ion_channel)
+ setattr(sec, prop_obj.id, prop_obj.cond_density)
+                if prop_obj.ion == 'na':
+ sec.ena = prop_obj.erev
+ elif prop_obj.ion == 'k':
+ sec.ek = prop_obj.erev
+
+ elif prop_obj.element_tag() == 'concentrationModel':
+ sec.insert(prop_obj.id)
+ setattr(sec, 'gamma_' + prop_obj.type, prop_obj.gamma)
+ setattr(sec, 'decay_' + prop_obj.type, prop_obj.decay)
+
+ return hobj
+
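+# Sketch of the NMLTree access pattern consumed above (keys are 4-character
+# section names such as 'soma'; 'some_chan' is a hypothetical entry):
+#
+#   prop_obj = nml_params['soma']['some_chan']
+#   prop_obj.element_tag()   # e.g. 'channelDensity'
+#   prop_obj.ion_channel     # mechanism name passed to sec.insert()
+#   prop_obj.cond_density    # conductance assigned via setattr(sec, prop_obj.id, ...)
+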
+def set_extracellular(hobj, cell, dynamics_params):
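+    # Insert NEURON's built-in 'extracellular' mechanism on every section so
+    # that transmembrane currents are available for extracellular potential
+    # (ECP) calculations.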
+ for sec in hobj.all:
+ sec.insert('extracellular')
+
+ return hobj
+
+
+add_cell_model(NMLLoad, directive='nml', model_type='biophysical')
+add_cell_model(Biophys1, directive='ctdb:Biophys1', model_type='biophysical', overwrite=False)
+add_cell_model(Biophys1, directive='ctdb:Biophys1.hoc', model_type='biophysical', overwrite=False)
+add_cell_model(IntFire1, directive='nrn:IntFire1', model_type='point_process', overwrite=False)
+
+
+add_cell_processor(aibs_perisomatic, overwrite=False)
+add_cell_processor(aibs_allactive, overwrite=False)
+add_cell_processor(aibs_perisomatic_directed, overwrite=False)
+add_cell_processor(aibs_allactive_directed, overwrite=False)
+add_cell_processor(set_extracellular, overwrite=False)
+add_cell_processor(set_extracellular, 'extracellular', overwrite=False)
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/default_setters/synapse_models.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_setters/synapse_models.py
new file mode 100644
index 0000000..013cbcb
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_setters/synapse_models.py
@@ -0,0 +1,206 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from neuron import h
+
+from bmtk.simulator.bionet.pyfunction_cache import add_synapse_model
+from bmtk.simulator.bionet.nrn import *
+
+
+def exp2syn(syn_params, xs, secs):
+ """Create a list of exp2syn synapses
+
+ :param syn_params: parameters of a synapse
+ :param xs: list of normalized distances along the section
+ :param secs: target sections
+    :return: list of NEURON synapse objects
+ """
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.Exp2Syn(x, sec=sec)
+ syn.e = syn_params['erev']
+ syn.tau1 = syn_params['tau1']
+ syn.tau2 = syn_params['tau2']
+ syns.append(syn)
+ return syns
+
+
+def Exp2Syn(syn_params, sec_x, sec_id):
+ """Create a list of exp2syn synapses
+
+ :param syn_params: parameters of a synapse
+ :param sec_x: normalized distance along the section
+ :param sec_id: target section
+ :return: NEURON synapse object
+ """
+ syn = h.Exp2Syn(sec_x, sec=sec_id)
+ syn.e = syn_params['erev']
+ syn.tau1 = syn_params['tau1']
+ syn.tau2 = syn_params['tau2']
+ return syn
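+
+# Example parameter set consumed by Exp2Syn above (illustrative values):
+#   syn_params = {'erev': 0.0, 'tau1': 1.0, 'tau2': 3.0}  # mV, ms, ms
+#   syn = Exp2Syn(syn_params, 0.5, sec)  # one synapse at the middle of `sec`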
+
+
+@synapse_model
+def stp1syn(syn_params, xs, secs):
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.stp1syn(x, sec=sec)
+
+ syn.e = syn_params["erev"]
+ syn.p0 = 0.5
+ syn.tau_r = 200
+ syn.tau_1 = 5
+ syns.append(syn)
+
+ return syns
+
+
+@synapse_model
+def stp2syn(syn_params, x, sec):
+ syn = h.stp2syn(x, sec=sec)
+ syn.e = syn_params["erev"]
+ syn.p0 = syn_params["p0"]
+ syn.tau_r0 = syn_params["tau_r0"]
+ syn.tau_FDR = syn_params["tau_FDR"]
+ syn.tau_1 = syn_params["tau_1"]
+ return syn
+
+
+@synapse_model
+def stp3syn(syn_params, xs, secs):
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.stp3syn(x, sec=sec) # temporary
+ syn.e = syn_params["erev"]
+ syn.p0 = 0.6
+ syn.tau_r0 = 200
+ syn.tau_FDR = 2000
+ syn.tau_D = 500
+ syn.tau_1 = 5
+ syns.append(syn)
+
+ return syns
+
+
+@synapse_model
+def stp4syn(syn_params, xs, secs):
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.stp4syn(x, sec=sec)
+ syn.e = syn_params["erev"]
+ syn.p0 = 0.6
+ syn.tau_r = 200
+ syn.tau_1 = 5
+ syns.append(syn)
+
+ return syns
+
+
+@synapse_model
+def stp5syn(syn_params, x, sec): # temporary
+ syn = h.stp5syn(x, sec=sec)
+ syn.e = syn_params["erev"]
+ syn.tau_1 = syn_params["tau_1"]
+ syn.tau_r0 = syn_params["tau_r0"]
+ syn.tau_FDR = syn_params["tau_FDR"]
+ syn.a_FDR = syn_params["a_FDR"]
+ syn.a_D = syn_params["a_D"]
+ syn.a_i = syn_params["a_i"]
+ syn.a_f = syn_params["a_f"]
+ syn.pbtilde = syn_params["pbtilde"]
+ return syn
+
+
+@synapse_model
+def stp5isyn(syn_params, xs, secs):  # temporary
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.stp5isyn(x, sec=sec)
+ syn.e = syn_params["erev"]
+ syn.tau_1 = syn_params["tau_1"]
+ syn.tau_r0 = syn_params["tau_r0"]
+ syn.tau_FDR = syn_params["tau_FDR"]
+ syn.a_FDR = syn_params["a_FDR"]
+ syn.a_D = syn_params["a_D"]
+ syn.a_i = syn_params["a_i"]
+ syn.a_f = syn_params["a_f"]
+ syn.pbtilde = syn_params["pbtilde"]
+ syns.append(syn)
+
+ return syns
+
+
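+# tmgsyn implements the Tsodyks-Markram dynamic synapse: tau_rec controls
+# recovery from depression, tau_facil controls facilitation, and U sets the
+# baseline utilization (release probability).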
+@synapse_model
+def tmgsyn(syn_params, xs, secs):
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.tmgsyn(x, sec=sec)
+ syn.e = syn_params["erev"]
+ syn.tau_1 = syn_params["tau_1"]
+ syn.tau_rec = syn_params["tau_rec"]
+ syn.tau_facil = syn_params["tau_facil"]
+ syn.U = syn_params["U"]
+ syn.u0 = syn_params["u0"]
+ syns.append(syn)
+
+ return syns
+
+
+@synapse_model
+def expsyn(syn_params, x, sec):
+ """Create a list of expsyn synapses
+
+ :param syn_params: parameters of a synapse (dict)
+ :param x: normalized distance along the section (float)
+ :param sec: target section (hoc object)
+ :return: synapse objects
+ """
+ syn = h.ExpSyn(x, sec=sec)
+ syn.e = syn_params['erev']
+ syn.tau = syn_params["tau1"]
+ return syn
+
+
+@synapse_model
+def exp1syn(syn_params, xs, secs):
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.exp1syn(x, sec=sec)
+ syn.e = syn_params['erev']
+ syn.tau = syn_params["tau_1"]
+ syns.append(syn)
+ return syns
+
+
+@synapse_model
+def exp1isyn(syn_params, xs, secs):
+ syns = []
+ for x, sec in zip(xs, secs):
+ syn = h.exp1isyn(x, sec=sec)
+ syn.e = syn_params['erev']
+ syn.tau = syn_params["tau_1"]
+ syns.append(syn)
+ return syns
+
+
+add_synapse_model(Exp2Syn, 'exp2syn', overwrite=False)
+add_synapse_model(Exp2Syn, overwrite=False)
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/default_setters/synaptic_weights.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_setters/synaptic_weights.py
new file mode 100644
index 0000000..0f0973d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_setters/synaptic_weights.py
@@ -0,0 +1,51 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import math
+
+from bmtk.simulator.bionet.pyfunction_cache import add_weight_function
+
+
+def default_weight_fnc(edge_props, src_props, trg_props):
+ return edge_props['syn_weight']
+
+
+def wmax(edge_props, src_props, trg_props):
+ return edge_props["syn_weight"]
+
+
+def gaussianLL(edge_props, src_props, trg_props):
+ src_tuning = src_props['tuning_angle']
+ tar_tuning = trg_props['tuning_angle']
+
+ w0 = edge_props["syn_weight"]
+ sigma = edge_props["weight_sigma"]
+
+ delta_tuning = abs(abs(abs(180.0 - abs(float(tar_tuning) - float(src_tuning)) % 360.0) - 90.0) - 90.0)
+ weight = w0 * math.exp(-(delta_tuning / sigma) ** 2)
+
+ return weight
+
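+# Worked example of the angle folding above: for src_tuning=10 and
+# tar_tuning=190, abs(tar - src) % 360 = 180 and
+# abs(abs(abs(180 - 180) - 90) - 90) = 0, so the synapse receives the full
+# weight w0 -- tuning angles 180 deg apart are treated as identical
+# (orientation rather than direction tuning).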
+
+add_weight_function(wmax, 'wmax', overwrite=False)
+add_weight_function(gaussianLL, 'gaussianLL', overwrite=False)
+add_weight_function(default_weight_fnc, 'default_weight_fnc', overwrite=False)
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/default_templates/BioAxonStub.hoc b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_templates/BioAxonStub.hoc
new file mode 100644
index 0000000..df8660d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_templates/BioAxonStub.hoc
@@ -0,0 +1,61 @@
+begintemplate BioAxonStub
+
+public init
+public soma, dend, apic, axon
+public all, somatic, basal, apical, axonal
+
+objref all, somatic, basal, apical, axonal
+objref this
+
+create soma[1]
+create dend[1]
+create apic[1]
+create axon[1]
+
+
+proc init() {localobj nl, import
+ all = new SectionList()
+ somatic = new SectionList()
+ basal = new SectionList()
+ apical = new SectionList()
+ axonal = new SectionList()
+ forall delete_section()
+
+// nl = new Import3d_Neurolucida3()
+ nl = new Import3d_SWC_read()
+ nl.quiet = 1
+ nl.input($s1)
+ import = new Import3d_GUI(nl, 0)
+// import.quite = 1
+ import.instantiate(this)
+
+ simplify_axon()
+}
+
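+// Replace the reconstructed axon with a two-section stub:
+// soma(0.5) -> axon[0] -> axon[1], each 30 um long and 1 um in diameter.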
+proc simplify_axon() {
+
+ forsec axonal { delete_section() }
+ create axon[2]
+
+ axon[0] {
+ L = 30
+ diam = 1
+ nseg = 1+2*int(L/40)
+ all.append()
+ axonal.append()
+ }
+ axon[1] {
+ L = 30
+ diam = 1
+ nseg = 1+2*int(L/40)
+ all.append()
+ axonal.append()
+ }
+ connect axon(0), soma(0.5)
+ connect axon[1](0), axon[0](1)
+ define_shape()
+
+
+}
+
+endtemplate BioAxonStub
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/default_templates/Biophys1.hoc b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_templates/Biophys1.hoc
new file mode 100644
index 0000000..e25192a
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_templates/Biophys1.hoc
@@ -0,0 +1,32 @@
+begintemplate Biophys1
+
+public init
+public soma, dend, apic, axon
+public all, somatic, basal, apical, axonal
+
+objref all, somatic, basal, apical, axonal
+objref this
+
+create soma[1]
+create dend[1]
+create apic[1]
+create axon[1]
+
+
+proc init() {localobj nl, import
+ all = new SectionList()
+ somatic = new SectionList()
+ basal = new SectionList()
+ apical = new SectionList()
+ axonal = new SectionList()
+ forall delete_section()
+
+ nl = new Import3d_SWC_read()
+ nl.quiet = 1
+ nl.input($s1)
+ import = new Import3d_GUI(nl, 0)
+ import.instantiate(this)
+
+}
+
+endtemplate Biophys1
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/default_templates/advance.hoc b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_templates/advance.hoc
new file mode 100644
index 0000000..f4ed0b8
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/default_templates/advance.hoc
@@ -0,0 +1,10 @@
+// custom proc advance()
+
+objref pysim // defined in the Simulation as h.pysim = self
+
+pysim = new PythonObject()
+
+proc advance() {
+ fadvance()
+ pysim.post_fadvance() // run Simulation.post_fadvance() function after each fadvance call
+}
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/iclamp.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/iclamp.py
new file mode 100644
index 0000000..fe823ef
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/iclamp.py
@@ -0,0 +1,38 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from neuron import h
+
+
+class IClamp(object):
+ def __init__(self, amplitude, delay, duration):
+ self._iclamp_amp = amplitude
+ self._iclamp_del = delay
+ self._iclamp_dur = duration
+ self._stim = None
+
+ def attach_current(self, cell):
+ self._stim = h.IClamp(cell.hobj.soma[0](0.5))
+ self._stim.delay = self._iclamp_del
+ self._stim.dur = self._iclamp_dur
+ self._stim.amp = self._iclamp_amp
+ return self._stim
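+
+# Example usage (assumes a bionet-style cell exposing `hobj.soma`; values
+# illustrative):
+#   stim = IClamp(amplitude=0.120, delay=500.0, duration=1000.0)  # nA, ms, ms
+#   stim.attach_current(cell)  # places an h.IClamp at soma[0](0.5)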
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d.hoc b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d.hoc
new file mode 100644
index 0000000..3bcad33
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d.hoc
@@ -0,0 +1,12 @@
+{xopen("import3d/import3d_sec.hoc")}
+{xopen("import3d/read_swc.hoc")}
+{xopen("import3d/read_nlcda.hoc")}
+{xopen("import3d/read_nlcda3.hoc")}
+{xopen("import3d/read_nts.hoc")}
+{xopen("import3d/read_morphml.hoc")}
+{xopen("import3d/import3d_gui.hoc")}
+objref tobj, nil
+proc makeimport3dtool() {
+ tobj = new Import3d_GUI(nil)
+ tobj = nil
+}
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/import3d_gui.hoc b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/import3d_gui.hoc
new file mode 100644
index 0000000..81d6935
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/import3d_gui.hoc
@@ -0,0 +1,1174 @@
+{load_file("celbild.hoc")}
+{load_file("stdlib.hoc")}
+
+begintemplate Import3d_GUI
+public swc, diam_glyph, box, plrot, readfile, redraw, name
+public origin, rotmatold, raworigin, g, rotraw, instantiate
+external hoc_sf_
+objref swc, g, box, this, rotmat, m2, origin, tobj, rotmatold
+objref raworigin, rotsav, viewsec, rawsel, deck
+objref file, nil, problist, types, editbox
+strdef tstr, tstr1, typelabel_, filename
+public quiet
+
+
+proc init() {
+
+ quiet = 0
+
+ if (numarg() == 2) if ($2 == 0) {
+ swc = $o1
+ return
+ }
+ if ($o1 == nil) {
+ file = new File()
+ filename = "choose a file "
+ }else{
+ file = $o1.file
+ hoc_sf_.head(file.getname(), "[^/]*$", tstr)
+ file.chooser("r", "Import 3-D Reconstruction File", "*", "Read", "Cancel", tstr)
+	filename = file.getname()
+ }
+ ztrans_ = 0
+ dummy_ = 0
+ undo_type_ = 0
+ show_point_ = 1
+ show_diam_ = 1
+ if ($o1 == nil) { build() map() return }
+ init1($o1)
+ build()
+ map()
+ init2()
+
+}
+
+proc map() {
+ sprint(tstr, "%s", this)
+ if (numarg() == 0) {
+ box.map(tstr)
+ }else{
+ box.map(tstr, $2, $3, $4, $5)
+ }
+}
+
+proc init1() {
+ i=0 j=0
+ swc = $o1
+ selpoint_ = -1
+ selid_ = swc.pt2id(selpoint_)
+ viewsec = new List()
+ showtype(-10000)
+ rotated_ = 0
+ rotmat = new Matrix(3,3)
+ rotmatold = rotmat.c.ident
+ rotsav = rotmat.c.ident
+ origin = new Vector(3)
+ raworigin = new Vector(3)
+ rawsel = new Vector(3)
+ m2 = new Matrix(3,3)
+}
+proc init2() {
+ rot(0,0)
+ pl()
+ g.exec_menu("View = plot")
+ g.exec_menu("Zoom")
+}
+
+proc build() {local i
+ box = new HBox(3)
+ box.full_request(1)
+ box.save("")
+ box.ref(this)
+ box.intercept(1)
+ box.adjuster(400)
+ g = new Graph(0)
+ g.view(2)
+ g.xaxis(3)
+ deck = new Deck(3)
+ build_panel()
+ deck.map
+ box.intercept(0)
+}
+
+proc build_panel() {local i
+ deck.intercept(1)
+ xpanel("")
+ xcheckbox(filename, &readfile_, "readfile()")
+ if (swc == nil) {
+ xlabel(" accepted file formats:")
+ xlabel(" SWC")
+ xlabel(" Neurolucida (v1 and v3)")
+ xlabel(" Eutectic")
+ if (nrnpython("")) xlabel(" MorphML")
+ for i = 0, 15 { xlabel("") }
+ xpanel(0)
+ deck.intercept(0)
+ deck.flip_to(0)
+ return
+ }
+ sprint(tstr, "File format: %s", swc.filetype)
+ xlabel(tstr)
+ xlabel("-------------------------------")
+ g.menu_remove("Zoom")
+ g.menu_tool("Zoom", "zoom")
+ g.menu_remove("Translate ")
+ g.menu_tool("Translate ", "translate")
+ g.menu_remove("Rotate")
+ g.menu_tool("Rotate (about axis in plane)", "rotate")
+ xcheckbox("Rotate 45deg about y axis", &dummy_, "rot45()")
+ xcheckbox("Rotated (vs Raw view)", &rotated_, "rotraw()")
+ xcheckbox("Show Points", &show_point_, "pl()")
+ xcheckbox("Show Diam", &show_diam_, "pl()")
+ xvarlabel(typelabel_)
+ xmenu("View type")
+ xradiobutton("All", "showtype(-10000) pl()", 1)
+ xradiobutton("Section containing selected point", "showsec() pl()")
+ xradiobutton("Distal (tree) from selected point", "showdistal() pl()")
+ xradiobutton("Proximal (path to root) from selected point", "showprox() pl()")
+ xradiobutton("Root sections", "showroot() pl()")
+ if (swc.type.min != swc.type.max) {
+ for i = swc.type.min, swc.type.max {
+ if (swc.type.indwhere("==", i) != -1) {
+ sprint(tstr, "type %d", i)
+ sprint(tstr1, "showtype(%d) pl()", i)
+ xradiobutton(tstr, tstr1)
+ }
+ }
+ }
+ xmenu()
+ g.menu_remove("Select point")
+ g.menu_tool("Select point", "selpoint", "selpoint1(1)")
+ if (strcmp(swc.filetype, "Neurolucida") == 0) {
+ xpvalue("Line#", &selid_, 1, "selid(1)")
+ if (swc.err) {
+ xbutton("Problem points", "probpointpanel()")
+ }
+ }else if (strcmp(swc.filetype, "Neurolucida V3") == 0) {
+ xpvalue("Line#", &selid_, 1, "selid(1)")
+ }else{
+ xpvalue("Select id", &selid_, 1, "selid(1)")
+ }
+ xlabel("-------------------------------")
+ xbutton("Edit", "map_edit()")
+ xmenu("Export")
+ xbutton("CellBuilder", "cbexport()")
+ xbutton("Instantiate", "instantiate(nil)")
+ xmenu()
+ sprint(tstr, "%s filter facts", swc.filetype)
+ xbutton(tstr, "swc.helptxt()")
+ xpanel(0)
+ deck.intercept(0)
+ deck.flip_to(0)
+}
+
+
+proc map_edit() {
+ if (editbox == nil) {
+ build_edit()
+ }
+ if (editbox.ismapped) { return }
+ sprint(tstr, "Edit %s", this)
+ editbox.map(tstr)
+}
+proc build_edit() {
+ editbox = new VBox()
+ editbox.intercept(1)
+ editbox.save("")
+ xpanel("")
+ ztransitem()
+ xlabel("Select point:")
+ xcheckbox("Largest z change", &dummy_, "sel_largest_dz()")
+ xlabel("then action:")
+ xcheckbox("z-translate rest of tree to parent point", &dummy_, "edit2()")
+ xcheckbox("z-translate to average of adjacent points", &dummy_, "edit1()")
+ xcheckbox("undo last", &dummy_, "edit0()")
+ xlabel("-------------------")
+ xcheckbox("3 point filter of all z values (no undo)", &dummy_, "edit3()")
+ xpanel()
+ editbox.intercept(0)
+}
+
+proc sel_largest_dz() {local i, j, dz, dzmax, imax, jmax localobj sec, tobj
+ dummy_ = 0
+ dzmax = -1
+ for i = 0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ tobj = sec.raw.getrow(2).deriv(1,1).abs
+ j = tobj.max_ind
+ dz = tobj.x[j]
+ if (dz > dzmax) {
+ jmax = j+1
+ imax = i
+ dzmax = dz
+ }
+ }
+ if (dzmax > 0) {
+ selpoint_ = swc.sec2pt(imax, jmax)
+ selpoint_dependent_show()
+ swc.sections.object(imax).raw.getcol(jmax, rawsel)
+ selid_ = swc.pt2id(selpoint_)
+ pl()
+ }
+}
+
+proc ztransitem() {local i, n localobj raw
+ n = 0
+ for i = 0, swc.sections.count-1 {
+ raw = swc.sections.object(i).raw
+ if (abs(raw.x[2][0] - raw.x[2][1]) > 10) {
+ n += 1
+ }
+ }
+ if (n > 0) {
+ sprint(tstr, "z translation for %d abrupt branch backlash", n)
+ xcheckbox(tstr, &ztrans_, "ztrans()")
+ }
+}
+
+proc ztrans() { local i, zd, pn localobj sec
+ if (ztrans_) {
+ for i = 0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (object_id(sec.parentsec) == 0) { continue }
+ if (object_id(sec.parentsec.parentsec) == 0) { continue }
+ zd = sec.raw.x[2][1] - sec.raw.x[2][0]
+ if (abs(zd) > 5) {
+ zd += sec.parentsec.ztrans
+ }else{
+ zd = sec.parentsec.ztrans
+ }
+ sec.ztrans = zd
+ sec.raw.setrow(2, sec.raw.getrow(2).sub(sec.ztrans))
+ pn = sec.parentsec.raw.ncol
+ sec.raw.x[2][0] = sec.parentsec.raw.x[2][pn-1]
+ }
+ }else{
+ for i = 0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (sec.ztrans) {
+sec.raw.setrow(2, sec.raw.getrow(2).add(sec.ztrans))
+ pn = sec.parentsec.raw.ncol
+ sec.raw.x[2][0] = sec.parentsec.raw.x[2][pn-1]
+ sec.ztrans = 0
+ }
+ }
+ }
+ redraw()
+}
+
+proc edit0() {local i, n localobj sec
+ dummy_ = 0
+ if (undo_type_ == 1) {
+ i = swc.pt2sec(undo_selpoint_, sec)
+ sec.raw.x[2][i] = undo_z_
+ sec.raw.getcol(i, rawsel)
+ }else if (undo_type_ == 2) {
+ i = swc.pt2sec(undo_selpoint_, sec)
+ n = sec.raw.ncol
+ for i=i, n-1 {
+ sec.raw.x[2][i] += undo_z_
+ }
+ sec.raw.getcol(i, rawsel)
+ for i=0, swc.sections.count-1 { swc.sections.object(i).volatile = 0 }
+ sec.volatile = 1
+ for i=0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (object_id(sec.parentsec)) if (sec.parentsec.volatile) {
+ sec.volatile = 1
+ sec.raw.setrow(2, sec.raw.getrow(2).add(undo_z_))
+ }
+ }
+ }
+ undo_type_ = 0
+ redraw()
+}
+
+proc edit1() {local i, z1, z2 localobj sec
+ // z translate to average of adjacent points
+ dummy_ = 0
+ if (selpoint_ >= 0) {
+ i = swc.pt2sec(selpoint_, sec)
+ if (i > 0) {
+ z1 = sec.raw.x[2][i-1]
+ }else{
+ return
+ }
+ if (i < sec.raw.ncol-1) {
+ z2 = sec.raw.x[2][i+1]
+ }else{
+ return
+ }
+ undo_selpoint_ = selpoint_
+ undo_type_ = 1
+ undo_z_ = sec.raw.x[2][i]
+ sec.raw.x[2][i] = (z1 + z2)/2
+ sec.raw.getcol(i, rawsel)
+ }
+ redraw()
+}
+
+proc edit2() {local i, ip, z1, n localobj sec
+ // z-translate rest of tree to parent point
+ dummy_ = 0
+ if (selpoint_ >= 0) {
+ ip = swc.pt2sec(selpoint_, sec)
+ if (ip > 0) {
+ z1 = sec.raw.x[2][ip] - sec.raw.x[2][ip-1]
+ }else{
+ return
+ }
+ undo_selpoint_ = selpoint_
+ undo_type_ = 2
+ undo_z_ = z1
+ n = sec.raw.ncol
+ for i=ip, n-1 {
+ sec.raw.x[2][i] -= z1
+ }
+ sec.raw.getcol(ip, rawsel)
+ for i=0, swc.sections.count-1 { swc.sections.object(i).volatile = 0 }
+ sec.volatile = 1
+ for i=0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (object_id(sec.parentsec)) if (sec.parentsec.volatile) {
+ sec.volatile = 1
+ sec.raw.setrow(2, sec.raw.getrow(2).sub(z1))
+ }
+ }
+ }
+ redraw()
+}
+
+proc edit3() {local i localobj sec
+ dummy_ = 0
+ for i=0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ sec.raw.setrow(2, sec.raw.getrow(2).medfltr)
+ }
+ if (selpoint_ >= 0) {
+ i = swc.pt2sec(selpoint_, sec)
+ sec.raw.getcol(i, rawsel)
+ }
+ redraw()
+}
+
+proc probpointpanel() {
+ problist = new List()
+ problist.browser("Problem points", "s")
+ problist.select_action("probpoint(hoc_ac_)")
+ swc.fillproblist(problist)
+ problist.select(-1)
+}
+
+proc probpoint() {local i
+ if ($1 < 0) {return}
+ sscanf(problist.object($1).s, "%d:", &i)
+ selid_ = i
+ selid(0)
+}
+
+proc readfile() {
+ readfile_ = 0
+ if (numarg() == 0) {
+ file.chooser("r", "Import 3-D Reconstruction File", "*", "Read", "Cancel")
+ if (file.chooser()) {
+ if (!some_format()) {
+ return
+ }
+ }else{
+ return
+ }
+ }else{
+ file = new File($s1)
+ if (!some_format()) {
+ return
+ }
+ }
+ // if new file
+ problist = nil
+ deck.flip_to(-1)
+ build_panel()
+ deck.move_last(0)
+ deck.flip_to(0)
+ init1(swc)
+ init2()
+ doNotify()
+ if (swc.err) {
+ printf("\n")
+ sprint(tstr, "%s: File translation problems. See the messages on the terminal", file.getname)
+ continue_dialog(tstr)
+ if (strcmp(swc.filetype, "Neurolucida V3") == 0) {
+ swc.b2spanel(this)
+ }
+ }
+ deck.remove_last()
+}
+
+func some_format() {local i, a,b,c,d,e,f,g, n
+ if (!file.ropen()) {
+ sprint(tstr, "Can't read %s", file.getname)
+ continue_dialog(tstr)
+ return 0
+ }
+ while (1) {
+ if (file.eof) {
+ file.close
+ sprint(tstr, "Can't figure out file format for %s", file.getname)
+ continue_dialog(tstr)
+ return 0
+ }
+ file.gets(tstr)
+ if (hoc_sf_.head(tstr, "^\\<\\?xml", tstr1) != -1) {
+ if (nrnpython("")) {
+ swc = new Import3d_MorphML() break
+ }else{
+ file.close
+ sprint(tstr, "Can't read MorphML: Python not available.")
+ continue_dialog(tstr)
+ return 0
+ }
+ }
+ n = sscanf(tstr, "%f %f %f %f %f %f %f", &a, &b, &c, &d, &e, &f, &g)
+ if (n == 7) { swc = new Import3d_SWC_read() break }
+ n = sscanf(tstr, "[%d,%d] (%f,%f,%f) %f", &a, &b, &c, &d, &e, &f)
+ if (n == 6) { swc = new Import3d_Neurolucida_read() break }
+ n = sscanf(tstr, "%d %s %d %f %f %f %f", &a, tstr, &b, &c, &d, &e, &f)
+ if (n == 7) { swc = new Import3d_Eutectic_read() break }
+ if (hoc_sf_.tail(tstr, "^[ \t]*", tstr1) != -1) {
+ //unfortunately regexp does not allow an explicit "("
+ hoc_sf_.left(tstr1, 1)
+ if (strcmp(tstr1, "(") == 0) {
+ swc = new Import3d_Neurolucida3() break
+ }
+ }
+ if (hoc_sf_.head(tstr, "^;[ \t]*V3", tstr1) != -1) {
+ swc = new Import3d_Neurolucida3() break
+ }
+ }
+ file.close
+ filename = file.getname
+ swc.input(filename)
+ return 1
+}
+
+proc pl_point() { local i, j, i1 localobj m, m0
+ if (viewsec.count) {m0 = swc.sections.object(0).xyz}
+ for i=0, viewsec.count-1 {
+ viewsec.object(i).pl_point(g)
+ }
+}
+
+proc pl_centroid() {local i
+ for i=0, swc.sections.count-1 {
+ swc.sections.object(i).pl_centroid(g)
+ }
+}
+proc pl_diam() {local i localobj sec
+ for i=0, viewsec.count-1 {
+ viewsec.object(i).pl_diam(g)
+ }
+}
+proc pl() { localobj tobj
+ g.erase_all
+ if (show_diam_) {pl_diam()}
+ pl_centroid()
+ if (show_point_) {pl_point()}
+ if (selpoint_ >= 0) {
+ tobj = m2.mulv(rawsel)
+ g.mark(tobj.x[0], tobj.x[1], "O", 12, 2, 1)
+ swc.label(selpoint_, tstr)
+ g.label(.1, .05, tstr, 2, 1, 0, 0, 1)
+ }
+}
+
+proc redraw() { local i localobj sec
+ if (selpoint_ >= 0) {
+ i = swc.pt2sec(selpoint_, sec)
+ sec.raw.getcol(i, rawsel)
+ }
+ showtype(viewtype_)
+ rot(0,0)
+ pl()
+}
+
+proc showtype() {
+ viewtype_ = $1
+ viewsec.remove_all
+ if ($1 == -10000) {
+ typelabel_ = "View all types"
+ for i=0, swc.sections.count - 1 {
+ viewsec.append(swc.sections.object(i))
+ swc.sections.object(i).centroid_color = 2
+ }
+ }else{
+ sprint(typelabel_, "View type %d", viewtype_)
+ for i=0, swc.sections.count - 1 {
+ if (swc.sections.object(i).type == viewtype_) {
+ viewsec.append(swc.sections.object(i))
+ swc.sections.object(i).centroid_color = 2
+ }else{
+ swc.sections.object(i).centroid_color = 9
+ }
+ }
+ }
+}
+
+proc selpoint_dependent_show() {
+ if (viewtype_ == -20000) {
+ showdistal()
+ }else if (viewtype_ == -30000) {
+ showprox()
+ }else if (viewtype_ == -40000) {
+ showsec()
+ }else if (viewtype_ == -50000) {
+ showroot()
+ }
+}
+
+proc showdistal() {local i localobj sec
+ viewtype_ = -20000
+ typelabel_ = "Show distal (tree) from selected point"
+ viewsec.remove_all
+ for i=0, swc.sections.count - 1 {
+ swc.sections.object(i).centroid_color = 9
+ }
+ if (selpoint_ < 0) { return }
+ swc.pt2sec(selpoint_, sec)
+ // recursion is trivial but I want to avoid the depth so use the
+ // fact that children are after the parent in the sections list
+ sec.centroid_color = 2
+ viewsec.append(sec)
+ for i=0, swc.sections.count - 1 {
+ if (swc.sections.object(i).centroid_color == 2) {
+ break
+ }
+ }
+ for i=i+1, swc.sections.count - 1 {
+ sec = swc.sections.object(i)
+ if (sec.parentsec != nil) if (sec.parentsec.centroid_color == 2) {
+ sec.centroid_color = 2
+ viewsec.append(sec)
+ }
+ }
+}
+
+proc showprox() {localobj sec
+ viewtype_ = -30000
+ typelabel_ = "Show proximal (path to root) from selected point"
+ viewsec.remove_all
+ for i=0, swc.sections.count - 1 {
+ swc.sections.object(i).centroid_color = 9
+ }
+ if (selpoint_ < 0) { return }
+ for (swc.pt2sec(selpoint_, sec); sec != nil; sec = sec.parentsec) {
+ viewsec.append(sec)
+ sec.centroid_color = 2
+ }
+}
+
+proc showsec() {localobj sec
+ viewtype_ = -40000
+ typelabel_ = "Show section containing selected point"
+ viewsec.remove_all
+ for i=0, swc.sections.count - 1 {
+ swc.sections.object(i).centroid_color = 9
+ }
+ if (selpoint_ < 0) { return }
+ swc.pt2sec(selpoint_, sec)
+ if (sec != nil) {
+ viewsec.append(sec)
+ sec.centroid_color = 2
+ }
+}
+
+proc showroot() {localobj sec
+ viewtype_ = -50000
+ typelabel_ = "Show root sections"
+ viewsec.remove_all
+ for i=0, swc.sections.count - 1 {
+ sec = swc.sections.object(i)
+ sec.centroid_color = 9
+ if (sec.parentsec == nil) {
+ sec.centroid_color = 2
+ viewsec.append(sec)
+ }
+ }
+}
+
+proc selpoint1() { // deselection not supported by menu_tool
+ if ($1 == 0) {
+ selpoint_ = -1
+ }
+}
+proc selpoint() {local i, j
+ if ($1 == 2) {
+ nearest_point($2, $3, &i, &j)
+ selpoint_ = swc.sec2pt(i, j)
+ selpoint_dependent_show()
+ swc.sections.object(i).raw.getcol(j, rawsel)
+ selid_ = swc.pt2id(selpoint_)
+ pl()
+ }
+}
+
+proc selid() {local i, j localobj sec
+ selpoint_ = swc.id2pt(selid_)
+ selid_ = swc.pt2id(selpoint_)
+ if (selpoint_ >= 0) {
+ i = swc.pt2sec(selpoint_, sec)
+ sec.raw.getcol(i, rawsel)
+ }
+ selpoint_dependent_show()
+ pl()
+ if ($1 == 1) {
+ swc.label(selpoint_, tstr)
+ print tstr
+ }
+}
+
+proc zoom() {local x1,y1,scale,w,h,x0,y0
+ if ($1 == 2) {
+ i = g.view_info()
+ x = $2
+ y = $3
+ xrel=g.view_info(i, 11, $2)
+ yrel=g.view_info(i, 12, $3)
+ width=g.view_info(i,1)
+ height=g.view_info(i,2)
+ }
+ if ($1 == 1) {
+ x1 = g.view_info(i, 11, $2)
+ y1 = g.view_info(i, 12, $3)
+ y1 = (y1 - yrel) + (x1 - xrel)
+ if(y1 > 2) { y1 = 2 } else if (y1 < -2) { y1 = -2 }
+ scale = 10^(y1)
+ w = width/scale
+ h = height/scale
+ x0 = x - w*xrel
+ y0 = y - h*yrel
+ g.view_size(i, x0, x0+w, y0, y0+h)
+ }
+}
+
+proc translate() {local x0,y0
+ if ($1 == 2) {
+ i = g.view_info()
+ x = g.view_info(i, 5)
+ y = g.view_info(i, 7)
+ xrel=g.view_info(i, 11, $2)
+ yrel=g.view_info(i, 12, $3)
+ width=g.view_info(i,1)
+ height=g.view_info(i,2)
+ }
+ if ($1 == 1) {
+ x1 = g.view_info(i, 11, $2)
+ y1 = g.view_info(i, 12, $3)
+ x0 = x - width*(x1 - xrel)
+ y0 = y - height*(y1 - yrel)
+ g.view_size(i, x0, x0 + width, y0, y0 + height)
+ }
+}
+
+func nearest_point() { local i, j, xmin localobj m, v1
+ // return section index and sectionpoint index in $3 and $4
+ xmin = 1e9
+ for i=0, swc.sections.count-1 {
+ m = swc.sections.object(i).xyz
+ v1 = m.getrow(0).sub($1).pow(2).add(m.getrow(1).sub($2).pow(2))
+ j = v1.min_ind
+ if (v1.x[j] < xmin) {
+ xmin = v1.x[j]
+ $&3 = i
+ $&4 = j
+ }
+ }
+ return xmin
+}
+
+proc rotate() {local x, y, x0, y0, len, a
+ if ($1 == 2) {
+ rotated_ = 1
+ nearest_point($2, $3, &i, &j)
+ swc.sections.object(i).xyz.getcol(j, origin)
+ swc.sections.object(i).raw.getcol(j, raworigin)
+//print i, j origin.printf
+ i = g.view_info()
+ xpix = g.view_info(i,13, $2)
+ ypix = g.view_info(i, 14, $3) // from top
+ left = g.view_info(i, 5)
+ bottom = g.view_info(i, 7)
+ width=g.view_info(i,1)
+ height=g.view_info(i,2)
+ }else{
+ x = g.view_info(i,13, $2) - xpix
+ y = ypix - g.view_info(i, 14, $3)
+ // rotation axis is normal to the line, rotation magnitude
+ // proportional to length of line
+ len = sqrt(x*x + y*y)
+ // rotation axis angle
+ if (len > 0) {
+ a = atan2(x, y)
+ b = len/50
+ }else{
+ a = 0
+ b = 0
+ }
+ rot(a, b)
+ pl()
+ tobj = rotmat.mulv(origin)
+ //tobj.x[0] should be at same place as origin.x[0]
+ x0 = left - origin.x[0] + tobj.x[0]
+ y0 = bottom - origin.x[1] + tobj.x[1]
+ g.view_size(i, x0, x0 + width, y0, y0 + height)
+
+ }
+ if ($1 == 3) {
+ m2.c(rotmatold)
+//rotmatold.printf
+ }
+}
+
+proc rotraw() {local x0, y0
+ width = g.view_info(0, 1)
+ height = g.view_info(0, 2)
+ left = g.view_info(0,5)
+ bottom = g.view_info(0,7)
+ if (rotated_ == 0) { //turn off
+ rotmatold.c(rotsav)
+ tobj = rotmatold.mulv(raworigin)
+ //tobj.x[0] should be at same place as origin.x[0]
+ x0 = left + raworigin.x[0] - tobj.x[0]
+ y0 = bottom + raworigin.x[1] - tobj.x[1]
+ rotmatold.ident
+ }else{ // back to previous rotation
+ rotsav.c(rotmatold)
+ tobj = rotmatold.mulv(raworigin)
+ //tobj.x[0] should be at same place as origin.x[0]
+ x0 = left - raworigin.x[0] + tobj.x[0]
+ y0 = bottom - raworigin.x[1] + tobj.x[1]
+ }
+ rot(0,0)
+ pl()
+ g.view_size(0, x0, x0 + width, y0, y0 + height)
+}
+
+proc rot45() {
+ rot(PI/2, PI/4)
+ rotated_=1
+ m2.c(rotmatold)
+ pl()
+ dummy_ = 0
+}
+
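+// rot(a, b): rotate the morphology by angle b about an axis lying in the xy
+// plane at angle a (an x-axis rotation conjugated by a z-axis rotation),
+// composed with the accumulated rotation stored in rotmatold.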
+proc rot() {local s, c, i localobj sec
+ s = sin($1) c = cos($1)
+ m2.zero
+ m2.x[2][2] = 1
+ m2.x[1][1] = m2.x[0][0] = c
+ m2.x[1][0] = -s
+ m2.x[0][1] = s
+//m2.printf
+ s = sin($2) c = cos($2)
+ rotmat.zero
+ rotmat.x[0][0] = 1
+ rotmat.x[1][1] = rotmat.x[2][2] = c
+ rotmat.x[1][2] = s
+ rotmat.x[2][1] = -s
+//rotmat.printf
+
+ m2.mulm(rotmat).mulm(m2.transpose(m2), rotmat)
+ rotmat.mulm(rotmatold, m2)
+//rotmat.printf
+ for i=0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ sec.rotate(m2)
+ }
+}
+
+proc cbexport() {local i, j, k localobj sec, cell
+ chk_valid()
+ j = 0
+ for i=0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (sec.is_subsidiary) { continue }
+ if (sec.parentsec == nil) {
+ sec.volatile2 = j
+ j += 1
+ }else{
+ sec.volatile2 = sec.parentsec.volatile2
+ }
+ }
+ cell = new List()
+ for k=0, j-1 {
+ cell.remove_all()
+ for i=0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (sec.is_subsidiary) { continue }
+ if (sec.volatile2 == k) {
+ cell.append(sec)
+ }
+ }
+ cbexport1(cell)
+ }
+}
+
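+// Represent a single-point (spherical) soma sample as a 3-point cylinder
+// whose length equals its diameter, centered on the original point.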
+proc sphere_rep() { local i localobj x, y, z, d
+ x = new Vector(3) y = x.c z = x.c d = x.c
+ x.fill($o1.x[0])
+ y.fill($o2.x[0])
+ z.fill($o3.x[0])
+ d.fill($o4.x[0])
+ x.x[0] -= $o4.x[0]/2
+ x.x[2] += $o4.x[0]/2
+ $o1 = x $o2 = y $o3 = z $o4 = d
+}
+
+proc cbexport1() {local i, j, k, min localobj cb, sec, psec, cbsec, slist, m, subsetindex, xx, yy, zz, dd
+ for i=0, $o1.count-1 {
+ sec = $o1.object(i)
+ sec.volatile = i
+ }
+ min = set_nameindex($o1)
+ cb = new CellBuild()
+ cb.topol.names_off = 1
+ cb.topol.circles_off = 1
+ slist = cb.topol.slist
+ slist.remove_all()
+ for i=0, $o1.count-1 {
+ sec = $o1.object(i)
+ psec = nil
+ if (sec.parentsec != nil) {
+ psec = slist.object(sec.parentsec.volatile)
+ }
+ type2name(sec.type, tstr)
+ cbsec = new CellBuildSection(tstr, sec.nameindex, 0, psec, sec.parentx)
+ slist.append(cbsec)
+ m = sec.raw
+ j = sec.first
+ xx = m.getrow(0).c(j)
+ yy = m.getrow(1).c(j)
+ zz = m.getrow(2).c(j)
+ dd = sec.d.c(j)
+ if (sec.iscontour_) {
+ contour2centroid(xx, yy, zz, dd, sec)
+ }
+ if (sec.parentsec == nil && dd.size == 1) {
+ // represent spherical soma as 3 point cylinder
+ // with L=diam
+ sphere_rep(xx, yy, zz, dd)
+ }
+ k = dd.size-1
+ cbsec.position(xx.x[0], yy.x[0], xx.x[k], yy.x[k])
+ cbsec.i3d = k+1
+ cbsec.p3d = new P3D(k + 1)
+ cbsec.p3d.x = xx
+ cbsec.p3d.y = yy
+ cbsec.p3d.z = zz
+ cbsec.p3d.d = dd
+ if (sec.first == 1) {
+ cbsec.logstyle(m.x[0][0], m.x[1][0], m.x[2][0])
+ }
+ cb.all.add(cbsec)
+ }
+ cb.topol.consist()
+ cb.topol.update()
+ cb.subsets.update()
+ subsetindex = types.c.fill(0)
+ k = 0
+ for i=0, types.size-1 {
+ if (types.x[i] > 0) {
+ k += 1 // after all
+ subsetindex.x[i] = k
+ j = i + min
+ if (j == 1) {
+ tstr = "somatic"
+ }else if (j == 2) {
+ tstr = "axonal"
+ }else if (j == 3) {
+ tstr = "basal"
+ }else if (j == 4) {
+ tstr = "apical"
+ }else if (j < 0) {
+ sprint(tstr, "minus_%dset", -j)
+ }else{
+ sprint(tstr, "dendritic_%d", j)
+ }
+ m = new SNList(tstr)
+ cb.subsets.snlist.append(m)
+ }
+ }
+ for i=0, slist.count-1 {
+ sec = $o1.object(i)
+ cbsec = slist.object(i)
+ cb.subsets.snlist.object(subsetindex.x[sec.type-min]).add(cbsec)
+ }
+ //cb.page(2) //unfortunately not able to blacken the radiobutton
+}
+
+func set_nameindex() {local i, min localobj sec
+ min = swc.type.min
+ types = new Vector(swc.type.max - min + 1)
+ for i = 0, $o1.count-1 {
+ sec = $o1.object(i)
+ if (sec.is_subsidiary) { continue }
+ sec.nameindex = types.x[sec.type - min]
+ types.x[sec.type-min] += 1
+ }
+ return min
+}
+
+proc instantiate() {local i, j, min, haspy localobj sec, xx, yy, zz, dd, pyobj
+ chk_valid()
+ haspy = nrnpython("import neuron")
+ if (haspy) {
+ pyobj = new PythonObject()
+ }
+ min = set_nameindex(swc.sections)
+ // create
+ for i = 0, types.size-1 {
+ type2name(i+min, tstr)
+ if (types.x[i] == 1) {
+ sprint(tstr1, "~create %s[1]\n", tstr)
+ execute(tstr1, $o1)
+ }else if (types.x[i] > 1) {
+ sprint(tstr1, "~create %s[%d]\n", tstr, types.x[i])
+ execute(tstr1, $o1)
+ }
+ if ($o1 != nil) { mksubset($o1, i+min, tstr) }
+ }
+ if ($o1 != nil) {execute("forall all.append", $o1) }
+ // connect
+ for i = 0, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (sec.is_subsidiary) { continue }
+ name(sec, tstr)
+ if (i == 0) {
+ sprint(tstr1, "access %s", tstr)
+ if ($o1 == nil) {
+ execute(tstr1, $o1)
+ }
+ }
+ if (sec.parentsec != nil) {
+ name(sec.parentsec, tstr1)
+ sprint(tstr1, "%s connect %s(0), %g", tstr1, tstr, sec.parentx)
+ execute(tstr1, $o1)
+ }
+ // 3-d point info
+ if (sec.first == 1) {
+ sprint(tstr1, "%s { pt3dstyle(1, %g, %g, %g) }", tstr, sec.raw.x[0][0], sec.raw.x[1][0], sec.raw.x[2][0])
+ execute(tstr1, $o1)
+ }
+ j = sec.first
+ xx = sec.raw.getrow(0).c(j)
+ yy = sec.raw.getrow(1).c(j)
+ zz = sec.raw.getrow(2).c(j)
+ dd = sec.d.c(j)
+ if (sec.iscontour_) {
+ if (haspy) {
+ pyobj.neuron._declare_contour(sec, tstr)
+ }
+ contour2centroid(xx, yy, zz, dd, sec)
+ }
+ if (dd.size == 1) { sphere_rep(xx, yy, zz, dd) }
+ for j = 0, dd.size-1 {
+ sprint(tstr1, "%s { pt3dadd(%g, %g, %g, %g) }",\
+ tstr,xx.x[j], yy.x[j], zz.x[j], dd.x[j])
+ execute(tstr1, $o1)
+ }
+ }
+}
+
+proc chk_valid() {local i, x, replot localobj sec
+ replot = 0
+ // some validity checks added in response to experienced file errors
+ // sometimes we can work around them
+
+ // two point sections with 0 length, remove, unless root
+ for (i=swc.sections.count-1; i >= 0; i -= 1) {
+ sec = swc.sections.object(i)
+ if (sec.parentsec == nil) { continue }
+ if ((sec.raw.ncol - sec.first) <= 1) {
+			if (!quiet) {// added by Sergey to suppress the warning output
+ printf("One point section %s ending at line %d has been removed\n", sec, swc.iline.x[swc.id2line(sec.id)])
+ }
+ rm0len(i, sec)
+ replot = 1
+ }else if ((sec.raw.ncol - sec.first) <= 2) {
+ if (sec.raw.getcol(sec.first).eq(sec.raw.getcol(sec.first + 1))) {
+ printf("Two point section ending at line %d with 0 length has been removed\n", swc.iline.x[swc.id2line(sec.id)])
+ rm0len(i, sec)
+ replot = 1
+ }
+ }
+ }
+ if (replot && g != nil) {
+ redraw()
+ }
+}
+
+proc rm0len() {local i localobj sec
+ swc.sections.remove($1)
+ for i=$1, swc.sections.count-1 {
+ sec = swc.sections.object(i)
+ if (sec.parentsec == $o2) {
+ sec.parentsec = $o2.parentsec
+ sec.parentx = $o2.parentx
+			if (!quiet) {// added by Sergey to suppress the warning output
+ printf("\tand child %s reattached\n", sec)
+ }
+ }
+ }
+}
+
+proc mksubset() {
+ if ($2 == 1) {
+ tstr1 = "somatic"
+ }else if ($2 == 2) {
+ tstr1 = "axonal"
+ }else if ($2 == 3) {
+ tstr1 = "basal"
+ }else if ($2 == 4) {
+ tstr1 = "apical"
+ }else if ($2 < 0) {
+ sprint(tstr1, "minus_%dset", -$2)
+ }else{
+ sprint(tstr1, "dendritic_%d", $2)
+ }
+ sprint(tstr1, "forsec \"%s\" %s.append", $s3, tstr1)
+ execute(tstr1, $o1)
+}
+
+proc contour2centroid() {local i, j, imax, imin, ok localobj mean, pts, d, max, min, tobj, rad, rad2, side2, pt, major, m, minor
+ if (object_id($o5.contour_list)) {
+ contourstack2centroid($o1, $o2, $o3, $o4, $o5)
+ return
+ }
+ mean = swc.sections.object(0).contourcenter($o1, $o2, $o3)
+ if (g != nil) {
+ g.beginline(6,1)
+ for i=0, $o1.size-1 {
+ g.line($o1.x[i], $o2.x[i])
+ }
+ g.flush()
+ }
+ pts = new Matrix(3, $o1.size)
+ for i=1,3 { pts.setrow(i-1, $oi.c.sub(mean.x[i-1])) }
+ // find the major axis of the ellipsoid that best fits the shape
+ // assuming (falsely in general) that the center is the mean
+
+ m = new Matrix(3,3)
+ for i=0, 2 {
+ for j=i, 2 {
+ m.x[i][j] = pts.getrow(i).mul(pts.getrow(j)).sum
+ m.x[j][i] = m.x[i][j]
+ }
+ }
+ tobj = m.symmeig(m)
+ // major axis is the one with largest eigenvalue
+ major = m.getcol(tobj.max_ind)
+ // minor is normal and in xy plane
+ minor = m.getcol(3-tobj.min_ind-tobj.max_ind)
+ minor.x[2] = 0
+ minor.div(minor.mag)
+if (g != nil) {
+g.beginline(4, 3) g.line(mean.x[0], mean.x[1])
+g.line(mean.x[0] + 20*major.x[0], mean.x[1] + 20*major.x[1]) g.flush
+}
+ d = new Vector(pts.ncol)
+ rad = new Vector(pts.ncol)
+ for i=0, pts.ncol-1 {
+ pt = pts.getcol(i)
+ d.x[i] = pt.dot(major) // position on the line
+ tobj = major.c.mul(d.x[i])
+ rad.x[i] = pt.dot(minor)
+ }
+ imax = d.max_ind
+ d.rotate(-imax)
+ rad.rotate(-imax)
+ imin = d.min_ind
+ side2 = d.c(imin)
+ rad2 = rad.c(imin)
+ d.resize(imin).reverse
+ rad.resize(imin).reverse
+ // now we have the two sides without the min and max points (rad=0)
+ // we hope both sides now monotonically increase, i.e. convex
+ // make it convex
+ for (j = d.size-1; j > 0; j -= 1) {
+ if (d.x[j] <= d.x[j-1]) {
+//printf("removed d %d %g\n", j, d.x[j])
+ d.remove(j)
+ rad.remove(j)
+ if (j != d.size()) { j += 1 }
+ }
+ }
+ for (j = side2.size-1; j > 0; j -= 1) {
+ if (side2.x[j] <= side2.x[j-1]) {
+//printf("removed side2 %d %g\n", j, side2.x[j])
+ side2.remove(j)
+ rad2.remove(j)
+ if (j != side2.size()) { j += 1 }
+ }
+ }
+ // can interpolate so diams on either side of major have same d
+ tobj = d.c.append(side2)
+ tobj.sort
+ i = tobj.x[1] j = tobj.x[tobj.size-2]
+ tobj.indgen(i, j, (j-i)/20)
+ rad.interpolate(tobj, d)
+ rad2.interpolate(tobj,side2)
+ d = tobj
+ pts.resize(3, d.size)
+ $o4.resize(d.size)
+ for i = 0, d.size-1 {
+ pt = major.c.mul(d.x[i]).add(mean)
+ $o4.x[i] = abs(rad.x[i] - rad2.x[i])
+ tobj = pt.c.add(minor.c.mul(rad.x[i]))
+if (g != nil) g.beginline(5,3) g.line(tobj.x[0], tobj.x[1])
+ tobj = pt.c.add(minor.c.mul(rad2.x[i]))
+if (g != nil) g.line(tobj.x[0], tobj.x[1]) g.flush
+// pt.add(minor.c.mul(rad2.x[i])).add(minor.c.mul(rad.x[i]))
+ pts.setcol(i, pt)
+ }
+ // avoid 0 diameter ends
+ $o4.x[0] = ($o4.x[0]+$o4.x[1])/2
+ i = $o4.size-1
+ $o4.x[i] = ($o4.x[i]+$o4.x[i-1])/2
+ for i=1,3 { $oi = pts.getrow(i-1) }
+// print d d.printf print rad rad.printf
+// print side2 side2.printf print rad2 rad2.printf
+}
+
+proc contourstack2centroid() {local i, j, area, d localobj c
+ area = $o5.stk_triang_area()
+ printf("stk_triang_area = %g\n", area)
+ for i=1,4 { $oi.resize(0) }
+ c = $o5.approximate_contour_by_circle(&d)
+ $o4.append(d) for i=1,3 { $oi.append(c.x[i-1]) }
+ for j=0, $o5.contour_list.count-1 {
+ c = $o5.contour_list.object(j).approximate_contour_by_circle(&d)
+ $o4.append(d) for i=1,3 { $oi.append(c.x[i-1]) }
+ }
+}
+
+proc name() {
+ type2name($o1.type, $s2)
+ if ($o1.nameindex > 0) {
+ sprint($s2, "%s[%d]", $s2, $o1.nameindex)
+ }
+}
+
+proc type2name() {
+ if ($1 == 1) {
+ $s2 = "soma"
+ }else if ($1 == 2) {
+ $s2 = "axon"
+ }else if ($1 == 3) {
+ $s2 = "dend"
+ }else if ($1 == 4) {
+ $s2 = "apic"
+ }else if ($1 < 0) {
+ sprint($s2, "minus_%d", -$1)
+ }else{
+ sprint($s2, "dend_%d", $1)
+ }
+}
+endtemplate Import3d_GUI
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/import3d_sec.hoc b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/import3d_sec.hoc
new file mode 100644
index 0000000..01b0b2d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/import3d_sec.hoc
@@ -0,0 +1,392 @@
+begintemplate Import3d_Section
+// primarily for display. Allows gui without instantiating sections
+// fid refers to the raw index of the point that id refers to.
+// For a root section fid is normally 0. For sections that have
+// parents, fid is normally 1 since the first point is often a copy of
+// the last point of the parent.
+// The variable first=0 means that when diam is shown, there
+// should be a glyph drawn defined by raw indices 0 and 1.
+// if this is a contour it may also contain a list of contours that
+// define a 3-d object
+public raw, xyz, d, id, append, g, mkglyph, rotate, type, centroid_color
+public iscontour_, pid, parentsec, parentx, volatile, nameindex, first, fid
+public contour_list, pl_centroid, pl_diam
+public stk_triang_vec, stk_triang_area, is_subsidiary
+public volatile2, contourcenter, ztrans, approximate_contour_by_circle
+public pl_point, insrt, set_pt, stk_center, accurate_triangle_area
+objref raw, xyz, d, g, parentsec, contour_list, this, stk_triang_vec
+proc init() {
+ is_subsidiary = 0
+ ztrans = 0
+ first = 0
+ fid = 0
+ nameindex=0
+ parentx = 1
+ volatile = 0
+ volatile2 = 0
+ pid = -1
+ iscontour_ = 0
+ type = 0
+ centroid_color = 2
+ id = $1
+ raw = new Matrix(3, $2)
+ xyz = new Matrix(3, $2)
+ d = new Vector($2)
+}
+proc set_pt() {
+ raw.x[0][$1] = $2
+ raw.x[1][$1] = $3
+ raw.x[2][$1] = $4
+ d.x[$1] = $5
+}
+
+proc append() {local i, j
+ for i=0, $3-1 {
+ j = $1 + i
+ k = $2 + i
+ set_pt(j, $o4.x[k], $o5.x[k], $o6.x[k], $o7.x[k])
+ }
+}
+
+proc insrt() {local i, nr, nc
+ nr = raw.nrow nc = raw.ncol
+ d.resize(nc+1)
+ raw.resize(nr, nc+1)
+ xyz.resize(nr, nc+1)
+ for (i=nc-1; i >= $1; i -= 1) {
+ raw.setcol(i+1, raw.getcol(i))
+ d.x[i+1] = d.x[i]
+ }
+ set_pt($1, $2, $3, $4, $5)
+}
+
+proc pl_centroid() {local i, n
+ xyz.getrow(1).line($o1, xyz.getrow(0), centroid_color, 1)
+ if (iscontour_) {
+ n = xyz.ncol - 1
+ $o1.beginline(centroid_color, 1)
+ $o1.line(xyz.x[0][0], xyz.x[1][0])
+ $o1.line(xyz.x[0][n], xyz.x[1][n])
+ }
+ if (0) {
+ if (object_id(contour_list)) {
+ for i=0, contour_list.count-1 {
+ contour_list.object(i).pl_centroid($o1)
+ }
+ }
+ }
+}
+
+proc pl_diam() {local i
+ if (!iscontour_) {
+ mkglyph()
+ $o1.glyph(g, 0, 0)
+ }else{
+ if (object_id(contour_list)) {
+ if (!object_id(contour_list.object(0).stk_triang_vec)) {
+ mk_stk_triang_vec(this, contour_list.object(0))
+ for i=1, contour_list.count-1 {
+ mk_stk_triang_vec(contour_list.object(i-1), contour_list.object(i))
+ }
+ }
+ pl_stk_triang($o1, this, contour_list.object(0))
+ for i=1, contour_list.count-1 {
+ pl_stk_triang($o1, contour_list.object(i-1), contour_list.object(i))
+ }
+ }
+ }
+}
+
+proc pl_point() {local i
+ for i=first, xyz.ncol-1 {
+ $o1.mark(xyz.x[0][i], xyz.x[1][i], "s", 5, 3, 1)
+ }
+ if (object_id(parentsec) == 0) {
+ $o1.mark(xyz.x[0][0], xyz.x[1][0], "S", 8, 3, 1)
+ }
+ if (0) {
+ if (object_id(contour_list)) {
+ for i=0, contour_list.count-1 {
+ contour_list.object(i).pl_point($o1)
+ }
+ }
+ }
+}
+
+proc mkglyph() {local i, d1, d2 localobj x, y, norm, x1, y1, i1
+ g = new Glyph()
+ if (xyz.ncol - first < 1) { return }
+ // normal
+ x1 = xyz.getrow(0)
+ y1 = xyz.getrow(1)
+ if (xyz.ncol - first == 1) {
+ // render as spherical
+ g.circle(x1.x[0], y1.x[0], d.x[0]/2)
+ g.fill(1)
+ return
+ }
+ // may or may not want to include parent point in glyph
+ x = x1.c(first).deriv(1,1)
+ y = y1.c(first).deriv(1,1)
+ // point separations
+ norm = x.c.mul(x).add(y.c.mul(y)).sqrt.mul(2) // d is diam, need radius
+ // only want frustra for the non-zero separations
+ i1=norm.c.indvwhere("!=", 0)
+ if (i1.size == 0) {
+// printf("Section with id=%d has 0 length in this projection\n", id)
+ return
+ }
+ norm.index(norm, i1)
+ x.index(x, i1).div(norm)
+ y.index(y, i1).div(norm)
+
+ // but take care of the possible index offset due to missing parent point
+ if (first) { i1.add(first) }
+ i1.append(x1.size-1)
+ x1.index(x1, i1)
+ y1.index(y1, i1)
+
+ for i = 0, x.size-1 {
+ d1 = d.x[i1.x[i]] d2=d.x[i1.x[i]+1]
+ g.path()
+ g.m(x1.x[i]+y.x[i]*d1, y1.x[i]-x.x[i]*d1)
+ g.l(x1.x[i+1]+y.x[i]*d2, y1.x[i+1]-x.x[i]*d2)
+ g.l(x1.x[i+1]-y.x[i]*d2, y1.x[i+1]+x.x[i]*d2)
+ g.l(x1.x[i]-y.x[i]*d1, y1.x[i]+x.x[i]*d1)
+ g.close()
+ g.fill(1)
+ }
+}
+
+proc rotate() {
+ $o1.mulm(raw, xyz)
+ if (1) {
+ if (object_id(contour_list)) {
+ for i=0, contour_list.count-1 {
+ contour_list.object(i).rotate($o1)
+ }
+ }
+ }
+}
+
+
+// a utility function
+obfunc contourcenter() {local i localobj mean, pts, perim, d
+ // convert contour defined by $o1, $o2, $o3 vectors to
+ // 100 uniform points around perimeter
+ // and return the center coordinates as well as the uniform contour
+ // vectors (in $o1, $o2, $o3)
+ pts = new Matrix(3, $o1.size)
+ for i=1,2 { pts.setrow(i-1, $oi) }
+ for i=0,2 {pts.setrow(i, pts.getrow(i).append(pts.x[i][0]).deriv(1,1)) }
+ perim = new Vector(pts.ncol)
+ for i=1, pts.ncol-1 { perim.x[i] = perim.x[i-1] + pts.getcol(i-1).mag }
+ d = new Vector(101)
+ d.indgen(perim.x(perim.size-1)/100)
+ for i=1,3 $oi.interpolate(d, perim)
+ mean = new Vector(3)
+ for i=1, 3 { mean.x[i-1] = $oi.mean }
+ return mean
+}
+
+// return center (Vector.size=3) and average diameter in $&1
+obfunc approximate_contour_by_circle() {local i,n, perim localobj center, x, y, z
+ x=raw.getrow(0)
+ y=raw.getrow(1)
+ z=raw.getrow(2)
+ perim = 0
+ n = x.size
+ for i = 0, n-1 {
+ perim += edgelen(raw.getcol(i), raw.getcol((i+1)%n))
+ }
+ center = contourcenter(x, y, z)
+ if (0) {
+ $&1 = perim/PI
+ }else{
+ x.sub(center.x[0]).mul(x)
+ y.sub(center.x[1]).mul(y)
+ z.sub(center.x[2]).mul(z)
+// $&1 = 2*x.add(y).add(z).sqrt.mean
+ // average of radius based on perim and mean radius of all points
+ $&1 = x.add(y).add(z).sqrt.mean + perim/(2*PI)
+ }
+// printf("%g %g %g %g\n", center.x[0], center.x[1], center.x[2], $&1)
+// printf("perimeter approx = %g actual = %g\n", PI*$&1, perim)
+ return center
+}
+
+proc mk_stk_triang_vec() {local i, j, n1, n2, d1, d2 localobj i1, i2, trv
+ trv = new Vector()
+ $o2.stk_triang_vec = trv
+ // contour indices are chosen so points 0 cross 1 of a contour from center
+ // are in +z direction and points 0 between the two contours are
+ // guaranteed to be an edge. An extra index added to end to close the polygon
+ // I suppose this could fail if angle does not increase monotonically
+ stk_contour_indices($o1, i1, $o1.raw.getcol(0))
+ stk_contour_indices($o2, i2, $o1.raw.getcol(0))
+ i = 0 j = 0
+ n1 = i1.size-1
+ n2 = i2.size-1
+ while(i < n1 || j < n2) {
+ trv.append(i1.x[i], i2.x[j])
+ if (i < n1 && j < n2) {
+ // which next one is shorter
+ d1 = ($o1.raw.x[0][i1.x[i]] - $o2.raw.x[0][i2.x[j+1]])^2 + ($o1.raw.x[1][i1.x[i]] - $o2.raw.x[1][i2.x[j+1]])^2
+ d2 = ($o1.raw.x[0][i1.x[i+1]] - $o2.raw.x[0][i2.x[j]])^2 + ($o1.raw.x[1][i1.x[i+1]] - $o2.raw.x[1][i2.x[j]])^2
+ if (d2 < d1) {
+ i += 1
+ }else{
+ j += 1
+ }
+ }else{
+ if (i < n1) {
+ i += 1
+ }else{
+ j += 1
+ }
+ }
+ }
+ trv.append(i1.x[i], i2.x[j])
+}
+
+proc stk_contour_indices() {local i, d, dmin, imin localobj c, x, y, z
+ $o2 = new Vector($o1.raw.ncol)
+ $o2.indgen()
+ // order the points counterclockwise. ie 0 cross 1 in -z direction
+ x = $o1.raw.getrow(0)
+ y = $o1.raw.getrow(1)
+ z = $o1.raw.getrow(2)
+ c = contourcenter(x, y, z)
+ x = $o1.raw.getcol(0).sub(c)
+ y = $o1.raw.getcol(1).sub(c)
+ if (x.x[0]*y.x[1] - x.x[1]*y.x[0] > 0) {
+ $o2.reverse()
+ }
+
+ // which point is closest to $o3
+ imin = -1
+ dmin = 1e9
+ for i=0, $o2.size - 1 {
+ d = edgelen($o1.raw.getcol($o2.x[i]), $o3)
+ if (d < dmin) {
+ dmin = d
+ imin = i
+ }
+ }
+ $o2.rotate(-imin)
+
+ $o2.append($o2.x[0])
+}
+
+proc pl_stk_triang() {local i, j localobj g, m1, m2, trv
+ g = $o1
+ m1 = $o2.xyz
+ m2 = $o3.xyz
+ trv = $o3.stk_triang_vec
+ for i=0, trv.size-1 {
+ g.beginline(centroid_color, 1)
+ j = trv.x[i]
+ g.line(m1.x[0][j], m1.x[1][j])
+ i += 1
+ j = trv.x[i]
+ g.line(m2.x[0][j], m2.x[1][j])
+ }
+}
+
+func edgelen() {
+ return sqrt($o1.c.sub($o2).sumsq)
+}
+
+func stk_triang_area1() {local area, i, i1, i2, j1, j2, a, b, c, na localobj m1, m2, trv
+ area = 0
+ m1 = $o1.raw
+ m2 = $o2.raw
+ trv = $o2.stk_triang_vec
+ i1 = trv.x[0]
+ i2 = trv.x[1]
+ a = edgelen(m1.getcol(i1), m2.getcol(i2))
+ na = 0
+ for i=2, trv.size-1 {
+ j1 = trv.x[i]
+ i += 1
+ j2 = trv.x[i]
+ b = edgelen(m1.getcol(j1), m2.getcol(j2))
+
+ // which contour for side c
+ if (i1 == j1) {
+ c = edgelen(m2.getcol(i2), m2.getcol(j2))
+ }else{
+ c = edgelen(m1.getcol(i1), m1.getcol(j1))
+ }
+
+ area += accurate_triangle_area(a, b, c)
+ na += 1
+ i1 = j1
+ i2 = j2
+ a = b
+ }
+//printf("stk_triang_area1 na=%d npoints=%d\n", na, m1.ncol+m2.ncol)
+ // missing one triangle
+ return area
+}
+
+func stk_triang_area() {local area, i
+ area = stk_triang_area1(this, contour_list.object(0))
+ for i=1, contour_list.count-1 {
+ area += stk_triang_area1(contour_list.object(i-1), contour_list.object(i))
+ }
+ return area
+}
+
+// the center of the centroid of the contour stack
+obfunc stk_center() {local i, len, th localobj c, centroid, x, y, z, r, lenvec
+ centroid = new Matrix(3, 1 + contour_list.count)
+ lenvec = new Vector(centroid.ncol) lenvec.resize(1)
+ x = raw.getrow(0)
+ y = raw.getrow(1)
+ z = raw.getrow(2)
+ c = contourcenter(x, y, z)
+ centroid.setcol(0, c)
+ len = 0
+ for i=0, contour_list.count-1 {
+ r = contour_list.object(i).raw
+ x = r.getrow(0)
+ y = r.getrow(1)
+ z = r.getrow(2)
+ c = contourcenter(x, y, z)
+ centroid.setcol(i+1, c)
+
+ len += sqrt(c.sub(centroid.getcol(i)).sumsq)
+ lenvec.append(len)
+ }
+ len = len/2
+ if (len == 0) {
+ c = centroid.getcol(0)
+ return c
+ }
+ i = lenvec.indwhere(">", len)
+ th = (len - lenvec.x[i-1])/(lenvec.x[i] - lenvec.x[i-1])
+ for j=0, 2 {
+ c.x[j] = th*centroid.x[j][i] + (1 - th)*centroid.x[j][i-1]
+ }
+ return c
+}
+
+func accurate_triangle_area() {local x localobj a
+ // from http://http.cs.berkeley.edu/~wkahan/Triangle.pdf
+ // W. Kahan
+ x = float_epsilon
+ float_epsilon = 0
+ a = new Vector(3) a.resize(0)
+ a.append($1, $2, $3).sort
+ if ((a.x[0] - (a.x[2] - a.x[1])) < 0) {
+ float_epsilon = x
+ execerror("accurate_triangle_area:","not a triangle")
+ }
+ float_epsilon = x
+ x = .25*sqrt((a.x[2]+(a.x[1]+a.x[0])) * (a.x[0]-(a.x[2]-a.x[1])) \
+ * (a.x[0]+(a.x[2]-a.x[1])) * (a.x[2]+(a.x[1]-a.x[0])))
+ return x
+}
+
+endtemplate Import3d_Section
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_morphml.hoc b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_morphml.hoc
new file mode 100644
index 0000000..c6801b4
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_morphml.hoc
@@ -0,0 +1,78 @@
+
+begintemplate Import3d_MorphML
+public input, filetype, type, sections, err, parsed
+public pt2id, id2pt, pt2sec, sec2pt, label, id2line
+objref type, sections, this, p, nil
+objref cables, points, cableid2index
+strdef filetype, tstr
+proc init() {
+ nrnpython("from neuron.neuroml.rdxml import rdxml")
+ //print "Import3d_MorphML"
+ filetype = "MorphML"
+ p = new PythonObject()
+}
+proc input() {
+ //print "Import3d_MorphML.input"
+ type = new Vector()
+ sections = new List(1000)
+ err = 0
+ p.rdxml($s1, this)
+}
+proc parsed() {local i, j, ip, jp localobj cab, sec, pt
+ cables = $o1.cables_
+ points = $o1.points_
+ cableid2index = $o1.cableid2index_
+ // ptid2pt = $o1.ptid2pt_
+ //print $o1, cables.__len__()
+ for i=0, cables.__len__() - 1 {
+ cab = cables._[i]
+ sec = new Import3d_Section(cab.first_, cab.pcnt_)
+ sections.append(sec)
+ if (cab.parent_cable_id_ >= 0) {
+ ip = $o1.cableid2index_[cab.parent_cable_id_]
+ sec.parentsec = sections.object(ip)
+ sec.parentx = cab.px_
+ }
+ //print i, cab.id_, cab.name_
+ for j=0, cab.pcnt_ - 1 {
+ jp = cab.first_ + j
+ pt = points._[jp]
+ sec.set_pt(j, pt.x_, pt.y_, pt.z_, pt.d_)
+ }
+ }
+}
+func pt2id() {
+ //print "pt2id ", $1
+ if ($1 < 0) { return 0 }
+ if ($1 >= points.__len__()) { return points.__len__() - 1 }
+ return $1
+}
+func id2pt() {
+ //print "id2pt ", $1
+ return $1
+}
+func pt2sec() {local cid, cindex
+ //print "pt2sec ", $1, " cid=", points._[$1].cid_
+ cid = points._[$1].cid_
+ cindex = cableid2index._[cid]
+ //print " cindex=", cindex, " first=", cables._[cindex].first_
+ $o2 = sections.object(cindex)
+ //printf("pt2sec %s\n", $o2)
+ return $1 - cables._[cindex].first_
+}
+func sec2pt() {local i localobj sec
+ sec = sections.object($1)
+ //print "sec2pnt ", $1, $2, " secid=", sec.id, " cabid=", cables._[$1].id_
+ i = sec.id + $2 - sec.fid
+ return i
+}
+func id2line() {
+ //print "id2line ", $1
+ return $1
+}
+proc label() {localobj pt
+ pt = points._[$1]
+ sprint($s2, "pt[%d] Line %d x=%g y=%g z=%g d=%g", $1, pt.lineno_, pt.x_, pt.y_, pt.z_, pt.d_)
+}
+endtemplate Import3d_MorphML
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_nlcda.hoc b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_nlcda.hoc
new file mode 100644
index 0000000..9a8e450
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_nlcda.hoc
@@ -0,0 +1,550 @@
+// Assume that except for soma, the move and line items form a tree
+// where, generally, a move is at the same point of the line to which
+// it is connected. Under this assumption, all major codes except 1 and 2
+// can be ignored.
+// An exception is the [10,5] code for branch point. The next point
+// is generally a line (not a move) with the same x,y,z as the branch point.
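+// For reference, the codes acted on in parse() below: [1,x] = LineTo,
+// [2,x] = MoveTo, [10,5] = branch point; all other codes are ignored.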
+
+begintemplate Import3d_Neurolucida_read
+public input, pheader
+public type, x, y, z, d, iline, header, point2sec, sections, lines
+public label, id2pt, id2line, pt2id, pt2sec, sec2pt, file, filetype, err
+public points, pointtype, branchpoints, firstpoints
+public helptxt, iline2pt, mark, fillproblist
+external hoc_sf_
+objref major, minor, x, y, z, d, iline, header, lines, iline2sec
+objref type, pointtype, points, iline2pt
+objref file, vectors, sec2point, point2sec, sections
+objref firstpoints, branchpoints
+objref cursec, diam, nil, gm
+objref line_branch_err, parse_err, xyparent_err, xynotnearest_err, noparent_err
+objref line_coincide_err, line_branch_err_pt, somabbox_err
+strdef tstr, line, filetype
+double a[7]
+
+proc init() {
+ filetype = "Neurolucida"
+ vectors = new List()
+ header = new List()
+ lines = new List()
+ gm = new GUIMath()
+}
+
+proc input() {
+ err = 0
+ line_branch_err = new List()
+ parse_err = new List()
+ xyparent_err = new List()
+ xynotnearest_err = new List()
+ noparent_err = new List()
+ line_coincide_err = new List()
+ somabbox_err = new List()
+ line_branch_err_pt = new Vector()
+
+ rdfile($s1)
+ find_parents()
+ repair_diam()
+ connect2soma()
+ if (err) { errout() }
+}
+
+proc repair_diam() {local i localobj sec
+ // I am told, and it seems the case, that
+ // the first point incorrectly always has the diameter of
+ // the last point of the previous branch. For this reason
+ // we set the diameter of the first point to the diameter
+ // of the second point in the section
+ for i=0, sections.count-1 {
+ sec = sections.object(i)
+ if (sec.parentsec != nil) {
+ if (sec.first < sec.d.size-1){
+ sec.d.x[sec.first] = sec.d.x[sec.first + 1]
+ }
+ }
+ }
+}
+
+proc rdfile() {local i, j
+ file = new File($s1)
+ // count lines for vector allocation space (not really necessary)
+ if (!file.ropen()) {
+ err = 1
+ printf("could not open %s\n", $s1)
+ }
+ for (i = 0; !file.eof(); i += 1) {
+ file.gets(line)
+ }
+ file.close()
+// printf("%s has %d lines\n", $s1, i)
+ alloc(i, major, minor, x, y, z, d, iline, pointtype, points)
+ diam = d
+ file.ropen()
+ for (i = 1; !file.eof(); i += 1) {
+ file.gets(line)
+ parse(i, line)
+ }
+ file.close()
+ iline2pt = new Vector(iline.x[iline.size-1])
+ j = 0
+ for i=0, points.size-2 {
+ while(j <= iline.x[points.x[i]]) {
+ iline2pt.x[j] = i
+ j += 1
+ }
+ }
+ for j=j, iline2pt.size-1 {
+ iline2pt.x[j] = points.size-1
+ }
+}
+
+proc alloc() { local i // $oi.size = 0 but enough space for $1 elements
+ for i = 2, numarg() {
+ $oi = new Vector($1)
+ $oi.resize(0)
+ vectors.append($oi)
+ }
+}
+
+func dist() {local x1, y1, z1
+ x1 = ($1 - x.x[$4])
+ y1 = ($2 - y.x[$4])
+ z1 = ($3 - z.x[$4])
+ return sqrt(x1*x1 + y1*y1 + z1*z1)
+}
+
+func xydist() {local x1, y1
+ x1 = (x.x[$1] - x.x[$2])
+ y1 = (y.x[$1] - y.x[$2])
+ return sqrt(x1*x1 + y1*y1)
+}
+
+func xysame() {
+ if ($1 == x.x[$3]) {
+ if ($2 == y.x[$3]) {
+ return 1
+ }
+ }
+ return 0
+}
+
+proc parse() {local i, n, m
+ n = sscanf($s2, "[%d,%d] (%f,%f,%f) %f", &a[0], &a[1], &a[2],\
+ &a[3], &a[4], &a[5])
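+	// e.g. "[1,2] (3.25,-1.50,10.00) 0.35" parses to major=1 minor=2,
+	// x=3.25 y=-1.50 z=10.00, and 0.35 doubled to 0.70 below
+	// (presumably a radius recorded as a diameter)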
+ hoc_sf_.left($s2, hoc_sf_.len($s2)-1)
+ if (n == 6) {
+ a[5] *= 2
+ iline_ = major.size
+ if (a[0] == 1) { // line
+ m = major.x[iline_ - 1]
+ if (m == 10 && minor.x[iline_-1] == 5) {
+ pointtype.append(0)
+ points.append(iline_)
+ if (!xysame(a[2], a[3], iline_-1)) {
+ err = 1
+ line_branch_err_pt.append(points.size-1)
+sprint(tstr, "%d: %s separated by %g from branch",\
+$1, $s2, dist(a[2], a[3], a[4], iline_-1))
+line_branch_err.append(new String(tstr))
+ }
+ }else if (m == 1 || m == 2) {
+ pointtype.append(1)
+ points.append(iline_)
+ }else{
+ pointtype.append(1)
+ points.append(iline_)
+ }
+ }else if (a[0] == 2) { // move
+ pointtype.append(0)
+ points.append(iline_)
+ }else if (a[0] == 10 && a[1] == 5) { // branch
+ pointtype.append(2)
+ points.append(iline_)
+ }else{
+ }
+ for i=0, 5 {
+ vectors.object(i).append(a[i])
+ }
+ iline.append($1) // for error messages
+ lines.append(new String($s2))
+ } else if (n == 0) { // comment
+ header.append(new String($s2))
+ } else {
+ err = 1
+ sprint(tstr, "%d: %s parse failure after item %d", $1, $s2, n)
+ parse_err.append(new String(tstr))
+ }
+}
+
+proc mark() {local i, n, a, b, c, d, e, f
+ print $o1, $2, iline, lines
+ i = iline.indwhere("==",$2)
+ printf("%d,%d: %s\n", i, iline.x[i], lines.object(i).s)
+ n = sscanf(lines.object(i).s, "[%d,%d] (%f,%f,%f) %f", &a,&b,&c,\
+ &d,&e,&f)
+ if (n == 6) {
+ print a,b,c,d,e,f
+ $o1.mark(c,d,"S",12,4,1)
+ }
+}
+
+proc pheader() {local i
+ for i=0, header.count-1 {
+ printf("%s", header.object(i).s)
+ }
+}
+
+proc find_parents() {local i, j, m, ip, jp, jpmin, d, dmin, xi,yi,zi, bp, ip1
+ // we need to associate all pointtype=0 with a branch point (except the
+	// ones conceptually connected to the soma).
+ // assume the pid is earlier than the pointtype=0
+ point2sec = points.c.fill(-1)
+ branchpoints = pointtype.c.indvwhere("==", 2)
+ firstpoints = pointtype.c.indvwhere("==", 0)
+ sections = new List()
+ type = firstpoints.c.fill(0)
+ for i=0, firstpoints.size-1 {
+ ip = points.x[firstpoints.x[i]]
+ newsec(i)
+ type.x[i] = cursec.type
+ xi = x.x[ip] yi = y.x[ip] zi = z.x[ip]
+ dmin = 1e9
+ jpmin = -1
+ m = minor.x[ip]
+		if (m == 41) { // soma start (contour)
+ continue
+/* some files use these as branch beginnings so check this after seeing if
+there are coincident points.
+ }else if (m == 1) { // dendrite start
+ continue
+ }else if (m == 21) { // axon start
+ continue
+ }else if (m == 61) { // apical dendrite start
+ continue
+*/
+ }
+ if (line_branch_err_pt.size) {
+ j = line_branch_err_pt.x[0]
+ if (ip == points.x[j]) {
+ physcon(i, ip, ip-1, j-1)
+ line_branch_err_pt.remove(0)
+ continue
+ }
+ }
+ for j=0, branchpoints.size-1 {
+ jp = points.x[branchpoints.x[j]]
+ if (ip <= jp) { break }
+ d = dist(xi, yi, zi, jp)
+ if (d < dmin) {
+ bp = branchpoints.x[j]
+ dmin = d
+ jpmin = jp
+ }
+ }
+ if (dmin <= 0) {
+ cursec.parentsec = sections.object(point2sec.x[bp])
+ }else if (m == 1) { // dendrite start
+ continue
+ }else if (m == 21) { // axon start
+ continue
+ }else if (m == 61) { // apical dendrite start
+ continue
+ }else{
+ err = 1
+sprint(tstr, "%d: %s branch at line %d is %.4g away",\
+iline.x[ip], lines.object(ip).s, iline.x[jpmin], dmin)
+ d = xydist(ip, jpmin)
+ if (d <= 0) { // overlay branch point in xy plane?
+ xyparent_err.append(new String(tstr))
+ physcon(i, ip, jpmin, bp)
+ }else if (ip > 0) {
+			// sometimes it coincides with a previous LineTo
+ ip1 = firstpoints.x[i]-1
+ d = dist(xi, yi, zi, points.x[ip1])
+ if (d <= 0) {
+sprint(tstr, "%s\n but coincides with line %d", tstr, iline.x[points.x[ip1]])
+ line_coincide_err.append(new String(tstr))
+ cursec.parentsec = sections.object(point2sec.x[ip1])
+ }else if (try_xy_coincide(i, ip)){
+ xynotnearest_err.append(new String(tstr))
+ }else{
+ noparent_err.append(new String(tstr))
+ }
+ }
+ }
+ }
+}
+
+func try_xy_coincide() {local j, jp, d, bp
+ // sometimes it coincides in the xy plane with a branch point
+ // even though it is not the nearest point and therefore we
+ // assume that is the parent point
+ for j=0, branchpoints.size-1 {
+ jp = points.x[branchpoints.x[j]]
+ if ($2 <= jp) { break }
+ d = xydist($2, jp)
+ if (d <= 0) {
+sprint(tstr, "%s\n but coincides with branch point at line %d", tstr, iline.x[jp])
+ bp = branchpoints.x[j]
+ physcon($1, $2, jp, bp)
+ return 1
+ }
+ }
+ return 0
+}
+
+proc physcon() {
+ cursec.parentsec = sections.object(point2sec.x[$4])
+ cursec.insrt(0, x.x[$3], y.x[$3], z.x[$3], d.x[$2])
+ cursec.id -= 1
+}
+
+proc newsec() {local i, ip, n, m, first, isec
+ first = firstpoints.x[$1]
+ ip = points.x[first]
+ if ($1 < firstpoints.size-1) {
+ n = firstpoints.x[$1+1] - first
+ }else{
+ n = points.size - first
+ }
+ cursec = new Import3d_Section(first, n)
+ isec = sections.count
+ sections.append(cursec)
+ for i = 0, n-1 {
+ cursec.append(i, points.x[i+first], 1, x, y, z, d)
+ point2sec.x[i+first] = isec
+ }
+ m = minor.x[ip]
+ if (m == 1 || m == 2) { // dendrite
+ cursec.type = 3
+ }else if (m == 21 || m == 22) { //axon
+ cursec.type = 2
+ }else if (m == 41 || m == 42) { // soma
+ cursec.type = 1
+ cursec.iscontour_ = 1
+ }else if (m == 61 || m == 62) { // apdendrite
+ cursec.type = 4
+ }else{
+ err = 1
+printf("%s line %d: don't know section type: %s\n",\
+ file.getname, iline.x[ip], lines.object(ip).s)
+ }
+}
+
+proc connect2soma() {local i, ip, j, jp, bp, jpmin, dmin, d, xmin, xmax, ymin, ymax localobj soma, sec, xc, yc, zc, c, psec, r
+ // find centroid of soma if outline and connect all dangling
+ // dendrites to that if inside the contour
+ for i=0, sections.count-1 {
+ sec = sections.object(i)
+ if (sec.type == 1 && sec.iscontour_ == 1) {
+ soma = sec
+ sections.remove(i)
+ sections.insrt(0, soma)
+ break
+ }
+ }
+ if (soma == nil) { return }
+ xc = soma.raw.getrow(0)
+ yc = soma.raw.getrow(1)
+ zc = soma.raw.getrow(2)
+ xmin = xc.min-.5 xmax = xc.max + .5
+ ymin = yc.min-.5 ymax = yc.max + .5
+ c = soma.contourcenter(xc, yc, zc)
+ for i=0, sections.count-1 {
+ sec = sections.object(i)
+ if (sec.parentsec == nil && sec != soma) {
+ if (gm.inside(sec.raw.x[0][0], sec.raw.x[1][0], xmin, ymin, xmax, ymax)) {
+ sec.parentsec = soma
+ sec.parentx = .5
+ sec.insrt(0, c.x[0], c.x[1], c.x[2], .01)
+ sec.id -= 1
+ sec.first = 1
+ }else{
+ // is same as end point of earlier section?
+ ip = points.x[sec2pt(i, 0)]
+ d = 1e9
+ for j=0, i-1 {
+ psec = sections.object(j)
+ jp = psec.d.size-1
+ r = psec.raw
+ d = dist(r.x[0][jp], r.x[1][jp], r.x[2][jp], ip)
+ if (d == 0) {
+ sec.parentsec = psec
+ break
+ }
+ }
+ if (d == 0) { continue }
+ ip = points.x[sec2pt(i, 0)]
+ dmin = dist(c.x[0], c.x[1], c.x[2], ip)
+ jpmin = -1
+ for j=0, branchpoints.size-1 {
+ jp = points.x[branchpoints.x[j]]
+ if (ip <= jp) { break }
+ d = dist(x.x[ip], y.x[ip], z.x[ip], jp)
+ if (d < dmin) {
+ bp = branchpoints.x[j]
+ dmin = d
+ jpmin = jp
+ }
+ }
+ err = 1
+sprint(tstr, "%d: %s is outside soma, logically connect to", iline.x[ip], lines.object(ip).s)
+ if (jpmin == -1) {
+ sprint(tstr, "%s soma", tstr)
+ sec.parentsec = soma
+ sec.insrt(0, c.x[0], c.x[1], c.x[2], .01)
+ sec.id -= 1
+ }else{
+ jp = jpmin
+ sprint(tstr, "%s %d", tstr, iline.x[jp])
+ sec.parentsec = sections.object(point2sec.x[bp])
+ sec.insrt(0, x.x[jp], y.x[jp], z.x[jp], .01)
+ sec.id -= 1
+ }
+ sec.first = 1
+ somabbox_err.append(new String(tstr))
+ }
+ }
+ }
+}
+
+// note selpoint defined in swc_gui.hoc as sec.id + j
+// selpoint is the points index
+// ie. the first points of the sections are firstpoints
+proc label() {local i
+ i = points.x[$1]
+ sprint($s2, "Line %d: %s", iline.x[i], lines.object(i).s)
+}
+func id2pt() {
+ if ($1 < 0) { return -1 }
+ if ($1 >= iline2pt.size) { return iline2pt.x[iline2pt.size-1]}
+ return iline2pt.x[$1]
+}
+func id2line() { return points.x[$1] }
+func pt2id() {
+ if ($1 < 0) {return -1}
+ return iline.x[points.x[$1]]
+}
+func pt2sec() {local i, j
+ i = firstpoints.indwhere(">", $1)
+ if (i == -1) {
+ i = firstpoints.size
+ }
+ $o2 = sections.object(i-1)
+ j = $1 - $o2.id
+ return j
+}
+func sec2pt() {
+//print "sec2pt ", $1, $2, sections.object($1).id
+ return sections.object($1).id + $2
+}
+
+proc helptxt() {
+ xpanel("Neurolucida file filter characteristics")
+xlabel(" The only lines utilized are [1,x], [2,x], and [5,10]. i.e , LineTo,")
+xlabel("MoveTo, and Branch lines. ")
+xlabel(" Sections generally consist of MoveTo followed by sequence of LineTo,")
+xlabel("and possibly ending with Branch. Intervening lines of other major types")
+xlabel("are ignored. ")
+xlabel(" The type of the section (dendrite, axon, soma outline, or apical) is")
+xlabel("determined by the minor code of the first point in the branch. ")
+xlabel(" Coincidence of the first x,y,z point of a section with the last")
+xlabel("(branch) point of some section defines a connection between child and")
+xlabel("parent section. However most files contain errors and the following")
+xlabel("heuristics are applied to the first points of problem sections when the")
+xlabel("parent is not obvious. EACH PROBLEM POINT SHOULD BE EXAMINED to")
+xlabel("determine if the correction is suitable. ")
+xlabel(" 1) The first point after a Branch point is a MoveTo which is")
+xlabel("coincident in the xy plane but not in the z axis. A physical connection")
+xlabel("is made with the diam of the MoveTo. ")
+xlabel(" 2) The nearest branch point is coincident in the xy plane. A physical")
+xlabel("connection is made with the diam of the MoveTo.")
+xlabel(" 3) There is no coincident branchpoint in the xy plane but the MoveTo")
+xlabel("is 3-d coincident with the preceding LineTo point. A logical connection")
+xlabel("is made to the section containing the LineTo point.")
+xlabel(" 4) There is an xy plane coincident branch point but it is not the")
+xlabel("nearest in a 3-d sense. A physical connection is made to the section")
+xlabel("containing the xy plane coincident point. ")
+xlabel(" 5) The first point of the branch is not a soma, dendrite, axon, or")
+xlabel("apical start point and there is no xy plane coincident branch point. ")
+xlabel("The branch remains unattached (but see heuristic 6). ")
+xlabel(" 6) All unattached branches within 0.5 microns of the soma contour")
+xlabel("bounding box are logically connected to the soma contour section. ")
+xlabel("I am told, and it seems to be the case, that the first point in a")
+xlabel("branch always has a diameter value of the last point in the previous")
+xlabel("branch. For this reason we set the first point to the diameter of")
+xlabel("of the second point in each section that has a parent branch.")
+xlabel("If this is not the right thing to do then comment out the call to")
+xlabel("repair_diam() in the input() procedure of read_nlcda.hoc")
+ xpanel(1)
+}
+
+proc errout() {local i
+ printf("\n%s problems and default fixes\n\n", file.getname)
+ if (parse_err.count) {
+ printf(" Following lines could not be parsed\n")
+ for i=0, parse_err.count-1 {
+ printf(" %s\n", parse_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (line_branch_err.count) {
+printf(" LINETO follows branch and does not coincide in the xy plane.\n")
+printf(" Make a physical connection using the LINETO diameter.\n")
+ for i = 0, line_branch_err.count-1 {
+ printf(" %s\n", line_branch_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (xyparent_err.count) {
+ printf(" Nearest branch point is coincident in xy plane.\n Make a physical connection with diam of the MOVETO\n")
+ for i=0, xyparent_err.count-1 {
+ printf(" %s\n", xyparent_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (line_coincide_err.count) {
+		printf("  No coincident branchpoint in xy plane but 3-d coincident to previous LINETO\n")
+ printf(" point. Make a logical connection to the section containing that LINETO\n")
+ for i=0, line_coincide_err.count-1 {
+ printf(" %s\n", line_coincide_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (xynotnearest_err.count) {
+ printf(" The xy plane coincident branch point is not the nearest in the 3-d sense.\n")
+ printf(" However we connect physically to the indicated xy coincident branch point\n")
+ for i=0, xynotnearest_err.count-1 {
+ printf(" %s\n", xynotnearest_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (noparent_err.count) {
+ printf(" Cannot figure out which is the parent\n")
+ printf(" No coincident (even in xy plane) branch point.\n")
+ for i=0, noparent_err.count-1 {
+ printf(" %s\n", noparent_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (somabbox_err.count) {
+ printf(" Unconnected branch is more than .5 microns outside the soma bounding box.\n")
+ printf(" Connect logically to nearest branch point\n")
+ for i=0, somabbox_err.count-1 {
+ printf(" %s\n", somabbox_err.object(i).s)
+ }
+ printf("\n")
+ }
+}
+
+proc fillproblist() {
+ fillproblist1($o1, parse_err, line_branch_err, xyparent_err, line_coincide_err, xynotnearest_err, noparent_err, somabbox_err)
+}
+proc fillproblist1() { local i, j
+ for i=2, numarg() {
+ for j=0, $oi.count-1 {
+ $o1.append($oi.object(j))
+ }
+ }
+}
+
+endtemplate Import3d_Neurolucida_read
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_nlcda3.hoc b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_nlcda3.hoc
new file mode 100644
index 0000000..0402dbb
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_nlcda3.hoc
@@ -0,0 +1,1194 @@
+// Read a Neurolucida V3 text file written for MicroBrightField products.
+// The format is given by a context free grammar that would be easy
+// to handle with lex/yacc but we can do reasonably well using recursive descent
+// that more or less matches the production rules of the grammar.
+// Presently we handle only contours and trees, with spines ignored.
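+// Rough sketch of the grammar accepted below (inferred from the parser
+// procedures; a reading aid, not MicroBrightField's official grammar):
+//   file    : object { optionalcomma object }
+//   object  : contour | marker | property | tree | text | set
+//   contour : '(' string properties points ')'
+//   tree    : '(' properties branch ')'
+//   branch  : points branchend        branchend : [markerlists] [node]
+//   node    : '(' branch { '|' branch } ')' | label
+//   point   : '(' x ',' y [',' z [',' d]] ')'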
+
+begintemplate Branch2SomaInfo
+// info to carry out decision about which to connect to for
+// possible root branch mistakes
+// may have to split the parent
+public sec, sindex, pbranch, ipoint, d2p, d2s, connected2p
+objref sec, pbranch
+proc init() {
+ sec = $o1
+ pbranch = $o2
+ sindex = $3
+ d2p = $4
+ d2s = $5
+ ipoint = $6
+ connected2p = 0
+}
+endtemplate Branch2SomaInfo
+
+begintemplate Import3d_LexToken
+public token, x, s, itok, iline, clone
+strdef s
+token = 0
+x = 0
+itok = 0
+iline = 0
+obfunc clone() { localobj r
+ r = new Import3d_LexToken()
+ r.s = s
+ r.token = token
+ r.x = x
+ r.itok = itok
+ r.iline = iline
+ return r
+}
+endtemplate Import3d_LexToken
+
+begintemplate Import3d_Neurolucida3
+public type
+public filetype, input, file, sections
+public label, id2pt, id2line, pt2id, pt2sec, sec2pt, helptxt, mark, err, b2spanel
+public x, y, z, d, iline, lines, quiet
+external hoc_sf_
+objref type, firstpoints, gm, plist
+objref current, look_ahead, look_ahead2
+objref file, tokens, sections, cursec, parentsec, nil
+objref x, y, z, d, iline, lines
+objref somas, centers, b2serr, b2sinfo
+strdef line, tstr, tstr2, filetype, fline
+
+proc init() {
+ quiet = 0
+ debug_on = 0
+ gm = new GUIMath()
+ filetype = "Neurolucida V3"
+ current = new Import3d_LexToken()
+ look_ahead = new Import3d_LexToken()
+ look_ahead2 = new Import3d_LexToken()
+ eof=0
+ number=1 leftpar=2 rightpar=3 comma=4 bar=5
+ set=6 rgb=7 string=8 label_=9 err_=10
+ leftsp=11 rightsp=12
+ tokens = new List()
+ tokensappend("eof", "number", "leftpar", "rightpar", "comma", "bar")
+ tokensappend("set", "rgb", "string", "label", "err")
+ tokensappend("leftsp", "rightsp")
+ plist = new List()
+}
+proc tokensappend() {local i
+ for i=1, numarg() {
+ tokens.append(new String($si))
+ }
+}
+
+proc input() {
+ b2serr = new List()
+ b2sinfo = new List()
+ nspine = 0
+ err = 0
+ type = new Vector()
+ sections = new List(1000)
+ alloc(25000, x, y, z, d, iline)
+ lines = new List(25000)
+ itoken = 0
+ depth = 0
+ rdfile($s1)
+ firstpoints = new Vector(sections.count)
+ set_firstpoints()
+ connect2soma()
+ if (err) { errout() }
+}
+
+proc set_firstpoints() {local i
+ firstpoints.resize(sections.count)
+ for i=0, sections.count-1 {
+ firstpoints.x[i] = sections.object(i).id
+ }
+}
+
+proc alloc() {local i
+ for i=2, numarg() {
+ $oi = new Vector($1)
+ $oi.resize(0)
+ }
+}
+proc connect2soma() {local i, j, imin, d, dmin localobj sec, roots, xx
+ // first make sure all somas are at the beginning
+ centers = new List()
+ j = 0 // next soma index
+ somas = new List()
+ roots = new List()
+ for i=0, sections.count-1 {
+ sec = sections.object(i)
+ if (sec.iscontour_) {
+ if (i > j) {
+ sections.remove(i)
+ sections.insrt(j, sec)
+ }
+ somas.append(sec)
+ j += 1
+ }
+ }
+ // mark the soma contours that are part of a
+ // contour stack and link them into a list
+ // that is in the main contour section.
+ // we do not remove them from the sections since
+ // we want to be able to select their points
+ soma_contour_stack()
+ for i=0, sections.count-1 {
+ sec = sections.object(i)
+ if (!sec.iscontour_) if (sec.parentsec == nil) {
+ roots.append(sec)
+ }
+ }
+ if (somas.count == 0) { return }
+ // note that j is the number of soma's
+ for i = 0, somas.count-1 {
+ connect2soma_2(somas.object(i), roots)
+ }
+ for i=0, roots.count-1 {
+ sec = roots.object(i)
+ xx = sec.raw.getcol(0)
+ dmin = 1e9
+ for j=0, centers.count-1 {
+ d = xx.c.sub(centers.object(j)).mag
+ if (d < dmin) {
+ imin = j
+ dmin = d
+ }
+ }
+ err = 1
+ xx = centers.object(imin)
+ sprint(tstr, "\nMain branch starting at line %d is outside the soma bounding boxes", pt2id(sec.id))
+ b2serr.append(new String(tstr))
+ sprint(tstr, " Making a logical connection to center of nearest soma")
+ b2serr.append(new String(tstr))
+ sec.parentsec = somas.object(imin)
+ sec.parentx = .5
+ sec.insrt(0, xx.x[0], xx.x[1], xx.x[2], .01)
+ sec.first = 1
+ sec.fid = 1
+ opt_connect(sec, imin, dmin)
+ }
+}
+
+proc soma_contour_stack() {local i, j localobj bb1, bb2, first, next
+ // if soma contour bounding boxes overlap, treat as single soma
+ if (somas.count == 0) return
+ first = somas.object(0)
+ bb1 = bounding_box(first)
+ j = 0
+ for i = 1, somas.count-1 {
+ j += 1
+ next = somas.object(j)
+ bb2 = bounding_box(next)
+ if (xy_intersect(bb1, bb2)) {
+ if (!object_id(first.contour_list)) {
+ first.contour_list = new List()
+ }
+ first.contour_list.append(next)
+ next.is_subsidiary = 1
+ somas.remove(j)
+ j -= 1
+ }else{
+ first = next
+ }
+ bb1 = bb2
+ }
+ for i=0, somas.count-1 {
+ somastack_makes_sense(somas.object(i))
+ somastack_process(somas.object(i))
+ }
+}
+
+obfunc bounding_box() {localobj bb
+ bb = new Vector(6)
+ bb.x[0] = $o1.raw.getrow(0).min
+ bb.x[1] = $o1.raw.getrow(1).min
+ bb.x[2] = $o1.raw.getrow(2).min
+ bb.x[3] = $o1.raw.getrow(0).max
+ bb.x[4] = $o1.raw.getrow(1).max
+ bb.x[5] = $o1.raw.getrow(2).max
+ return bb
+}
+
+func xy_intersect() {local i
+ for i = 0, 1 {
+if ($o1.x[i] > $o2.x[3+i] || $o2.x[i] > $o1.x[3+i]) { return 0 }
+ }
+ return 1
+}
+
+proc somastack_makes_sense() {local i, j, z, z2, dz, dz2 localobj sec
+ if (!object_id($o1.contour_list)) { return }
+ // the soma stack must be monotonic in the z axis and all points
+ // on a contour must have same z value.
+ z = $o1.raw.x[2][0]
+ for i = 1, $o1.raw.ncol-1 if (z != $o1.raw.x[2][i]) {
+ sprint(tstr, "Soma stack contour %s does not have constant z value.", $o1)
+ b2serr.append(new String(tstr))
+		b2serr.append(new String("  Soma area calculation may be seriously in error."))
+ return
+ }
+ dz = 0
+ for j=0, $o1.contour_list.count-1 {
+ sec = $o1.contour_list.object(j)
+ z2 = sec.raw.x[2][0]
+ dz2 = z2 - z
+ if (dz2 == 0) {
+			sprint(tstr, "Adjacent contour %d of soma stack %s has the same z coordinate as the previous one.", j, $o1)
+ b2serr.append(new String(tstr))
+ return
+ }else if (dz2 > 0) {
+ dz2 = 1
+ }else{
+ dz2 = -1
+ }
+ if (dz == 0) {
+ dz = dz2
+ }else if (dz != dz2) {
+ sprint(tstr, "Contour %d of the Soma stack %s is not monotonic in z.", j, $o1)
+ b2serr.append(new String(tstr))
+ b2serr.append(new String(" Manually edit the neurolucida file and reorder or eliminate some contours."))
+ b2serr.append(new String(" Presently the soma surface is nonsense."))
+ return
+ }
+ z = z2
+ for i = 1, sec.raw.ncol-1 if (z != sec.raw.x[2][i]) {
+ sprint(tstr, "contour %d of the Soma stack %s does not have constant z value.", j, $o1)
+ b2serr.append(new String(tstr))
+			b2serr.append(new String("  Soma area calculation may be seriously in error."))
+ return
+ }
+ }
+}
+
+proc somastack_process() {local i, j, n, n1 localobj pts, m, center, pv
+ if (!object_id($o1.contour_list)) { return }
+ printf("somastack_process %d\n", $o1.contour_list.count + 1)
+	// The stack defines a volume. Determine the principal axes
+ // and slice the volume along the major axis, approximating
+ // each slice by a circle and shifting the circle to be
+ // along the major axis. So the set of soma contours ends
+	// up being one straight cylindrically symmetric soma centroid
+ // note then that curved carrots don't look quite right but
+ // straight carrots do.
+
+ // for each contour use 100 points equally spaced.
+ // we should, but do not, make the stack equally spaced.
+ // then all the points are used to find the principle axes
+ // this pretty much follows the corresponding analysis in
+ // Import3d_GUI
+ // Heck. Let's just use all the contour points and approximate
+ // the thing as an ellipsoid
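+	// That is, find the eigensystem of the 3x3 scatter matrix
+	//   M[i][j] = sum_k (p_k[i] - c[i]) * (p_k[j] - c[j])
+	// built below from the centered points; symmeig returns the eigenvalues
+	// (principal values) and fills its matrix argument with the eigenvectors
+	// (principal axes).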
+
+ // copy all the centroids into one matrix
+ // size of matrix
+	n = $o1.raw.ncol  // points are the columns of the 3 x n matrix raw
+	for i=0, $o1.contour_list.count-1 { n += $o1.contour_list.object(i).raw.ncol}
+ pts = new Matrix(3, n)
+ n = 0
+	n1 = $o1.raw.ncol
+ $o1.raw.bcopy(0, 0, 3, n1, 0, n, pts)
+ n = n1
+ for i=0, $o1.contour_list.count-1 {
+		n1 = $o1.contour_list.object(i).raw.ncol
+ $o1.contour_list.object(i).raw.bcopy(0, 0, 3, n1, 0, n, pts)
+ n += n1
+ }
+ center = new Vector(3)
+ for i=0, 2 { center.x[i] = pts.getrow(i).mean }
+ printf("center\n") center.printf
+
+ //principle axes
+ m = new Matrix(3,3)
+ for i=0, 2 { pts.setrow(i, pts.getrow(i).sub(center.x[i])) }
+ for i=0, 2 {
+ for j=i, 2 {
+ m.x[i][j] = pts.getrow(i).mul(pts.getrow(j)).sum
+ m.x[j][i] = m.x[i][j]
+ }
+ }
+ pv = m.symmeig(m)
+	printf("Principal values\n") pv.printf()
+	printf("Principal axes\n") m.printf()
+}
+
+proc stk_bbox() {local i, j localobj bbs, bbc
+ bbs = bounding_box($o1)
+ for i=0, $o1.contour_list.count-1 {
+ bbc = bounding_box($o1.contour_list.o(i))
+ for j=0, 2 {
+ if (bbs.x[j] > bbc.x[j]) bbs.x[j] = bbc.x[j]
+ if (bbs.x[j+3] < bbc.x[j+3]) bbs.x[j+3] = bbc.x[j+3]
+ }
+ }
+ $&2 = bbs.x[0] $&3 = bbs.x[3] $&4 = bbs.x[1] $&5 = bbs.x[4]
+}
+
+proc connect2soma_2() {local i, xmin, xmax, ymin, ymax localobj sec, xc, yc, zc, center
+ // find centroid of soma if outline and connect all dangling
+ // dendrites to that if inside the contour
+ if (object_id($o1.contour_list)) {
+ center = $o1.stk_center()
+ stk_bbox($o1, &xmin, &xmax, &ymin, &ymax)
+ }else{
+ xc = $o1.raw.getrow(0)
+ yc = $o1.raw.getrow(1)
+ zc = $o1.raw.getrow(2)
+ xmin = xc.min-.5 xmax = xc.max + .5
+ ymin = yc.min-.5 ymax = yc.max + .5
+ center = $o1.contourcenter(xc, yc, zc)
+ }
+ centers.append(center)
+
+ for (i=$o2.count-1; i >= 0; i -= 1) {
+ sec = $o2.object(i)
+ if (gm.inside(sec.raw.x[0][0], sec.raw.x[1][0], xmin, ymin, xmax, ymax)) {
+ sec.parentsec = $o1
+ sec.parentx = .5
+ sec.insrt(0, center.x[0], center.x[1], center.x[2], .01)
+ sec.first = 1
+ sec.fid = 1
+ $o2.remove(i)
+ }
+ }
+}
+
+proc opt_connect() {local i, j, d, dmin, imin, n, ip localobj psec, xx
+ dmin = 1e9
+ xx = $o1.raw.getcol(1)
+ for i=0, sections.count - 1 {
+ psec = sections.object(i)
+ if (psec == $o1) { break }
+ n = psec.raw.ncol
+ for j=0, n-1 {
+ d = xx.c.sub(psec.raw.getcol(j)).set(2,0).mag
+ if (d < dmin) {
+ dmin = d
+ imin = i
+ ip = j
+ }
+ }
+ }
+ if (dmin == 1e9) { return }
+ psec = sections.object(imin)
+// if (dmin < psec.d.x[psec.d.size-1]) {
+ if (dmin < $3) {
+ b2sinfo.append(new Branch2SomaInfo($o1, psec, $2, dmin, $3, ip))
+ }
+}
+
+proc b2spanel() {local i localobj b2s
+ if (b2sinfo.count == 0) { return }
+ xpanel("Possible root branch errors")
+ xlabel("Default logical connection to nearest soma.")
+ xlabel("Check to physically connect to closest parent")
+ xlabel(" in the xy plane.")
+ xlabel(" (Note: may split the parent into two sections)")
+ for i=0, b2sinfo.count -1 {
+ b2s = b2sinfo.object(i)
+sprint(tstr, "Line #%d connect to #%d %g (um) away", pt2id(sec2pto(b2s.sec, 1)), \
+pt2id(sec2pto(b2s.pbranch, b2s.ipoint)), b2s.d2p)
+sprint(tstr2, "b2soption_act(%d, \"%s\")", i, $o1)
+ xcheckbox(tstr, &b2s.connected2p(), tstr2)
+ }
+ xpanel()
+}
+
+proc b2soption_act() {local i localobj b2s, sec, parent, soma, xx
+ b2s = b2sinfo.object($1)
+ sec = b2s.sec
+ soma = somas.object(b2s.sindex)
+ parent = b2s.pbranch
+ if (sec.parentsec == soma) { // connect to parent
+ if (b2s.ipoint != parent.raw.ncol-1) { // need to split
+ b2soption_split(b2s)
+ parent = b2s.pbranch
+ set_firstpoints()
+ }
+ xx = parent.raw.getcol(b2s.ipoint)
+ sec.parentsec = parent
+ sec.parentx = 1
+ sec.raw.setcol(0, xx)
+ sec.d.x[0] = sec.d.x[1]
+ sec.first = 0
+ sec.fid = 1
+ }else{ // connect to soma
+ xx = centers.object(b2s.sindex)
+ sec.parentsec = soma
+ sec.parentx = .5
+ sec.raw.setcol(0, xx)
+ sec.d.x[0] = .01
+ sec.first = 1
+ sec.fid = 1
+ }
+ sprint(tstr, "%s.redraw()", $s2)
+ execute(tstr)
+}
+
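+// b2soption_split cuts parent section p at point ip: a new section (newsec)
+// takes points 0..ip of p and is spliced in as p's parent, while p keeps
+// points ip..n-1. Any other Branch2SomaInfo entries that referenced p are
+// re-pointed to whichever piece now owns their point.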
+proc b2soption_split() {local i, n, id, ip localobj p, newsec, tobj
+ p = $o1.pbranch
+ ip = $o1.ipoint
+ id = sec2pto(p, ip)
+ n = p.raw.ncol
+ newsec = new Import3d_Section(p.id, ip+1)
+ p.id = id
+
+ tobj = p.raw.c
+ tobj.bcopy(0,0,3,ip+1,newsec.raw)
+ p.raw.resize(3, n - ip)
+ p.xyz.resize(3, n - ip)
+ tobj.bcopy(0, ip, 3, n - ip, p.raw)
+
+ tobj = p.d.c
+ newsec.d.copy(tobj, 0, ip)
+ p.d.resize(n - ip)
+ p.d.copy(tobj, ip, n-1)
+
+ newsec.parentsec = p.parentsec
+ p.parentsec = newsec
+ newsec.parentx = p.parentx
+ p.parentx = 1
+ newsec.type = p.type
+ newsec.first = p.first
+ newsec.fid = p.fid
+ p.first = 0
+ p.fid = 0
+ newsec.type = p.type
+ $o1.pbranch = newsec
+ $o1.ipoint = newsec.d.size-1
+ // now adjust any screwed up b2sinfo items that also reference p
+ for i=0, b2sinfo.count-1 {
+ tobj = b2sinfo.object(i)
+ if (tobj == $o1) { continue }
+ if (tobj.pbranch == p) {
+ if (tobj.ipoint <= ip) { // on newsec
+ tobj.pbranch = newsec
+ }else{ // still on p
+ tobj.ipoint -= ip
+ }
+ }
+ }
+ sections.insrt(sections.index(p), newsec)
+}
+
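+// lex consumes one token from the head of `line`, refilling from the file as
+// needed; text from ';' to the end of a line is skipped as a comment. For
+// example, the input ` ("CellBody"` yields leftpar and then string (with
+// $o1.s == "CellBody").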
+func lex() {local n
+ $o1.x = 0
+ $o1.s = ""
+ while (hoc_sf_.len(line) <= 1 || sscanf(line, " ;%[^@]", line) == 1) {
+ if (!getline(fline)) {
+ $o1.token = eof
+ itoken += 1
+ $o1.itok = itoken
+ $o1.iline = iline_
+ return eof
+ }
+ line = fline
+ hoc_sf_.left(fline, hoc_sf_.len(fline)-1)
+ }
+ if (sscanf(line, " %lf%[^@]", &$o1.x, line) == 2) {
+ $o1.token = number
+ }else if (sscanf(line, " (%[^@]", line) == 1) {
+ $o1.token = leftpar
+ }else if (sscanf(line, " )%[^@]", line) == 1) {
+ $o1.token = rightpar
+ }else if (sscanf(line, " ,%[^@]", line) == 1) {
+ $o1.token = comma
+ }else if (sscanf(line, " |%[^@]", line) == 1) {
+ $o1.token = bar
+ }else if (sscanf(line, " <%[^@]", line) == 1) {
+ $o1.token = leftsp
+ }else if (sscanf(line, " >%[^@]", line) == 1) {
+ $o1.token = rightsp
+ }else if (sscanf(line, " set %[^@]", line) == 1) {
+ $o1.token = set
+ }else if (sscanf(line, " Set %[^@]", line) == 1) {
+ $o1.token = set
+ }else if (sscanf(line, " SET %[^@]", line) == 1) {
+ $o1.token = set
+ }else if (sscanf(line, " RGB %[^@]", line) == 1) {
+ $o1.token = rgb
+ }else if ((n = sscanf(line, " \"%[^\"]\"%[^@]", $o1.s, line)) > 0) {
+ // not allowing quotes in quote
+ $o1.token = string
+ if (n == 1) {
+			printf("Lexical error: no closing '\"' in string. The entire line %d is\n", iline_)
+ printf("|%s|\n", fline)
+ line = ""
+ $o1.token = err_
+ }
+ }else if (sscanf(line, " %[A-Za-z0-9_]%[^@]", $o1.s, line) == 2) {
+ $o1.token = label_
+ }else{
+ $o1.token = err_
+ }
+ itoken += 1
+ $o1.itok = itoken
+ $o1.iline = iline_
+ return $o1.token
+}
+
+func getline() {
+ if (file.eof) {
+ if (!quiet) {
+ printf("\r%d lines read\n", iline_)
+ }
+ return 0
+ }
+ file.gets($s1)
+ iline_ += 1
+// printf("%d: %s", iline_, $s1)
+ if ((iline_%1000) == 0) {
+ if (!quiet) {
+ printf("\r%d lines read", iline_)
+ }
+ }
+ return 1
+}
+
+proc rdfile() {local i
+ iline_ = 0
+ file = new File($s1)
+ if (!file.ropen()) {
+ err = 1
+ printf("could not open %s\n", $s1)
+ }
+ for (i=0; !file.eof(); i += 1) {
+ file.gets(line)
+ }
+ alloc(i, x, y, z, d, iline)
+ file.close
+ lines = new List(25000)
+ line=""
+ if (!quiet) {
+ printf("\n")
+ }
+ file.ropen()
+ p_file()
+ file.close
+}
+
+objref rollback
+
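+// Rollback machinery for the tree/text ambiguity in tree_or_text(): while a
+// rollback list exists, every token read past the saved current/look_ahead/
+// look_ahead2 window is also appended to the list, so use_rollback() can
+// rewind and replay the same tokens for the alternate parse.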
+proc save_for_rollback() {
+ if (object_id(rollback)) {
+ printf("rollback in use\n")
+ p_err()
+ }
+ rollback = new List()
+ rollback.append(current.clone())
+ rollback.append(look_ahead.clone())
+ rollback.append(look_ahead2.clone())
+ use_rollback_ = 0
+}
+proc use_rollback() {
+ use_rollback_ = 1
+ current = rollback.o(0) rollback.remove(0)
+ look_ahead = rollback.o(0) rollback.remove(0)
+ look_ahead2 = rollback.o(0) rollback.remove(0)
+ if (rollback.count == 0) {clear_rollback()}
+}
+proc clear_rollback() {localobj nil
+ rollback = nil
+ use_rollback_ = 0
+}
+
+proc read_next_token() {
+ if (use_rollback_) {
+ current = look_ahead
+ look_ahead = look_ahead2
+ look_ahead2 = rollback.o(0)
+ rollback.remove(0)
+ if (rollback.count == 0) {
+ clear_rollback()
+ }
+ }else{
+ read_next_token_lex()
+ if (object_id(rollback)){
+ rollback.append(look_ahead2.clone())
+ }
+ }
+}
+proc read_next_token_lex() {localobj tobj
+ tobj = current
+ current = look_ahead
+ look_ahead = look_ahead2
+ look_ahead2 = tobj
+ if (look_ahead.token != eof) {
+ lex(look_ahead2)
+ }else{
+ look_ahead2.token = eof
+ }
+// printf("current token=%s x=%g s=%s\n", tokens.object(current.token).s, current.x, current.s)
+}
+
+func need_extra() {local i, n localobj m
+ if (parentsec == nil) { return 0 }
+ m = parentsec.raw
+ n = m.ncol-1
+ if ( m.x[0][n] == x.x[$1]) {
+ if ( m.x[1][n] == y.x[$1]) {
+ if ( m.x[2][n] == z.x[$1]) {
+ return 0
+ }
+ }
+ }
+ return 1
+}
+proc newsec() {local i, n, first, n1 localobj m
+ first = 0
+ n = $2 - $1
+ if (need_extra($1)) {
+ cursec = new Import3d_Section($1, n+1)
+ first = 1
+ cursec.fid = 1
+ m = parentsec.raw
+ n1 = m.ncol-1
+ cursec.set_pt(0, m.x[0][n1], m.x[1][n1], m.x[2][n1], d.x[$1])
+ }else{
+ cursec = new Import3d_Section($1, n)
+ }
+ cursec.type = sectype
+ type.append(sectype)
+ sections.append(cursec)
+ cursec.append(first, $1, n, x, y, z, d)
+}
+proc set_sectype() {localobj tobj
+ sectype = 0
+ if (plist.count) {
+ tobj = plist.object(plist.count-1)
+ if (strcmp(tobj.s, "Axon") == 0) {
+ sectype = 2
+ }else if (strcmp(tobj.s, "Dendrite") == 0) {
+ sectype = 3
+ }else if (strcmp(tobj.s, "Apical") == 0) {
+ sectype = 4
+ }
+ }
+}
+
+proc label() {
+ sprint($s2, "Line %d: %s", iline.x[$1], lines.object($1).s)
+}
+func id2pt() {local i
+ i = iline.indwhere(">=", $1)
+ if (i < 0) { i = iline.size-1 }
+ return i
+}
+func id2line() { return $1 }
+func pt2id() {local i
+ i = $1
+	if (i < 0) { i = 0 }
+ if (i >= iline.size) { i = iline.size-1 }
+ return iline.x[i]
+}
+func pt2sec() {local i, j
+ i = firstpoints.indwhere(">", $1)
+ if (i == -1) {
+ i = firstpoints.size
+ }
+ $o2 = sections.object(i-1)
+ j = $1 - $o2.id + $o2.fid
+ return j
+}
+func sec2pt() {local i localobj sec
+ sec = sections.object($1)
+ i = sec.id + $2 - sec.fid
+ return i
+}
+func sec2pto() {local i localobj sec
+ sec = $o1
+ i = sec.id + $2 - sec.fid
+ return i
+}
+proc mark() {local i
+ print $o1, $2, iline, lines
+ i = iline.indwhere("==", $2)
+ if (i != -1) {
+ printf("%d,%d,%d (%g,%g): %s\n", $2, iline.x[i], i, x.x[i], y.x[i], lines.object(i).s)
+ $o1.mark(x.x[i], y.x[i], "S",12,4,1)
+ }
+}
+
+proc helptxt() {
+ xpanel("Neurolucida V3 file filter characteristics")
+xlabel("The elaborate file format is handled by a reasonably complete")
+xlabel("recursive descent parser that more or less matches the production")
+xlabel("rules for the grammar. However, at present, only contours and trees")
+xlabel("are given any semantic actions (in particular, spines are ignored).")
+ xpanel(1)
+}
+
+proc chk() {
+ if (current.token != $1) { p_err() }
+}
+proc demand() {
+ read_next_token()
+ chk($1)
+}
+proc pcur() {
+ printf("itok=%d on line %d token=%s x=%g s=%s\n", current.itok, current.iline, tokens.object(current.token).s, current.x, current.s)
+}
+proc plook() {
+// printf("lookahead: itok=%d token=%s x=%g s=%s\n", look_ahead.itok, tokens.object(look_ahead.token).s, look_ahead.x, look_ahead.s)
+}
+proc enter() {local i
+ if (debug_on == 0) {return}
+ for i=1, depth {printf(" ")}
+ printf("enter %s: ", $s1)
+ pcur()
+ depth += 1
+}
+proc leave() {local i
+ if (debug_on == 0) {return}
+ depth -= 1
+ for i=1, depth {printf(" ")}
+ printf("leave %s: ", $s1)
+ pcur()
+}
+// p stands for production if needed to avoid conflict with variable
+proc p_file() {
+ look_ahead2.token = eof
+ look_ahead.token = eof
+ if (lex(current) != eof) {
+ if (lex(look_ahead) != eof) {
+ lex(look_ahead2)
+ }
+ }
+ enter("p_file")
+ objects()
+ leave("p_file")
+}
+proc objects() {
+ enter("objects")
+ object()
+ while(1) {
+ optionalcomma()
+ if (current.token != leftpar) {
+ break
+ }
+ object()
+ }
+ leave("objects")
+}
+proc object() {local i
+ i = current.itok
+ enter("object")
+ if (current.token == leftpar) {
+ plook()
+ if (look_ahead.token == string) {
+ contour()
+ }else if (look_ahead.token == label_) {
+ marker_or_property()
+ }else if (look_ahead.token == leftpar) {
+ tree_or_text()
+ }else if (look_ahead.token == set) {
+ p_set()
+ }else{
+ p_err()
+ }
+ }else{
+ p_err()
+ }
+ leave("object")
+ if (i == current.itok) {
+ print "internal error: ", "object consumed no tokens"
+ stop
+ }
+}
+proc marker_or_property() {
+ enter("marker_or_property")
+ if (look_ahead2.token == leftpar) {
+ marker()
+ }else{
+ property()
+ }
+ leave("marker_or_property")
+}
+proc tree_or_text() {
+ // the tree and text productions are poorly conceived since they
+ // match each other for arbitrarily long sequences of Properties tokens.
+ // And after the properties they both have a Point.
+ // For now just assume it is a tree.
+ // It will be painful to consume the [ '(' Properties Point ] here
+ // and then disambiguate between Tree or Text and then more
+ // often than not, start the tree production after having already
+ // read the first point (Branch currently assumes it is supposed
+ // to read the first point of the tree.)
+ enter("tree_or_text")
+ save_for_rollback()
+ if (text()) {
+ clear_rollback()
+ }else{
+ use_rollback()
+ tree()
+ }
+ leave("tree_or_text")
+}
+proc properties() {
+ enter("properties")
+ plist.remove_all()
+ if (current.token == leftpar) {
+ if(look_ahead.token == label_ || look_ahead.token == set) {
+ property_or_set()
+ while (1) {
+ optionalcomma()
+if (current.token != leftpar || (look_ahead.token != label_ && look_ahead.token != set)) {
+ break
+ }
+ property_or_set()
+ }
+ }
+ }
+ leave("properties")
+}
+proc property_or_set() {
+ if (look_ahead.token == label_) {
+ property()
+ }else{
+ p_set()
+ }
+}
+proc property() {
+ enter("property")
+ chk(leftpar)
+ demand(label_)
+ plist.append(new String(current.s))
+ read_next_token()
+ optionalvalues()
+ chk(rightpar)
+ read_next_token()
+ leave("property")
+}
+proc optionalvalues() {local c
+ enter("optionalvalues")
+ c = current.token
+ if (c == number || c == string || c == label_ || c == rgb) {
+ values()
+ }
+ leave("optionalvalues")
+}
+proc values() {local c
+ enter("values")
+ value()
+ while (1) {
+ c = current.token
+ if (c != number && c != string && c != label_ && c != rgb) {
+ break
+ }
+ value()
+ }
+ leave("values")
+}
+proc value() {local c
+ enter("value")
+ c = current.token
+ if (c == number) {
+ }else if (c == string) {
+ }else if (c == label_) {
+ }else if (c == rgb) {
+ demand(leftpar)
+ demand(number)
+ read_next_token()
+ optionalcomma()
+ chk(number)
+ read_next_token()
+ optionalcomma()
+ chk(number)
+ demand(rightpar)
+ }else{
+ p_err()
+ }
+ read_next_token()
+ leave("value")
+}
+proc p_set() {
+ // presently, I am imagining that we ignore sets
+ // and I hope we never see objects() in them.
+ enter("p_set")
+ chk(leftpar)
+ demand(set)
+ demand(string)
+ read_next_token()
+ if (current.token != rightpar) {
+ objects()
+ }
+ chk(rightpar)
+ read_next_token()
+ leave("p_set")
+}
+proc contour() {local begin, end, keep, il
+ enter("contour")
+ chk(leftpar)
+ begin = x.size
+ keep = 0
+ demand(string)
+ if (strcmp(current.s, "CellBody") == 0) { keep = 1 }
+ if (strcmp(current.s, "Cell Body") == 0) { keep = 1 }
+ il = current.iline
+ read_next_token()
+ contourinfo()
+ if (keep) {
+ end = x.size
+ if (end - begin > 2) {
+ sectype = 1
+ newsec(begin, end)
+ cursec.iscontour_ = 1
+ }else{
+sprint(tstr, "CellBody contour has less than three points at line %d. Ignoring.", il)
+ b2serr.append(new String(tstr))
+ }
+ }
+ chk(rightpar)
+ read_next_token()
+ leave("contour")
+}
+proc contourinfo() {
+ enter("contourinfo")
+ properties()
+ points()
+ morepoints()
+ leave("contourinfo")
+}
+proc morepoints() {
+ enter("morepoints")
+ optmarkerlist()
+ leave("morepoints")
+}
+proc optmarkerlist() {
+ enter("optmarkerlist")
+ leave("optmarkerlist")
+}
+proc markerlist() {local pcnt
+ enter("markerlist")
+ chk(leftpar)
+ pcnt = 1
+ // not handling markers. when pcnt goes to 0 then leave
+ while (pcnt != 0) {
+ read_next_token()
+ if (current.token == rightpar) {
+ pcnt -= 1
+ }else if (current.token == leftpar) {
+ pcnt += 1
+ }
+ }
+ read_next_token()
+ leave("markerlist")
+}
+proc tree() {
+ enter("tree")
+ parentsec = nil
+ chk(leftpar)
+ read_next_token()
+ properties()
+ set_sectype()
+ branch()
+ chk(rightpar)
+ read_next_token()
+ parentsec = nil
+ leave("tree")
+}
+proc branch() {local begin, end localobj psav
+ enter("branch")
+ psav = parentsec
+ begin = x.size
+ treepoints()
+ end = x.size
+ newsec(begin, end)
+ cursec.parentsec = parentsec
+ parentsec = cursec
+ branchend()
+ parentsec = psav
+ leave("branch")
+}
+proc treepoints() {
+ enter("treepoints")
+ treepoint()
+ while (1) {
+ optionalcomma()
+ if (current.token != leftpar || look_ahead.token != number) {
+ break
+ }
+ treepoint()
+ }
+ leave("treepoints")
+}
+proc treepoint() {
+ enter("treepoint")
+ point()
+ if (current.token == leftsp) {
+ spines()
+ }
+ leave("treepoint")
+}
+proc spines() {
+ enter("spines")
+ spine()
+ while(current.token == leftsp) {
+ spine()
+ }
+ leave("spines")
+}
+proc spine() {
+ enter("spine")
+ chk(leftsp) read_next_token()
+ nspine += 1 err = 1
+// properties() points()
+ while (current.token != rightsp) {
+ read_next_token()
+ }
+ chk(rightsp) read_next_token()
+ leave("spine")
+}
+proc branchend() {
+ enter("branchend")
+ optionalcomma()
+ if (current.token == leftpar) {
+ while (look_ahead.token == label_) {
+ markerlist()
+ }
+ }
+ optionalcomma()
+ if (current.token == leftpar || current.token == label_) {
+ node()
+ }
+ leave("branchend")
+}
+proc node() {
+ enter("node")
+ if (current.token == leftpar) {
+ read_next_token() split()
+ chk(rightpar) read_next_token()
+ }else if (current.token == label_) {
+ read_next_token()
+ }else{
+ p_err()
+ }
+ leave("node")
+}
+proc split() {
+ enter("split")
+ branch()
+ while (current.token == bar) {
+ read_next_token()
+ branch()
+ }
+ leave("split")
+}
+proc marker() {
+ enter("marker")
+ chk(leftpar)
+ demand(label_)
+ read_next_token()
+ properties() points()
+ chk(rightpar) read_next_token()
+ leave("marker")
+}
+func text() {
+ // if text fails then it may be a tree
+ enter("text")
+ chk(leftpar) read_next_token()
+ properties() point()
+ if (current.token != string) {
+ leave("text invalid --- expect string")
+ return 0
+ }
+ chk(string)
+// demand(rightpar)
+ read_next_token()
+ if (current.token != rightpar) {
+ leave("text invalid --- expect rightpar")
+ return 0
+ }
+ chk(rightpar)
+ read_next_token()
+ leave("text")
+ return 1
+}
+proc points() {
+ enter("points")
+ point()
+ while (1) {
+ optionalcomma()
+ if (current.token != leftpar) {
+ break
+ }
+ point()
+ }
+ leave("points")
+}
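+// point parses e.g. "(1.5, 2.5, 3.5, 0.8)": x and y are required; z and the
+// fourth number (stored as the diameter) are optional via optz()/optmodifier()
+// and default to 0.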
+proc point() {
+ enter("point")
+ chk(leftpar)
+ demand(number)
+ xval = current.x
+ iline.append(iline_) lines.append(new String(fline))
+ read_next_token() optionalcomma()
+ chk(number)
+ yval = current.x
+ zval = dval = 0
+ read_next_token() optz()
+ x.append(xval) y.append(yval) z.append(zval) d.append(dval)
+ chk(rightpar) read_next_token()
+//printf("%g %g %g %g\n", xval, yval, zval, dval)
+ leave("point")
+}
+proc optz() {
+ enter("optz")
+ optionalcomma()
+ if (current.token == number) {
+ zval = current.x
+ read_next_token()
+ optmodifier()
+ }
+ leave("optz")
+}
+proc optmodifier() {
+ enter("optmodifier")
+ optionalcomma()
+ if (current.token == number) {
+ dval = current.x
+ read_next_token()
+ optionalcomma()
+ if (current.token == label_) {
+ read_next_token()
+ }
+ optbezier()
+ }
+ leave("optmodifier")
+}
+proc optbezier() {
+ enter("optbezier")
+ optionalcomma()
+ if (current.token == leftpar) {
+ demand(number)
+ read_next_token()
+ optionalcomma() chk(number) read_next_token()
+ optionalcomma() chk(number) read_next_token()
+ optionalcomma() chk(number) demand(rightpar)
+ read_next_token()
+ }
+ leave("optbezier")
+}
+proc optionalcomma() {
+ enter("optionalcomma")
+ if (current.token == comma) {
+ read_next_token()
+ }
+ leave("optionalcomma")
+}
+proc p_err() {
+ printf("\nparse error\n")
+ pcur()
+ printf("line %d: %s\n", iline_, fline)
+ stop
+}
+proc errout() {local i
+ if (quiet) { return }
+ printf("\n%s problems\n\n", file.getname)
+ if (nspine) {
+ printf("Ignored %d spines\n", nspine)
+ }
+ for i=0, b2serr.count-1 {
+ printf("%s\n", b2serr.object(i).s)
+ }
+}
+endtemplate Import3d_Neurolucida3
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_nts.hoc b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_nts.hoc
new file mode 100644
index 0000000..c58e9d6
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_nts.hoc
@@ -0,0 +1,331 @@
+// translation of ntscable's read_nts.c file for importing
+// Eutectic files. After reading and parsing lines, the logic
+// follows that in read_nlcda.hoc
+
+begintemplate Import3d_Eutectic_read
+public filetype, sections, input, type, file, err
+public label, id2pt, id2line, pt2id, pt2sec, sec2pt, mark
+external hoc_sf_
+public id, ptype, tag, x, y, z, d, iline, pointtype, points, type
+public firstpoints, lastpoints
+objref sections, file, stack, cursec, firstpoints, lastpoints, gm
+objref id, ptype, tag, x, y, z, d, iline, pointtype, points, type
+objref iline2pt, vectors, header, lines, diam, parse_err, nil, soma
+strdef tstr, tstr1, point_type_names, filetype, line
+
+proc init() {
+ filetype = "Eutectic"
+ vectors = new List()
+ header = new List()
+ lines = new List()
+ gm = new GUIMath()
+ MTO = 0
+ TTO = 3
+ BTO = 6
+ CP = 9+1
+ FS = 12+1
+ SB = 15+1
+ BP = 18+1
+ NE = 21+1
+ ES = 24+1
+ MAE = 27
+ TAE = 30
+ BAE = 33
+ SOS = 36
+ SCP = 39
+ SOE = 42
+ OS = 45+1
+ OCP = 48
+ OE = 51+1
+ DS = 54+1
+ DCP = 57
+ DE = 60+1
+ point_type_names = \
+"MTOTTOBTO CP FS SB BP NE ESMAETAEBAESOSSCPSOE OSOCP OE DSDCP DE"
+// note numbering for two char item is 1 more than in read_nts.c
+// since space is not included in first char
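+// e.g. for a "CP" item hoc_sf_.head() matches at string index 10, because
+// " CP" occupies indices 9-11 of point_type_names, hence CP = 9+1 above.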
+}
+
+proc input() {local i
+ nspine = 0
+ err = 0
+ parse_err = new List()
+ sections = new List()
+ stack = new List()
+ lastpoints = new Vector()
+ firstpoints = new Vector()
+
+ rdfile($s1)
+ parse2()
+ type = new Vector(sections.count)
+ for i=0, sections.count-1 {
+ type.x[i] = tag.x[sections.object(i).id]
+ }
+ connect2soma()
+ if (err) { errout() }
+}
+
+proc rdfile() {local i, j
+ file = new File($s1)
+ // count lines for vector allocation space (not really necessary)
+ if (!file.ropen()) {
+ err = 1
+ printf("could not open %s\n", $s1)
+ }
+ for (i = 0; !file.eof(); i += 1) {
+ file.gets(line)
+ }
+ file.close()
+// printf("%s has %d lines\n", $s1, i)
+ alloc(i, id, ptype, tag, x, y, z, d, iline, pointtype, points)
+ diam = d
+ file.ropen()
+ for (i = 1; !file.eof(); i += 1) {
+ file.gets(line)
+ parse(i, line)
+ }
+ file.close()
+}
+
+proc alloc() { local i // $oi.size = 0 but enough space for $1 elements
+ for i = 2, numarg() {
+ $oi = new Vector($1)
+ $oi.resize(0)
+ vectors.append($oi)
+ }
+}
+
+proc parse() {local n, a1, a2, a3, a4, a5, a6, a7
+ n = sscanf($s2, "%d %s %d %f %f %f %f", &a1, tstr, &a3, &a4, &a5, &a6, &a7)
+ hoc_sf_.left($s2, hoc_sf_.len($s2)-1)
+ if (n <= 0) {
+ header.append(new String($s2))
+ return
+ }
+ if (n != 7) {
+ err = 1
+ sprint(tstr, "%d: %s parse failure after item %d", $1, $s2, n)
+ parse_err.append(new String(tstr))
+ return
+ }
+ a2 = hoc_sf_.head(point_type_names, tstr, tstr1)
+// print tstr, " ", a2
+ // first points of branches (before physical connection) is 1
+ // continuation points are 2
+ // branch are 3
+ // ends are 4
+ // a branch point can also be a first point
+ // so easiest to accumulate them here
+ if (a2 == MTO) {
+ last = 1
+ firstpoints.append(id.size)
+ }else if (a2 == BP ){
+ if (last == 3 || last == 4){
+ firstpoints.append(id.size)
+ }
+ last = 3
+ }else if (a2 == FS || a2 == SB || a2 == CP){
+ if (a2 == SB) { err = 1 nspine += 1 }
+ if (last == 3 || last == 4){
+ firstpoints.append(id.size)
+ last = 1
+ }else{
+ last = 2
+ }
+ }else if (a2 == NE || a2 == ES || a2 == MAE || a2 == TAE || a2 == BAE){
+ if (last == 3 || last == 4){
+ firstpoints.append(id.size)
+ }
+ last = 4
+ }else if (a2 == SOS){
+ last = 10
+ }else if (a2 == SCP){
+ last = 10
+ }else if (a2 == SOE){
+ last = 10
+ }else if (a2 == OS){
+ return
+ }else if (a2 == DS){
+ return
+	}else if (a2 == DCP || a2 == OCP){
+ return
+ }else if (a2 == DE || a2 == OE){
+ return
+ }else{
+ return
+ }
+ pointtype.append(last)
+ points.append(a1)
+ id.append(a1)
+ ptype.append(a2)
+ tag.append(a3)
+ x.append(a4)
+ y.append(a5)
+ z.append(a6)
+ d.append(a7)
+ iline.append($1)
+ lines.append(new String($s2))
+}
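+// parse2 builds sections from the accumulated points. A section ending at a
+// branch point (pointtype 3) is pushed on the stack; the next first point
+// pops its parent, and the parent is pushed back once (tracked via volatile)
+// so it can also father the branch's second subtree.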
+proc parse2() {local i, j, k localobj parent
+ i = ptype.indwhere("==", SOS)
+ j = ptype.indwhere("==", SOE)
+ if (i > -1 && j > i) {
+ mksec(i, j, nil)
+ cursec.iscontour_ = 1
+// cursec.type=1
+ soma = cursec
+ }
+ for i=0, firstpoints.size-1 {
+ j = firstpoints.x[i]
+ for (k=j; pointtype.x[k] <= 2; k += 1) {
+ }
+ parent = pop()
+ if (parent != nil) {
+ if (parent.volatile < 1) {
+ push(parent)
+ parent.volatile += 1
+ }
+ }
+ mksec(j, k, parent)
+//printf("%s %d %d: %s | %s\n", cursec, j, k, lines.object(j).s, lines.object(k).s)
+ cursec.parentsec = parent
+// logic_connect(cursec, parent)
+ if (pointtype.x[k] == 3) {
+ push(cursec)
+ }
+ }
+ if (stack.count > 0) {
+ err = 1
+ }
+}
+
+proc push() {
+ stack.append($o1)
+}
+obfunc pop() {localobj p
+ if (stack.count > 0) {
+ p = stack.object(stack.count-1)
+ stack.remove(stack.count-1)
+ }else{
+ p = nil
+ }
+ return p
+}
+
+proc mksec() {local i, x1, y1, z1, d1
+ if ($o3 == nil) {
+ cursec = new Import3d_Section($1, $2-$1+1)
+ cursec.append(0, $1, $2-$1+1, x, y, z, d)
+ }else{
+ cursec = new Import3d_Section($1, $2-$1+2)
+ cursec.append(1, $1, $2-$1+1, x, y, z, d)
+ cursec.first = 0 // physical connection
+ i = $o3.raw.ncol-1
+ x1 = $o3.raw.x[0][i]
+ y1 = $o3.raw.x[1][i]
+ z1 = $o3.raw.x[2][i]
+ //d1 = $o3.d.x[i]
+ cursec.set_pt(0, x1, y1, z1, cursec.d.x[1])
+ cursec.fid = 1
+ }
+ cursec.volatile = 0
+ cursec.type = tag.x[$1]
+ sections.append(cursec)
+ lastpoints.append($2)
+}
+
+proc logic_connect() {local i, x1, y1, z1, d1
+ if ($o2 == nil) { return }
+ i = $o2.raw.ncol-1
+ x1 = $o2.raw.x[0][i]
+ y1 = $o2.raw.x[1][i]
+ z1 = $o2.raw.x[2][i]
+ d1 = $o2.d.x[i]
+ $o1.insrt(0, x1, y1, z1, $o1.d.x[0])
+ $o1.first = 1
+}
+
+proc connect2soma() {local i, ip, j, jp, bp, jpmin, dmin, d, xmin, xmax, ymin, ymax localobj sec, xc, yc, zc, c
+ // find centroid of soma if outline and connect all dangling
+ // dendrites to that if inside the contour
+ if (soma == nil) { return }
+ xc = soma.raw.getrow(0)
+ yc = soma.raw.getrow(1)
+ zc = soma.raw.getrow(2)
+ xmin = xc.min-.5 xmax = xc.max + .5
+ ymin = yc.min-.5 ymax = yc.max + .5
+ c = soma.contourcenter(xc, yc, zc)
+ for i=0, sections.count-1 {
+ sec = sections.object(i)
+ if (sec.parentsec == nil && sec != soma) {
+ if (gm.inside(sec.raw.x[0][0], sec.raw.x[1][0], xmin, ymin, xmax, ymax)) {
+ sec.parentsec = soma
+ sec.parentx = .5
+ sec.insrt(0, c.x[0], c.x[1], c.x[2], .01)
+ sec.first = 1
+ sec.fid = 1
+ }
+ }
+ }
+}
+
+proc label(){
+ sprint($s2, "Line %d: %s", iline.x[$1], lines.object($1).s)
+}
+func id2pt() {local i
+ i = id.indwhere(">=", $1)
+//print "id2pt ", $1, i, id.x[i]
+ return i
+}
+func id2line() { return points.x[$1] }
+func pt2id() {local i
+//print "pt2id ", $1, id.x[$1]
+ return id.x[$1]
+}
+func pt2sec(){local i, j
+ i = lastpoints.indwhere(">=", $1)
+ if (i == -1) {
+ i = lastpoints.size-1
+ }
+ $o2 = sections.object(i)
+ j = $1 - $o2.id + $o2.fid
+//print "pt2sec ", $1, $o2, $o2.id, j
+ return j
+}
+func sec2pt(){local i localobj sec
+ sec = sections.object($1)
+ i = sec.id + $2 - sec.fid
+//print "sec2pt ", $1, $2, sec.id, sec.first, i
+ return i
+}
+
+proc mark() {local i, n, a, b, c, d, e, f
+ print $o1, $2, iline, lines
+ i = id.indwhere("==",$2)
+ printf("%d,%d,%d: %s\n", i, id.x[i], iline.x[i], lines.object(i).s)
+ n = sscanf(lines.object(i).s, "%d %s %d %f %f %f %f", &a, tstr, &b, &c, &d, &e, &f)
+ if (n == 7) {
+ print a," ",tstr," ",b,c,d,e,f
+ $o1.mark(c,d,"S",12,4,1)
+ }
+}
+
+proc errout() {local i
+ printf("\n%s problems and default fixes\n\n", file.getname)
+ if (parse_err.count) {
+ printf(" Following lines could not be parsed\n")
+ for i=0, parse_err.count-1 {
+ printf(" %s\n", parse_err.object(i).s)
+ }
+ printf("\n")
+ }
+ if (stack.count > 0) {
+ printf(" stack.count = %d\n", stack.count)
+ }
+ if (nspine > 0) {
+		printf("  Ignored %d spines\n", nspine)
+ }
+}
+
+endtemplate Import3d_Eutectic_read
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_swc.hoc b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_swc.hoc
new file mode 100644
index 0000000..2dddd72
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/import3d/read_swc.hoc
@@ -0,0 +1,428 @@
+// read swc file, create and verify that it is a single tree,
+// and identify the lists of unbranched points.
+
+begintemplate Import3d_SWC_read
+public input, pheader, instantiate
+public id, type, x, y, z, d, pid, iline, header, point2sec, sections, lines
+public idoffset, label, id2pt, id2line, pt2id, pt2sec, sec2pt, file, mark
+public filetype, err, helptxt
+public quiet
+external hoc_sf_
+objref id, type, x, y, z, d, pid, iline, header, lines
+objref file, vectors, sec2point, point2sec, sections
+objref connect2prox
+strdef tstr, line, filetype
+double a[7]
+objref id2index_
+
+// id and pid contain the raw id values (1st and 7th values on each line)
+// from the file. After the file is read id2index(id.x[i]) == i
+// Note that the only requirement for a valid swc file is the tree
+// topology condition pid.x[i] < id.x[i]
+
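+// An illustrative fragment (not from any particular data set); the seven
+// columns are id, type, x, y, z, radius, pid:
+//   1 1 0.0 0.0 0.0 5.0 -1    <- a root: pid == -1
+//   2 3 0.0 5.0 0.0 0.5  1    <- child point whose parent is id 1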
+
+proc init() {
+ quiet = 0
+ filetype = "SWC"
+ vectors = new List()
+ header = new List()
+ lines = new List()
+}
+
+func id2index() {
+ return id2index_.x[$1]
+}
+func pix2ix() {local pid_
+ pid_ = pid.x[$1]
+ if (pid_ < 0) { return -1 }
+ return id2index_.x[pid_]
+}
+
+proc input() {
+ err = 0
+ rdfile($s1)
+ check_pid() // and also creates id2index_
+ sectionify() // create point2sec index map
+ mksections() // Import3dSection list
+// instantiate()
+}
+
+proc rdfile() {local i
+ file = new File($s1)
+ // count lines for vector allocation space (not really necessary)
+ if (!file.ropen()) {
+ err = 1
+ printf("could not open %s\n", $s1)
+ }
+ for (i = 0; !file.eof(); i += 1) {
+ file.gets(line)
+ }
+ file.close()
+// printf("%s has %d lines\n", $s1, i)
+ alloc(i, id, type, x, y, z, d, pid, iline)
+ file.ropen()
+ for (i = 1; !file.eof(); i += 1) {
+ file.gets(line)
+ parse(i, line)
+ }
+ file.close()
+}
+
+proc alloc() { local i // $oi.size = 0 but enough space for $1 elements
+ for i = 2, numarg() {
+ $oi = new Vector($1)
+ $oi.resize(0)
+ vectors.append($oi)
+ }
+}
+
+proc parse() {local i, n
+ n = sscanf($s2, "%f %f %f %f %f %f %f", &a[0], &a[1], &a[2],\
+ &a[3], &a[4], &a[5], &a[6])
+ if (n == 7) {
+ a[5] *= 2 // radius to diameter
+ for i=0, 6 {
+ vectors.object(i).append(a[i])
+ }
+ iline.append($1) // for error messages
+ hoc_sf_.left($s2, hoc_sf_.len($s2)-1)
+ lines.append(new String($s2))
+ } else if (hoc_sf_.head($s2, "#", tstr) == 0) { // comment
+ header.append(new String($s2))
+ } else {
+ err = 1
+ printf("error %s line %d: could not parse: %s", file.getname, $1, $s2)
+// Note: only swcdata/n120.swc and swcdata/n423.swc last lines are invalid
+ }
+}
+
+proc pheader() {local i
+ for i=0, header.count-1 {
+ printf("%s", header.object(i).s)
+ }
+}
+
+proc shift_id() { local i, ierr, imin
+ // Note: swcdata/*.swc have sequential id's
+ // shift id and pid so that id.x[0] == 0. Then verify that
+ // id.x[i] == i
+ if (id.size > 0) {
+ imin = id.min_ind
+ idoffset = id.x[imin]
+ // is the first one the smallest?
+ if (id.x[0] != idoffset) {
+ err = 1
+printf("error %s lines %d and %d: id's %d and %d are not sequential\n", \
+ file.getname, iline.x[0], iline.x[imin], \
+ id.x[0], idoffset)
+ }
+ id.sub(idoffset)
+ pid.sub(idoffset)
+ }
+ ierr = 0
+ for i=0, id.size-1 {
+ if (id.x[i] != i ) {
+ err = 1
+printf("error %s line %d: id's shifted by %d are not sequential: id.x[%d] != %g\n", \
+ file.getname, iline.x[i], idoffset, i, id.x[i])
+ ierr += 1
+ }
+ if (ierr > 5) { break }
+ }
+}
+
+proc check_pid() {local i, ierr, needsort localobj tobj
+ // if all pid.x[i] < id.x[i] then we must be 1 or more trees with no loops
+ // Note: swcdata/*.swc conforms.
+ needsort = 0
+ ierr = 0
+ for i=0, id.size-1 {
+ if (i > 0) if (id.x[i] <= id.x[i-1]) { needsort = 1 }
+ if (pid.x[i] >= id.x[i]) {
+ err = 1
+printf("error %s line %d: index %d pid=%d is not less than id=%d\n",\
+ file.getname, iline.x[i], i, pid.x[i], id.x[i])
+ }
+ }
+ if (needsort) { // sort in id order
+ tobj = id.sortindex()
+ id.sortindex(id, tobj)
+ pid.sortindex(pid, tobj)
+ x.sortindex(x, tobj)
+ y.sortindex(y, tobj)
+ z.sortindex(z, tobj)
+		d.sortindex(d, tobj)
+ iline.sortindex(iline, tobj)
+ }
+ // the number of trees is just the number of pid's < 0
+ // Note: swcdata/*.swc have only one tree
+ tobj = new Vector()
+ tobj.indvwhere(pid, "<", 0)
+ if (tobj.size > 1) {
+ err = 1
+
+ if (!quiet) {// added by Sergey to suppress the warning output
+
+printf("warning %s: more than one tree:\n", file.getname)
+ printf(" root at line:")
+ for i=0, tobj.size-1 {
+ printf(" %d,", iline.x[tobj.x[i]])
+ }
+ printf(" \n")
+ }// end of quiet
+ }
+ // check for duplicate id
+ for i=1, id.size-1 if (id.x[i] == id.x[i-1]) {
+ err = 1
+printf("error %s: duplicate id:\n", file.getname)
+printf(" %d: %s\n", iline.x[i-1], lines.o(iline.x[i-1]).s)
+printf(" %d: %s\n", iline.x[i], lines.o(iline.x[i]).s)
+ }
+ // create the id2index_ map
+ id2index_ = new Vector(id.max()+1)
+ id2index_.fill(-1)
+ for i=0, id.size-1 {
+ id2index_.x[id.x[i]] = i
+ }
+}
+
+proc sectionify() {local i, si localobj tobj
+ // create point2sec map and sections list
+ // point2sec gives immediate knowledge of the section a point is in
+ // sections list is for display purposes
+ if (id.size < 2) { return }
+
+ // tobj stores the number of child nodes with pid equal to i
+ // actually every non-contiguous child adds 1.01 and a contiguous
+ // child adds 1
+ mark_branch(tobj)
+
+ point2sec = new Vector(id.size)
+	// the first point is in the root; if it is the only point it will be
+	// interpreted as spherical.
+ point2sec.x[0] = 0
+ si = 0
+ for i=1, id.size-1 {
+ if (tobj.x[pix2ix(i)] > 1 || connect2prox.x[i]) {
+ si += 1
+ }
+ point2sec.x[i] = si
+ }
+ sec2point = new Vector(si)
+ tobj.x[0] = 1
+ sec2point.indvwhere(tobj, "!=", 1)
+ // sec2point.x[i] is the last point of section i
+ // 0 is the first point of section 0
+ // sec2point.x[i-1]+1 is the first point of section i
+}
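+// Worked example: for a contiguous chain 0-1-2 plus a noncontiguous point 3
+// whose parent is point 1, mark_branch yields tobj = [1, 2.01, 0, 0], so
+// point2sec = [0, 0, 1, 2]: section 0 holds points {0,1}, section 1 holds
+// {2} and section 2 holds {3}.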
+
+proc mark_branch() { local i, p
+ //$o1 is used to store the number of child nodes with pid equal to i
+	// actually each noncontiguous child adds a bit more than 1 (1.01)
+	// and each contiguous child adds exactly 1
+ // this is the basic computation that defines sections, i.e.
+ // contiguous 1's with perhaps a final 0 (a leaf)
+ // As usual, the only ambiguity will be how to treat the soma
+
+ // Another wrinkle is that we do not want any sections that
+ // have multiple point types. E.g. point type 1 is often
+ // associated with the soma. Therefore we identify
+ // point type changes with branch points.
+
+ // however warn if the first two points do not have the same type
+ // if single point soma set the spherical_soma flag
+ if ( type.x[0] != type.x[1]) {
+ err = 1
+ if (0 && !quiet) {
+printf("\nNotice: %s:\nThe first two points have different types (%d and %d) but\n a single point NEURON section is not allowed.\n Interpreting the point as the center of a sphere of\n radius %g at location (%g, %g, %g)\n Will represent as 3-point cylinder with L=diam and with all\n children kept at their 1st point positions and connected\n with wire to middle point.\n If this is an incorrect guess, then change the file.\n"\
+, file.getname, type.x[0], type.x[1], d.x[0]/2, x.x[0], y.x[0], z.x[0])
+ }
+ }
+
+ // another wrinkle is that when a dendrite connects to the soma
+ // by a wire,
+ // another branch may connect to the first point of that dendrite
+ // In this case (to avoid single point sections)
+ // that branch should be connected to position 0
+ // of the dendrite (and the first point of that branch should be
+ // the same position as the first point of the dendrite.
+ // use connect2prox to indicate the parent point is not
+ // the distal end but the proximal end of the parent section
+ connect2prox = new Vector(id.size)
+
+ $o1 = new Vector(id.size)
+ for i=0, id.size-1 {
+ p = pix2ix(i)
+ if (p >= 0) {
+ $o1.x[p] += 1
+ if ( p != i-1) {
+ $o1.x[p] += .01
+// i is noncontiguous with its parent; if the parent is not soma and the
+// parent of the parent is soma, then i is marked in connect2prox
+if (p > 1) if (type.x[p] != 1 && type.x[pix2ix(p)] == 1) {
+ connect2prox.x[i] = 1
+ $o1.x[p] = 1 // p not treated as a 1pt section
+ err = 1
+ if (0 && !quiet) {
+printf("\nNotice: %s:\n %d parent is %d which is the proximal point of a section\n connected by a wire to the soma.\n The dendrite is being connected to\n the proximal end of the parent dendrite.\n If this is an incorrect guess, then change the file.\n"\
+, file.getname, id.x[i], id.x[p])
+ }
+}
+
+ }
+ if (type.x[p] != type.x[i]) {
+ // increment enough to get past 1
+ // so force end of section but
+ // not really a branch
+ $o1.x[p] += .01
+ }
+ }
+ }
+}
+
+proc mksections() {local i, j, isec, first localobj sec, psec, pts
+ sections = new List()
+ isec = 0
+ first = 0
+ for i=0, id.size-1 {
+ if (point2sec.x[i] > isec) {
+ mksection(isec, first, i)
+ isec += 1
+ first = i
+ }
+ }
+ mksection(isec, first, i)
+}
+
+proc mksection() { local i, isec, first localobj sec
+ isec = $1 first=$2 i=$3
+ if (isec > 0) {// branches have pid as first point
+ sec = new Import3d_Section(first, i-first+1)
+ pt2sec(pix2ix(first), sec.parentsec)
+ // but if the parent is the root and the branch has more than
+ // one point, then connect to center of root with wire
+ if (point2sec.x[pix2ix(first)] == 0 && i > 1) {
+ sec.parentx = 0.5
+ sec.first = 1
+ }else{
+ if (pix2ix(first) == 0) { sec.parentx = 0 }
+ }
+ sec.append(0, pix2ix(first), 1, x, y, z, d)
+ sec.append(1, first, i-first, x, y, z, d)
+ }else{// pid not first point in root section
+ sec = new Import3d_Section(first, i-first)
+ sec.append(0, first, i-first, x, y, z, d)
+ }
+ sec.type = type.x[first]
+ sections.append(sec)
+ if (object_id(sec.parentsec)) {
+ if (sec.parentsec.type == 1 && sec.type != 1) {
+ sec.d.x[0] = sec.d.x[1]
+ }
+ }
+ if (connect2prox.x[first]) {
+ sec.pid = sec.parentsec.id
+ sec.parentx = 0
+ }
+}
+
+func same() {
+ if ($2 < 0) return 0
+ if (x.x[$1] == x.x[$2]) {
+ if (y.x[$1] == y.x[$2]) {
+// if (z.x[$1] == z.x[$2]) {
+ return 1
+// }
+ }
+ }
+ return 0
+}
+
+proc instantiate() {local i, isec, psec, pp, si, px
+ if (id.size < 2) { return }
+
+ sprint(tstr, "~create K[%d]", sec2point.size)
+ execute(tstr)
+
+ // connect
+ for i = 2, id.size-1 {
+ if (point2sec.x[pix2ix(i)] == point2sec.x[i]) { continue }
+ if (pix2ix(i) == 0) { px = 0 } else { px = 1 }
+ sprint(tstr, "K[%d] connect K[%d](0), (%g)", \
+ point2sec.x[pix2ix(i)], point2sec.x[i], px)
+ execute(tstr)
+ }
+
+ // 3-d point info
+ // needs some thought with regard to interior duplicate
+ // points, and whether it is appropriate to make the first
+ // point in the section the same location and diam as the
+ // pid point
+ isec = 0
+ for i=0, id.size-1 {
+ if (point2sec.x[i] > isec ) { // in next section
+ ptadd(pix2ix(i), point2sec.x[i])
+ }
+ isec = point2sec.x[i]
+ ptadd(i, isec)
+ }
+}
+
+proc ptadd() {
+ sprint(tstr, "K[%d] { pt3dadd(%g, %g, %g, %g) }", \
+ $2, x.x[$1], y.x[$1], z.x[$1], d.x[$1])
+ execute(tstr)
+}
+
+proc label() {
+ sprint($s2, "Line %d: %s", iline.x[$1], lines.object($1).s)
+}
+func id2pt() {local i
+ if ($1 < 0) {
+ $1 = 0
+ }else if ( $1 > id2index_.size-1) {
+ $1 = id2index_.size-1
+ }
+ return id2index($1)
+}
+func id2line() { return $1 }
+func pt2id() { return id.x[$1] }
+func pt2sec() { local i,j //from selpoint
+ i = point2sec.x[$1]
+ $o2 = sections.object(i)
+ j = $1 - $o2.id
+ if (i > 0) { j += 1 }
+ return j
+}
+func sec2pt() {local i
+ i = sections.object($1).id + $2
+ if ($1 > 0) {
+ i -= 1
+ }
+ return i
+}
+proc mark() {local i
+ print $o1, $2, iline, lines
+ i = id2index($2)
+ printf("%d %d %g %g: %s\n", i, iline.x[i], x.x[i], y.x[i], lines.object(i).s)
+ $o1.mark(x.x[i], y.x[i], "S", 12, 4, 1)
+}
+
+proc helptxt() {
+ xpanel("SWC file filter characteristics")
+xlabel(" Sections consist of unbranched sequences of points having")
+xlabel("the same type. All sections connect from 0 to 1")
+xlabel("(except those connecting to the first point")
+xlabel("of the root section connect from 0 to 0).")
+xlabel("With one exception, all child sections have as their first pt3d")
+xlabel("point a copy of the parent point and the diameter of that first")
+xlabel("point is the diameter of the parent point")
+xlabel(" The exception, so that the error in area is not so")
+xlabel("egregious, is that dendrite branches that connect to the soma")
+xlabel("get a copy of the parent point as their first pt3d point but")
+xlabel("the diameter of that point is the diameter of the second point")
+xlabel(" The root section does not contain an extra parent point.")
+ xpanel(0)
+}
+endtemplate Import3d_SWC_read
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/io_tools.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/io_tools.py
new file mode 100644
index 0000000..2ae6289
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/io_tools.py
@@ -0,0 +1,38 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from neuron import h
+
+from bmtk.simulator.core.io_tools import IOUtils
+
+pc = h.ParallelContext()
+MPI_Rank = int(pc.id())
+MPI_Size = int(pc.nhost())
+
+
+class NEURONIOUtils(IOUtils):
+ def __init__(self):
+ super(NEURONIOUtils, self).__init__()
+ self.mpi_rank = MPI_Rank
+ self.mpi_size = MPI_Size
+
+io = NEURONIOUtils()
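+
+# Usage sketch: other bionet modules import this singleton rather than
+# constructing their own instance, e.g.
+#   from bmtk.simulator.bionet.io_tools import io
+#   io.log_info('starting simulation')  # logging helpers inherited from IOUtils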
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/__init__.py
new file mode 100644
index 0000000..7bb45dc
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .ecp import EcpMod
+from .record_cellvars import MembraneReport, SomaReport, SectionReport
+from .record_spikes import SpikesMod
+from .xstim import XStimMod
+from .save_synapses import SaveSynapses
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/ecp.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/ecp.py
new file mode 100644
index 0000000..3ea1059
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/ecp.py
@@ -0,0 +1,275 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import h5py
+import math
+import pandas as pd
+from neuron import h
+import numpy as np
+
+from bmtk.simulator.bionet.modules.sim_module import SimulatorMod
+from bmtk.utils.sonata.utils import add_hdf5_magic, add_hdf5_version
+
+
+pc = h.ParallelContext()
+MPI_RANK = int(pc.id())
+N_HOSTS = int(pc.nhost())
+
+
+class EcpMod(SimulatorMod):
+ def __init__(self, tmp_dir, file_name, electrode_positions, contributions_dir, cells=[], variable_name='v',
+ electrode_channels=None):
+ self._ecp_output = file_name if os.path.isabs(file_name) else os.path.join(tmp_dir, file_name)
+ self._positions_file = electrode_positions
+ self._tmp_outputdir = tmp_dir
+ self._contributions_dir = contributions_dir if os.path.isabs(contributions_dir) else os.path.join(tmp_dir, contributions_dir)
+ self._cells = cells
+ self._rel = None
+ self._fih1 = None
+ self._rel_nsites = 0
+ self._block_size = 0
+ # self._biophys_gids = []
+ self._saved_gids = {}
+ self._nsteps = 0
+
+        self._tstep = 0  # cumulative time step
+ # self._rel_time = 0 #
+ self._block_step = 0 # time step within the given block of time
+ self._tstep_start_block = 0
+ self._data_block = None
+ self._cell_var_files = {}
+
+ self._tmp_ecp_file = self._get_tmp_fname(MPI_RANK)
+ self._tmp_ecp_handle = None
+ # self._tmp_ecp_dataset = None
+
+ self._local_gids = []
+
+ def _get_tmp_fname(self, rank):
+        return os.path.join(self._tmp_outputdir, 'tmp_{}_ecp.h5'.format(rank))
+
+ def _create_ecp_file(self, sim):
+ dt = sim.dt
+ tstop = sim.tstop
+ self._nsteps = int(round(tstop/dt))
+
+        # create a file to temporarily store ecp data on each rank
+ self._tmp_ecp_handle = h5py.File(self._tmp_ecp_file, 'a')
+ self._tmp_ecp_handle.create_dataset('data', (self._nsteps, self._rel_nsites), maxshape=(None, self._rel_nsites),
+ chunks=True)
+
+ # only the primary node will need to save the final ecp
+ if MPI_RANK == 0:
+ with h5py.File(self._ecp_output, 'w') as f5:
+ add_hdf5_magic(f5)
+ add_hdf5_version(f5)
+ f5.create_dataset('data', (self._nsteps, self._rel_nsites), maxshape=(None, self._rel_nsites),
+ chunks=True)
+ f5.attrs['dt'] = dt
+ f5.attrs['tstart'] = 0.0
+ f5.attrs['tstop'] = tstop
+
+                # Save channels. Currently we record from all channels; we may want to be more selective in the future.
+ f5.create_dataset('channel_id', data=np.arange(self._rel.nsites))
+
+ pc.barrier()
+
+ def _create_cell_file(self, gid):
+ file_name = os.path.join(self._contributions_dir, '{}.h5'.format(int(gid)))
+ file_h5 = h5py.File(file_name, 'a')
+ self._cell_var_files[gid] = file_h5
+ file_h5.create_dataset('data', (self._nsteps, self._rel_nsites), maxshape=(None, self._rel_nsites), chunks=True)
+ # self._cell_var_files[gid] = file_h5['ecp']
+
+ def _calculate_ecp(self, sim):
+ self._rel = RecXElectrode(self._positions_file)
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+ #cell = sim.net.get_local_cell(gid)
+ # cell = sim.net.cells[gid]
+ self._rel.calc_transfer_resistance(gid, cell.get_seg_coords())
+
+ self._rel_nsites = self._rel.nsites
+ sim.h.cvode.use_fast_imem(1) # make i_membrane_ a range variable
+
+ def set_pointers():
+ for gid, cell in sim.net.get_local_cells().items():
+ #for gid, cell in sim.net.local_cells.items():
+ # for gid, cell in sim.net.cells.items():
+ cell.set_im_ptr()
+ self._fih1 = sim.h.FInitializeHandler(0, set_pointers)
+
+ def _save_block(self, interval):
+ """Add """
+ itstart, itend = interval
+ self._tmp_ecp_handle['data'][itstart:itend, :] += self._data_block[0:(itend - itstart), :]
+ self._tmp_ecp_handle.flush()
+ self._data_block[:] = 0.0
+
+ def _save_ecp(self, sim):
+ """Save ECP from each rank to disk into a single file"""
+ block_size = sim.nsteps_block
+ nblocks, remain = divmod(self._nsteps, block_size)
+ ivals = [i*block_size for i in range(nblocks+1)]
+ if remain != 0:
+ ivals.append(self._nsteps)
+
+ for rank in range(N_HOSTS): # iterate over the ranks
+ if rank == MPI_RANK: # wait until finished with a particular rank
+ with h5py.File(self._ecp_output, 'a') as ecp_f5:
+ for i in range(len(ivals)-1):
+ ecp_f5['data'][ivals[i]:ivals[i+1], :] += self._tmp_ecp_handle['data'][ivals[i]:ivals[i+1], :]
+
+ pc.barrier()
+
+ def _save_cell_vars(self, interval):
+ itstart, itend = interval
+
+ for gid, data in self._saved_gids.items():
+ h5_file = self._cell_var_files[gid]
+ h5_file['data'][itstart:itend, :] = data[0:(itend-itstart), :]
+ h5_file.flush()
+ data[:] = 0.0
+
+ def _delete_tmp_files(self):
+ if os.path.exists(self._tmp_ecp_file):
+ os.remove(self._tmp_ecp_file)
+
+ def initialize(self, sim):
+ if self._contributions_dir and (not os.path.exists(self._contributions_dir)) and MPI_RANK == 0:
+ os.makedirs(self._contributions_dir)
+ pc.barrier()
+
+ self._block_size = sim.nsteps_block
+
+ # Get list of gids being recorded
+ selected_gids = set(sim.net.get_node_set(self._cells).gids())
+ self._local_gids = list(set(sim.biophysical_gids) & selected_gids)
+
+ self._calculate_ecp(sim)
+ self._create_ecp_file(sim)
+
+ # ecp data
+ self._data_block = np.zeros((self._block_size, self._rel_nsites))
+
+        # create list of all cells whose ecp values will be saved separately
+ self._saved_gids = {gid: np.empty((self._block_size, self._rel_nsites))
+ for gid in self._local_gids}
+ for gid in self._saved_gids.keys():
+ self._create_cell_file(gid)
+
+ pc.barrier()
+
+ def step(self, sim, tstep):
+ for gid in self._local_gids: # compute ecp only from the biophysical cells
+ cell = sim.net.get_cell_gid(gid)
+ #cell = sim.net.get_local_cell(gid)
+ # cell = sim.net.cells[gid]
+ im = cell.get_im()
+ tr = self._rel.get_transfer_resistance(gid)
+ ecp = np.dot(tr, im)
+
+ if gid in self._saved_gids.keys():
+ # save individual contribution
+ self._saved_gids[gid][self._block_step, :] = ecp
+
+ # add to total ecp contribution
+ self._data_block[self._block_step, :] += ecp
+
+ self._block_step += 1
+
+ def block(self, sim, block_interval):
+ self._save_block(block_interval)
+ # self._save_ecp(block_interval)
+ self._save_cell_vars(block_interval)
+
+ self._block_step = 0
+ self._tstep_start_block = self._tstep
+
+ def finalize(self, sim):
+ if self._block_step > 0:
+ # just in case the simulation doesn't end on a block step
+ self.block(sim, (sim.n_steps - self._block_step, sim.n_steps))
+
+ self._save_ecp(sim)
+ self._delete_tmp_files()
+ pc.barrier()
+
+
+class RecXElectrode(object):
+ """Extracellular electrode
+
+ """
+
+ def __init__(self, positions):
+ """Create an array"""
+ # self.conf = conf
+ electrode_file = positions # self.conf["recXelectrode"]["positions"]
+
+ # convert coordinates to ndarray, The first index is xyz and the second is the channel number
+ el_df = pd.read_csv(electrode_file, sep=' ')
+ self.pos = el_df[['x_pos', 'y_pos', 'z_pos']].T.values
+ #self.pos = el_df.as_matrix(columns=['x_pos', 'y_pos', 'z_pos']).T
+ self.nsites = self.pos.shape[1]
+ # self.conf['run']['nsites'] = self.nsites # add to the config
+ self.transfer_resistances = {} # V_e = transfer_resistance*Im
+
+ def drift(self):
+ # will include function to model electrode drift
+ pass
+
+ def get_transfer_resistance(self, gid):
+ return self.transfer_resistances[gid]
+
+ def calc_transfer_resistance(self, gid, seg_coords):
+ """Precompute mapping from segment to electrode locations"""
+ sigma = 0.3 # mS/mm
+
+ r05 = (seg_coords['p0'] + seg_coords['p1']) / 2
+ dl = seg_coords['p1'] - seg_coords['p0']
+
+ nseg = r05.shape[1]
+
+ tr = np.zeros((self.nsites, nseg))
+
+ for j in range(self.nsites): # calculate mapping for each site on the electrode
+ rel = np.expand_dims(self.pos[:, j], axis=1) # coordinates of a j-th site on the electrode
+ rel_05 = rel - r05 # distance between electrode and segment centers
+
+ # compute dot product column-wise, the resulting array has as many columns as original
+ r2 = np.einsum('ij,ij->j', rel_05, rel_05)
+
+            # dot product with the segment vector, column-wise (unnormalized projection onto the segment axis)
+ rlldl = np.einsum('ij,ij->j', rel_05, dl)
+ dlmag = np.linalg.norm(dl, axis=0) # length of each segment
+            rll = abs(rlldl / dlmag)  # component of r parallel to the segment axis; it must always be positive
+ rT2 = r2 - rll ** 2 # square of perpendicular component
+ up = rll + dlmag / 2
+ low = rll - dlmag / 2
+ num = up + np.sqrt(up ** 2 + rT2)
+ den = low + np.sqrt(low ** 2 + rT2)
+ tr[j, :] = np.log(num / den) / dlmag # units of (um) use with im_ (total seg current)
+ np.copyto(tr[j, :], 0, where=(dlmag == 0)) # zero out stub segments
+
+ tr *= 1 / (4 * math.pi * sigma)
+ self.transfer_resistances[gid] = tr
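+
+# The loop above implements the line-source approximation: for a segment of
+# length L, with rll the (positive) component of the site-to-center vector
+# along the segment axis and rT2 the squared perpendicular distance,
+#   R = 1/(4*pi*sigma*L) * ln((h + sqrt(h^2 + rT2)) / (l + sqrt(l^2 + rT2)))
+# where h = rll + L/2 and l = rll - L/2. A usage sketch (the positions file
+# and current vector are hypothetical):
+#   electrode = RecXElectrode('electrode_positions.csv')
+#   electrode.calc_transfer_resistance(gid, cell.get_seg_coords())
+#   ecp = np.dot(electrode.get_transfer_resistance(gid), im)  # one value per site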
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/record_cellvars.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/record_cellvars.py
new file mode 100644
index 0000000..5a1d3a3
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/record_cellvars.py
@@ -0,0 +1,204 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import h5py
+from neuron import h
+
+from bmtk.simulator.bionet.modules.sim_module import SimulatorMod
+from bmtk.simulator.bionet.io_tools import io
+
+from bmtk.utils.io import cell_vars
+try:
+ # Check to see if h5py is built to run in parallel
+ if h5py.get_config().mpi:
+ MembraneRecorder = cell_vars.CellVarRecorderParallel
+ else:
+ MembraneRecorder = cell_vars.CellVarRecorder
+
+except Exception as e:
+ MembraneRecorder = cell_vars.CellVarRecorder
+
+MembraneRecorder._io = io
+
+pc = h.ParallelContext()
+MPI_RANK = int(pc.id())
+N_HOSTS = int(pc.nhost())
+
+
+def first_element(lst):
+ return lst[0]
+
+
+transforms_table = {
+ 'first_element': first_element,
+}
+
+
+class MembraneReport(SimulatorMod):
+ def __init__(self, tmp_dir, file_name, variable_name, cells, sections='all', buffer_data=True, transform={}):
+ """Module used for saving NEURON cell properities at each given step of the simulation.
+
+ :param tmp_dir:
+ :param file_name: name of h5 file to save variable.
+ :param variables: list of cell variables to record
+ :param gids: list of gids to to record
+ :param sections:
+ :param buffer_data: Set to true then data will be saved to memory until written to disk during each block, reqs.
+ more memory but faster. Set to false and data will be written to disk on each step (default: True)
+ """
+ self._all_variables = list(variable_name)
+ self._variables = list(variable_name)
+ self._transforms = {}
+ # self._special_variables = []
+ for var_name, fnc_name in transform.items():
+ if fnc_name is None or len(fnc_name) == 0:
+                # nothing to register for an empty transform name; self._transforms is still empty here
+ continue
+
+ fnc = transforms_table[fnc_name]
+ self._transforms[var_name] = fnc
+ self._variables.remove(var_name)
+
+ self._tmp_dir = tmp_dir
+
+ self._file_name = file_name if os.path.isabs(file_name) else os.path.join(tmp_dir, file_name)
+ self._all_gids = cells
+ self._local_gids = []
+ self._sections = sections
+
+ self._var_recorder = MembraneRecorder(self._file_name, self._tmp_dir, self._all_variables,
+ buffer_data=buffer_data, mpi_rank=MPI_RANK, mpi_size=N_HOSTS)
+
+ self._gid_list = [] # list of all gids that will have their variables saved
+ self._data_block = {} # table of variable data indexed by [gid][variable]
+ self._block_step = 0 # time step within a given block
+
+ def _get_gids(self, sim):
+ # get list of gids to save. Will only work for biophysical cells saved on the current MPI rank
+ selected_gids = set(sim.net.get_node_set(self._all_gids).gids())
+ self._local_gids = list(set(sim.biophysical_gids) & selected_gids)
+
+ def _save_sim_data(self, sim):
+ self._var_recorder.tstart = 0.0
+ self._var_recorder.tstop = sim.tstop
+ self._var_recorder.dt = sim.dt
+
+ def initialize(self, sim):
+ self._get_gids(sim)
+ self._save_sim_data(sim)
+
+ # TODO: get section by name and/or list of section ids
+ # Build segment/section list
+ for gid in self._local_gids:
+ sec_list = []
+ seg_list = []
+ cell = sim.net.get_cell_gid(gid)
+ cell.store_segments()
+ for sec_id, sec in enumerate(cell.get_sections()):
+ for seg in sec:
+ # TODO: Make sure the seg has the recorded variable(s)
+ sec_list.append(sec_id)
+ seg_list.append(seg.x)
+
+ self._var_recorder.add_cell(gid, sec_list, seg_list)
+
+ self._var_recorder.initialize(sim.n_steps, sim.nsteps_block)
+
+ def step(self, sim, tstep):
+ # save all necessary cells/variables at the current time-step into memory
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+
+ for var_name in self._variables:
+ seg_vals = [getattr(seg, var_name) for seg in cell.get_segments()]
+ self._var_recorder.record_cell(gid, var_name, seg_vals, tstep)
+
+ for var_name, fnc in self._transforms.items():
+ seg_vals = [fnc(getattr(seg, var_name)) for seg in cell.get_segments()]
+ self._var_recorder.record_cell(gid, var_name, seg_vals, tstep)
+
+ self._block_step += 1
+
+ def block(self, sim, block_interval):
+ # write variables in memory to file
+ self._var_recorder.flush()
+
+ def finalize(self, sim):
+ # TODO: Build in mpi signaling into var_recorder
+ pc.barrier()
+ self._var_recorder.close()
+
+ pc.barrier()
+ self._var_recorder.merge()
+
+
+class SomaReport(MembraneReport):
+ """Special case for when only needing to save the soma variable"""
+ def __init__(self, tmp_dir, file_name, variable_name, cells, sections='soma', buffer_data=True, transform={}):
+ super(SomaReport, self).__init__(tmp_dir=tmp_dir, file_name=file_name, variable_name=variable_name, cells=cells,
+ sections=sections, buffer_data=buffer_data, transform=transform)
+
+ def initialize(self, sim):
+ self._get_gids(sim)
+ self._save_sim_data(sim)
+
+ for gid in self._local_gids:
+ self._var_recorder.add_cell(gid, [0], [0.5])
+ self._var_recorder.initialize(sim.n_steps, sim.nsteps_block)
+
+ def step(self, sim, tstep, rel_time=0.0):
+ # save all necessary cells/variables at the current time-step into memory
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+ for var_name in self._variables:
+ var_val = getattr(cell.hobj.soma[0](0.5), var_name)
+ self._var_recorder.record_cell(gid, var_name, [var_val], tstep)
+
+ for var_name, fnc in self._transforms.items():
+ var_val = getattr(cell.hobj.soma[0](0.5), var_name)
+ new_val = fnc(var_val)
+ self._var_recorder.record_cell(gid, var_name, [new_val], tstep)
+
+ self._block_step += 1
+
+
+class SectionReport(MembraneReport):
+ """For variables like im which have one value per section, not segment"""
+
+ def initialize(self, sim):
+ self._get_gids(sim)
+ self._save_sim_data(sim)
+
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+ sec_list = range(len(cell.get_sections()))
+ self._var_recorder.add_cell(gid, sec_list, sec_list)
+
+ self._var_recorder.initialize(sim.n_steps, sim.nsteps_block)
+
+ def step(self, sim, tstep):
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+ im_vals = cell.get_im()
+ self._var_recorder.record_cell(gid, 'im', im_vals, tstep)
+
+ self._block_step += 1
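+
+# Usage sketch (parameter values are hypothetical): record 'v' on every
+# segment and reduce a second, list-valued variable with the 'first_element'
+# transform registered in transforms_table above:
+#   mod = MembraneReport(tmp_dir='output', file_name='cellvars.h5',
+#                        variable_name=['v', 'cai'], cells='biophysical',
+#                        transform={'cai': 'first_element'})
+#   sim.add_mod(mod)  # bionet then calls initialize/step/block/finalize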
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/record_spikes.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/record_spikes.py
new file mode 100644
index 0000000..4c8751b
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/record_spikes.py
@@ -0,0 +1,94 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import csv
+import h5py
+import numpy as np
+from bmtk.simulator.bionet.modules.sim_module import SimulatorMod
+from bmtk.utils.io.spike_trains import SpikeTrainWriter
+
+from neuron import h
+
+
+pc = h.ParallelContext()
+MPI_RANK = int(pc.id())
+N_HOSTS = int(pc.nhost())
+
+
+class SpikesMod(SimulatorMod):
+ """Module use for saving spikes
+
+ """
+
+ def __init__(self, tmp_dir, spikes_file_csv=None, spikes_file=None, spikes_file_nwb=None, spikes_sort_order=None):
+ # TODO: Have option to turn off caching spikes to csv.
+ def _file_path(file_name):
+ if file_name is None:
+ return None
+ return file_name if os.path.isabs(file_name) else os.path.join(tmp_dir, file_name)
+
+ self._csv_fname = _file_path(spikes_file_csv)
+ self._save_csv = spikes_file_csv is not None
+
+ self._h5_fname = _file_path(spikes_file)
+ self._save_h5 = spikes_file is not None
+
+ self._nwb_fname = _file_path(spikes_file_nwb)
+ self._save_nwb = spikes_file_nwb is not None
+
+ self._tmpdir = tmp_dir
+ self._sort_order = spikes_sort_order
+
+ self._spike_writer = SpikeTrainWriter(tmp_dir=tmp_dir, mpi_rank=MPI_RANK, mpi_size=N_HOSTS)
+
+ def initialize(self, sim):
+ # TODO: since it's possible that other modules may need to access spikes, set_spikes_recordings() should
+ # probably be called in the simulator itself.
+ sim.set_spikes_recording()
+
+ def block(self, sim, block_interval):
+ # take spikes from Simulator spikes vector and save to the tmp file
+ for gid, tVec in sim.spikes_table.items():
+ for t in tVec:
+ self._spike_writer.add_spike(time=t, gid=gid)
+
+ pc.barrier() # wait until all ranks have been saved
+ sim.set_spikes_recording() # reset recording vector
+
+ def finalize(self, sim):
+ self._spike_writer.flush()
+ pc.barrier()
+
+ if self._save_csv:
+ self._spike_writer.to_csv(self._csv_fname, sort_order=self._sort_order)
+ pc.barrier()
+
+ if self._save_h5:
+ self._spike_writer.to_hdf5(self._h5_fname, sort_order=self._sort_order)
+ pc.barrier()
+
+ if self._save_nwb:
+ self._spike_writer.to_nwb(self._nwb_fname, sort_order=self._sort_order)
+ pc.barrier()
+
+ self._spike_writer.close()
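+
+# Usage sketch (file names are hypothetical; sort_order is assumed to take
+# 'time' or 'gid', as handled by SpikeTrainWriter):
+#   mod = SpikesMod(tmp_dir='output', spikes_file_csv='spikes.csv',
+#                   spikes_file='spikes.h5', spikes_sort_order='time')
+#   sim.add_mod(mod)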
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/save_synapses.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/save_synapses.py
new file mode 100644
index 0000000..396aa7d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/save_synapses.py
@@ -0,0 +1,235 @@
+import os
+import csv
+import h5py
+import numpy as np
+from neuron import h
+
+from .sim_module import SimulatorMod
+from bmtk.simulator.bionet.biocell import BioCell
+from bmtk.simulator.bionet.io_tools import io
+from bmtk.simulator.bionet.pointprocesscell import PointProcessCell
+
+
+pc = h.ParallelContext()
+MPI_RANK = int(pc.id())
+N_HOSTS = int(pc.nhost())
+
+
+class SaveSynapses(SimulatorMod):
+ def __init__(self, network_dir, single_file=False, **params):
+ self._network_dir = network_dir
+ self._virt_lookup = {}
+ self._gid_lookup = {}
+ self._sec_lookup = {}
+ if not os.path.exists(network_dir):
+ os.makedirs(network_dir)
+
+ if N_HOSTS > 1:
+            io.log_exception('save_synapses module is not currently supported with MPI')
+
+ self._syn_writer = ConnectionWriter(network_dir)
+
+ def _print_nc(self, nc, src_nid, trg_nid, cell, src_pop, trg_pop, edge_type_id):
+ if isinstance(cell, BioCell):
+ sec_x = nc.postloc()
+ sec = h.cas()
+ sec_id = self._sec_lookup[cell.gid][sec] #cell.get_section_id(sec)
+ h.pop_section()
+ self._syn_writer.add_bio_conn(edge_type_id, src_nid, src_pop, trg_nid, trg_pop, nc.weight[0], sec_id, sec_x)
+ # print '{} ({}) <-- {} ({}), {}, {}, {}, {}'.format(trg_nid, trg_pop, src_nid, src_pop, nc.weight[0], nc.delay, sec_id, sec_x)
+
+ else:
+ self._syn_writer.add_point_conn(edge_type_id, src_nid, src_pop, trg_nid, trg_pop, nc.weight[0])
+ #print '{} ({}) <-- {} ({}), {}, {}'.format(trg_nid, trg_pop, src_nid, src_pop, nc.weight[0], nc.delay)
+
+
+ def initialize(self, sim):
+ io.log_info('Saving network connections. This may take a while.')
+
+ # Need a way to look up virtual nodes from nc.pre()
+ for pop_name, nodes_table in sim.net._virtual_nodes.items():
+ for node_id, virt_node in nodes_table.items():
+ self._virt_lookup[virt_node.hobj] = (pop_name, node_id)
+
+ # Need to figure out node_id and pop_name from nc.srcgid()
+ for node_pop in sim.net.node_populations:
+ pop_name = node_pop.name
+ for node in node_pop[0::1]:
+ if node.model_type != 'virtual':
+ self._gid_lookup[node.gid] = (pop_name, node.node_id)
+
+ for gid, cell in sim.net.get_local_cells().items():
+ trg_pop, trg_id = self._gid_lookup[gid]
+ if isinstance(cell, BioCell):
+ #from pprint import pprint
+ #pprint({i: s_name for i, s_name in enumerate(cell.get_sections())})
+ #exit()
+ # sections = cell._syn_seg_ix
+ self._sec_lookup[gid] = {sec_name: sec_id for sec_id, sec_name in enumerate(cell.get_sections_id())}
+
+ else:
+ sections = [-1]*len(cell.netcons)
+
+ for nc, edge_type_id in zip(cell.netcons, cell._edge_type_ids):
+ src_gid = int(nc.srcgid())
+ if src_gid == -1:
+ # source is a virtual node
+ src_pop, src_id = self._virt_lookup[nc.pre()]
+ else:
+ src_pop, src_id = self._gid_lookup[src_gid]
+
+ self._print_nc(nc, src_id, trg_id, cell, src_pop, trg_pop, edge_type_id)
+
+ self._syn_writer.close()
+ io.log_info(' Done saving network connections.')
+
+
+class ConnectionWriter(object):
+ class H5Index(object):
+ def __init__(self, network_dir, src_pop, trg_pop):
+ # TODO: Merge with NetworkBuilder code for building SONATA files
+ self._nsyns = 0
+ self._n_biosyns = 0
+ self._n_pointsyns = 0
+ self._block_size = 5
+
+ self._pop_name = '{}_{}'.format(src_pop, trg_pop)
+ self._h5_file = h5py.File(os.path.join(network_dir, '{}_edges.h5'.format(self._pop_name)), 'w')
+ self._pop_root = self._h5_file.create_group('/edges/{}'.format(self._pop_name))
+ self._pop_root.create_dataset('edge_group_id', (self._block_size, ), dtype=np.uint16,
+ chunks=(self._block_size, ), maxshape=(None, ))
+ self._pop_root.create_dataset('source_node_id', (self._block_size, ), dtype=np.uint64,
+ chunks=(self._block_size, ), maxshape=(None, ))
+ self._pop_root['source_node_id'].attrs['node_population'] = src_pop
+ self._pop_root.create_dataset('target_node_id', (self._block_size, ), dtype=np.uint64,
+ chunks=(self._block_size, ), maxshape=(None, ))
+ self._pop_root['target_node_id'].attrs['node_population'] = trg_pop
+ self._pop_root.create_dataset('edge_type_id', (self._block_size, ), dtype=np.uint32,
+ chunks=(self._block_size, ), maxshape=(None, ))
+            self._pop_root.create_dataset('0/syn_weight', (self._block_size, ), dtype=np.float64,
+                                          chunks=(self._block_size, ), maxshape=(None, ))
+            self._pop_root.create_dataset('0/sec_id', (self._block_size, ), dtype=np.uint64,
+                                          chunks=(self._block_size, ), maxshape=(None, ))
+            self._pop_root.create_dataset('0/sec_x', (self._block_size, ), chunks=(self._block_size, ),
+                                          maxshape=(None, ), dtype=np.float64)
+            self._pop_root.create_dataset('1/syn_weight', (self._block_size, ), dtype=np.float64,
+                                          chunks=(self._block_size, ), maxshape=(None, ))
+
+ def _add_conn(self, edge_type_id, src_id, trg_id, grp_id):
+ self._pop_root['edge_type_id'][self._nsyns] = edge_type_id
+ self._pop_root['source_node_id'][self._nsyns] = src_id
+ self._pop_root['target_node_id'][self._nsyns] = trg_id
+ self._pop_root['edge_group_id'][self._nsyns] = grp_id
+
+ self._nsyns += 1
+ if self._nsyns % self._block_size == 0:
+ self._pop_root['edge_type_id'].resize((self._nsyns + self._block_size,))
+ self._pop_root['source_node_id'].resize((self._nsyns + self._block_size, ))
+ self._pop_root['target_node_id'].resize((self._nsyns + self._block_size, ))
+ self._pop_root['edge_group_id'].resize((self._nsyns + self._block_size, ))
+
+ def add_bio_conn(self, edge_type_id, src_id, trg_id, syn_weight, sec_id, sec_x):
+ self._add_conn(edge_type_id, src_id, trg_id, 0)
+ self._pop_root['0/syn_weight'][self._n_biosyns] = syn_weight
+ self._pop_root['0/sec_id'][self._n_biosyns] = sec_id
+ self._pop_root['0/sec_x'][self._n_biosyns] = sec_x
+
+ self._n_biosyns += 1
+ if self._n_biosyns % self._block_size == 0:
+ self._pop_root['0/syn_weight'].resize((self._n_biosyns + self._block_size, ))
+ self._pop_root['0/sec_id'].resize((self._n_biosyns + self._block_size, ))
+ self._pop_root['0/sec_x'].resize((self._n_biosyns + self._block_size, ))
+
+ def add_point_conn(self, edge_type_id, src_id, trg_id, syn_weight):
+ self._add_conn(edge_type_id, src_id, trg_id, 1)
+ self._pop_root['1/syn_weight'][self._n_pointsyns] = syn_weight
+
+ self._n_pointsyns += 1
+ if self._n_pointsyns % self._block_size == 0:
+ self._pop_root['1/syn_weight'].resize((self._n_pointsyns + self._block_size, ))
+
+ def clean_ends(self):
+ self._pop_root['source_node_id'].resize((self._nsyns,))
+ self._pop_root['target_node_id'].resize((self._nsyns,))
+ self._pop_root['edge_group_id'].resize((self._nsyns,))
+ self._pop_root['edge_type_id'].resize((self._nsyns,))
+
+ self._pop_root['0/syn_weight'].resize((self._n_biosyns,))
+ self._pop_root['0/sec_id'].resize((self._n_biosyns,))
+ self._pop_root['0/sec_x'].resize((self._n_biosyns,))
+
+ self._pop_root['1/syn_weight'].resize((self._n_pointsyns,))
+
+ eg_ds = self._pop_root.create_dataset('edge_group_index', (self._nsyns, ), dtype=np.uint64)
+ bio_count, point_count = 0, 0
+ for idx, grp_id in enumerate(self._pop_root['edge_group_id']):
+ if grp_id == 0:
+ eg_ds[idx] = bio_count
+ bio_count += 1
+ elif grp_id == 1:
+ eg_ds[idx] = point_count
+ point_count += 1
+
+ self._create_index('target')
+
+ def _create_index(self, index_type='target'):
+ if index_type == 'target':
+ edge_nodes = np.array(self._pop_root['target_node_id'], dtype=np.int64)
+ output_grp = self._pop_root.create_group('indicies/target_to_source')
+ elif index_type == 'source':
+ edge_nodes = np.array(self._pop_root['source_node_id'], dtype=np.int64)
+ output_grp = self._pop_root.create_group('indicies/source_to_target')
+
+ edge_nodes = np.append(edge_nodes, [-1])
+ n_targets = np.max(edge_nodes)
+        ranges_list = [[] for _ in range(n_targets + 1)]
+
+ n_ranges = 0
+ begin_index = 0
+ cur_trg = edge_nodes[begin_index]
+ for end_index, trg_gid in enumerate(edge_nodes):
+ if cur_trg != trg_gid:
+ ranges_list[cur_trg].append((begin_index, end_index))
+ cur_trg = int(trg_gid)
+ begin_index = end_index
+ n_ranges += 1
+
+ node_id_to_range = np.zeros((n_targets + 1, 2))
+ range_to_edge_id = np.zeros((n_ranges, 2))
+ range_index = 0
+ for node_index, trg_ranges in enumerate(ranges_list):
+ if len(trg_ranges) > 0:
+ node_id_to_range[node_index, 0] = range_index
+ for r in trg_ranges:
+ range_to_edge_id[range_index, :] = r
+ range_index += 1
+ node_id_to_range[node_index, 1] = range_index
+
+ output_grp.create_dataset('range_to_edge_id', data=range_to_edge_id, dtype='uint64')
+ output_grp.create_dataset('node_id_to_range', data=node_id_to_range, dtype='uint64')
+
+ def __init__(self, network_dir):
+ self._network_dir = network_dir
+ self._pop_groups = {}
+
+ def _group_key(self, src_pop, trg_pop):
+ return (src_pop, trg_pop)
+
+ def _get_edge_group(self, src_pop, trg_pop):
+ grp_key = self._group_key(src_pop, trg_pop)
+ if grp_key not in self._pop_groups:
+ self._pop_groups[grp_key] = self.H5Index(self._network_dir, src_pop, trg_pop)
+
+ return self._pop_groups[grp_key]
+
+ def add_bio_conn(self, edge_type_id, src_id, src_pop, trg_id, trg_pop, syn_weight, sec_id, sec_x):
+ h5_grp = self._get_edge_group(src_pop, trg_pop)
+ h5_grp.add_bio_conn(edge_type_id, src_id, trg_id, syn_weight, sec_id, sec_x)
+
+ def add_point_conn(self, edge_type_id, src_id, src_pop, trg_id, trg_pop, syn_weight):
+ h5_grp = self._get_edge_group(src_pop, trg_pop)
+ h5_grp.add_point_conn(edge_type_id, src_id, trg_id, syn_weight)
+
+ def close(self):
+ for _, h5index in self._pop_groups.items():
+ h5index.clean_ends()
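+
+# Worked example of _create_index: for sorted target ids [0, 0, 1, 2, 2, 2]
+# the contiguous ranges are (0,2), (2,3) and (3,6), giving
+#   range_to_edge_id = [[0,2], [2,3], [3,6]]
+#   node_id_to_range = [[0,1], [1,2], [2,3]]   # node i -> its rows of ranges
+# so node 2 maps to range rows [2,3), i.e. edges 3..5.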
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/sim_module.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/sim_module.py
new file mode 100644
index 0000000..f04e469
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/sim_module.py
@@ -0,0 +1,72 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+class SimulatorMod(object):
+ """Class for writing custom bionet functions that will be called during the simulation. To use overwrite one or
+ more of the following methods in a subclass, and bionet will call the function at the appropiate time.
+
+ To call during a simulation:
+ ...
+ sim = Simulation(...)
+ mymod = MyModule(...)
+ sim.add_mod(mymod)
+ sim.run()
+
+ """
+
+ def initialize(self, sim):
+ """Will be called once at the beginning of the simulation run, after the network and simulation parameters have
+ all been finalized.
+
+ :param sim: Simulation object
+ """
+ pass
+
+ def step(self, sim, tstep):
+ """Called on every single time step (dt).
+
+        The step method is used for anything that should be recorded or changed continuously. dt is determined during
+        setup, and the sim parameter can be used to access simulation, network and individual cell properties.
+
+ :param sim: Simulation object.
+        :param tstep: The discrete time-step
+ """
+ pass
+
+ def block(self, sim, block_interval):
+ """This method is called once after every block of time, as specified by the configuration.
+
+ Unlike the step method which is called during every time-step, the block method will typically be called only a
+        few times over the entire simulation. The block method is preferable for saving data to disk,
+        summing up existing data, or similar functionality.
+
+ :param sim: Simulation object
+ :param block_interval: The time interval (tstep_start, tstep_end) for which the block is being called on.
+ """
+ pass
+
+ def finalize(self, sim):
+ """Call once at the very end of the simulation.
+
+ :param sim: Simulation object
+ """
+ pass
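+
+
+# A minimal sketch of a custom module using only the interface above; the
+# class name and message are illustrative, not part of bmtk:
+class ProgressPrinter(SimulatorMod):
+    def block(self, sim, block_interval):
+        print('completed time steps {} to {}'.format(*block_interval))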
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/xstim.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/xstim.py
new file mode 100644
index 0000000..f2192ff
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/xstim.py
@@ -0,0 +1,163 @@
+import os
+import math
+import pandas as pd
+import numpy as np
+import six
+from neuron import h
+
+from bmtk.simulator.bionet.modules.sim_module import SimulatorMod
+from bmtk.simulator.bionet.modules.xstim_waveforms import stimx_waveform_factory
+from bmtk.simulator.bionet.utils import rotation_matrix
+from bmtk.simulator.bionet.io_tools import io
+
+
+class XStimMod(SimulatorMod):
+ def __init__(self, positions_file, waveform, mesh_files_dir=None, cells=None, set_nrn_mechanisms=True,
+ node_set=None):
+ self._positions_file = positions_file
+ self._mesh_files_dir = mesh_files_dir if mesh_files_dir is not None \
+ else os.path.dirname(os.path.realpath(self._positions_file))
+
+ self._waveform = waveform # TODO: Check if waveform is a file or dict and load it appropiately
+
+ self._set_nrn_mechanisms = set_nrn_mechanisms
+ self._electrode = None
+ self._cells = cells
+ self._local_gids = []
+ self._fih = None
+
+ #def __set_extracellular_mechanism(self):
+ # for gid in self._local_gids:
+
+ def initialize(self, sim):
+ if self._cells is None:
+ # if specific gids not listed just get all biophysically detailed cells on this rank
+ self._local_gids = sim.biophysical_gids
+ else:
+ # get subset of selected gids only on this rank
+            self._local_gids = list(set(sim.local_gids) & set(self._cells))
+
+ self._electrode = StimXElectrode(self._positions_file, self._waveform, self._mesh_files_dir, sim.dt)
+ for gid in self._local_gids:
+ # cell = sim.net.get_local_cell(gid)
+ cell = sim.net.get_cell_gid(gid)
+ cell.setup_xstim(self._set_nrn_mechanisms)
+ self._electrode.set_transfer_resistance(gid, cell.get_seg_coords())
+
+ def set_pointers():
+ for gid in self._local_gids:
+ cell = sim.net.get_cell_gid(gid)
+ #cell = sim.net.get_local_cell(gid)
+ cell.set_ptr2e_extracellular()
+
+ self._fih = sim.h.FInitializeHandler(0, set_pointers)
+
+    def step(self, sim, tstep):
+        # Use tstep+1 to match existing isee-engine results; this makes the stimulus begin a step
+        # earlier than if using just tstep. The waveform is independent of gid, so compute it once.
+        self._electrode.calculate_waveforms(tstep+1)
+        for gid in self._local_gids:
+            cell = sim.net.get_cell_gid(gid)
+            vext_vec = self._electrode.get_vext(gid)
+            cell.set_e_extracellular(vext_vec)
+
+
+class StimXElectrode(object):
+ """
+ Extracellular Stimulating electrode
+ """
+ def __init__(self, positions_file, waveform, mesh_files_dir, dt):
+ self._dt = dt
+ self._mesh_files_dir = mesh_files_dir
+
+ stimelectrode_position_df = pd.read_csv(positions_file, sep=' ')
+
+ self.elmesh_files = stimelectrode_position_df['electrode_mesh_file']
+ self.elpos = stimelectrode_position_df[['pos_x', 'pos_y', 'pos_z']].T.values
+ self.elrot = stimelectrode_position_df[['rotation_x', 'rotation_y', 'rotation_z']].values
+ self.elnsites = self.elpos.shape[1] # Number of electrodes in electrode file
+ self.waveform = stimx_waveform_factory(waveform)
+
+ self.trans_X = {} # mapping segment coordinates
+ self.waveform_amplitude = []
+ self.el_mesh = {}
+ self.el_mesh_size = []
+
+ self.read_electrode_mesh()
+ self.rotate_the_electrodes()
+ self.place_the_electrodes()
+
+ def read_electrode_mesh(self):
+ el_counter = 0
+ for mesh_file in self.elmesh_files:
+ file_path = mesh_file if os.path.isabs(mesh_file) else os.path.join(self._mesh_files_dir, mesh_file)
+ mesh = pd.read_csv(file_path, sep=" ")
+ mesh_size = mesh.shape[0]
+ self.el_mesh_size.append(mesh_size)
+
+ self.el_mesh[el_counter] = np.zeros((3, mesh_size))
+ self.el_mesh[el_counter][0] = mesh['x_pos']
+ self.el_mesh[el_counter][1] = mesh['y_pos']
+ self.el_mesh[el_counter][2] = mesh['z_pos']
+ el_counter += 1
+
+ def place_the_electrodes(self):
+
+ transfer_vector = np.zeros((self.elnsites, 3))
+
+ for el in range(self.elnsites):
+ mesh_mean = np.mean(self.el_mesh[el], axis=1)
+ transfer_vector[el] = self.elpos[:, el] - mesh_mean[:]
+
+ for el in range(self.elnsites):
+ new_mesh = self.el_mesh[el].T + transfer_vector[el]
+ self.el_mesh[el] = new_mesh.T
+
+ def rotate_the_electrodes(self):
+ for el in range(self.elnsites):
+ phi_x = self.elrot[el][0]
+ phi_y = self.elrot[el][1]
+ phi_z = self.elrot[el][2]
+
+ rot_x = rotation_matrix([1, 0, 0], phi_x)
+ rot_y = rotation_matrix([0, 1, 0], phi_y)
+ rot_z = rotation_matrix([0, 0, 1], phi_z)
+ rot_xy = rot_x.dot(rot_y)
+ rot_xyz = rot_xy.dot(rot_z)
+ new_mesh = np.dot(rot_xyz, self.el_mesh[el])
+ self.el_mesh[el] = new_mesh
+
+ def set_transfer_resistance(self, gid, seg_coords):
+
+ rho = 300.0 # ohm cm
+ r05 = seg_coords['p05']
+ nseg = r05.shape[1]
+ cell_map = np.zeros((self.elnsites, nseg))
+ for el in six.moves.range(self.elnsites):
+
+ mesh_size = self.el_mesh_size[el]
+
+ for k in range(mesh_size):
+
+ rel = np.expand_dims(self.el_mesh[el][:, k], axis=1)
+ rel_05 = rel - r05
+ r2 = np.einsum('ij,ij->j', rel_05, rel_05)
+ r = np.sqrt(r2)
+ if not all(i >= 10 for i in r):
+                    io.log_exception('External electrode is too close (segment within 10 um)')
+ cell_map[el, :] += 1. / r
+
+ cell_map *= (rho / (4 * math.pi)) * 0.01
+ self.trans_X[gid] = cell_map
+
+ def calculate_waveforms(self, tstep):
+ simulation_time = self._dt * tstep
+ # copies waveform elnsites times (homogeneous)
+ self.waveform_amplitude = np.zeros(self.elnsites) + self.waveform.calculate(simulation_time)
+
+ def get_vext(self, gid):
+ waveform_per_mesh = np.divide(self.waveform_amplitude, self.el_mesh_size)
+ v_extracellular = np.dot(waveform_per_mesh, self.trans_X[gid]) * 1E6
+ vext_vec = h.Vector(v_extracellular)
+
+ return vext_vec
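+
+# Each mesh point above is treated as a point current source in a homogeneous
+# medium, V = rho*I/(4*pi*r): set_transfer_resistance sums 1/r over the mesh
+# points and get_vext spreads the waveform amplitude evenly across them before
+# applying the map (the 0.01 and 1E6 factors are unit conversions).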
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/xstim_waveforms.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/xstim_waveforms.py
new file mode 100644
index 0000000..86e204d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/modules/xstim_waveforms.py
@@ -0,0 +1,127 @@
+import os
+import numpy as np
+import pandas as pd
+import json
+from six import string_types
+
+from bmtk.simulator.bionet.io_tools import io
+
+class BaseWaveform(object):
+ """Abstraction of waveform class to ensure calculate method is implemented"""
+ def calculate(self, simulation_time):
+ raise NotImplementedError("Implement specific waveform calculation")
+
+
+class BaseWaveformType(object):
+ """Specific waveform type"""
+ def __init__(self, waveform_config):
+ self.amp = float(waveform_config["amp"]) # units? mA?
+ self.delay = float(waveform_config["del"]) # ms
+ self.duration = float(waveform_config["dur"]) # ms
+
+ def is_active(self, simulation_time):
+ stop_time = self.delay + self.duration
+ return self.delay < simulation_time < stop_time
+
+
+class WaveformTypeDC(BaseWaveformType, BaseWaveform):
+ """DC (step) waveform"""
+ def __init__(self, waveform_config):
+ super(WaveformTypeDC, self).__init__(waveform_config)
+
+ def calculate(self, t): # TODO better name
+ if self.is_active(t):
+ return self.amp
+ else:
+ return 0
+
+
+class WaveformTypeSin(BaseWaveformType, BaseWaveform):
+ """Sinusoidal waveform"""
+ def __init__(self, waveform_config):
+ super(WaveformTypeSin, self).__init__(waveform_config)
+ self.freq = float(waveform_config["freq"]) # Hz
+ self.phase_offset = float(waveform_config.get("phase", np.pi)) # radians, optional
+ self.amp_offset = float(waveform_config.get("offset", 0)) # units? mA? optional
+
+ def calculate(self, t): # TODO better name
+ if self.is_active(t):
+            f = self.freq / 1000.  # convert Hz to cycles/ms, since t is in ms
+ a = self.amp
+ return a * np.sin(2 * np.pi * f * t + self.phase_offset) + self.amp_offset
+ else:
+ return 0
+
+
+class WaveformCustom(BaseWaveform):
+ """Custom waveform defined by csv file"""
+ def __init__(self, waveform_file):
+ self.definition = pd.read_csv(waveform_file, sep='\t')
+
+ def calculate(self, t):
+ return np.interp(t, self.definition["time"], self.definition["amplitude"])
+
+
+class ComplexWaveform(BaseWaveform):
+ """Superposition of simple waveforms"""
+ def __init__(self, el_collection):
+ self.electrodes = el_collection
+
+ def calculate(self, t):
+ val = 0
+ for el in self.electrodes:
+ val += el.calculate(t)
+
+ return val
+
+
+# mapping from 'shape' code to subclass, always lowercase
+shape_classes = {
+ 'dc': WaveformTypeDC,
+ 'sin': WaveformTypeSin,
+}
+
+
+def stimx_waveform_factory(waveform):
+ """
+ Factory to create correct waveform class based on conf.
+ Supports json config in conf as well as string pointer to a file.
+ :rtype: BaseWaveformType
+ """
+ if isinstance(waveform, string_types):
+ # if waveform_conf is str or unicode assume to be name of file in stim_dir
+ # waveform_conf = str(waveform_conf) # make consistent
+        # os.path.splitext() returns a (root, ext) tuple and ext keeps the leading
+        # dot, so extract and normalize the extension before comparing
+        file_ext = os.path.splitext(waveform)[1].lstrip('.').lower()
+        if file_ext == 'csv':
+            return WaveformCustom(waveform)
+
+        elif file_ext == 'json':
+            with open(waveform, 'r') as f:
+                waveform = json.load(f)
+        else:
+            io.log_warning('Unknown filetype for waveform')
+
+ shape_key = waveform["shape"].lower()
+
+ if shape_key not in shape_classes:
+ io.log_warning("Waveform shape not known") # throw error?
+
+ Constructor = shape_classes[shape_key]
+ return Constructor(waveform)
+
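+# A usage sketch (hypothetical config values; units follow the comments in
+# BaseWaveformType above):
+#
+#   waveform = stimx_waveform_factory({"shape": "dc", "amp": 0.5, "del": 10.0, "dur": 100.0})
+#   waveform.calculate(50.0)   # -> 0.5, stimulus active at t = 50 ms
+#   waveform.calculate(200.0)  # -> 0, past delay + duration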
+
+def iclamp_waveform_factory(conf):
+ """
+ Factory to create correct waveform class based on conf.
+ Supports json config in conf as well as string pointer to a file.
+ :rtype: BaseWaveformType
+ """
+ iclamp_waveform_conf = conf["iclamp"]
+
+ shape_key = iclamp_waveform_conf["shape"].lower()
+
+ if shape_key not in shape_classes:
+        io.log_warning("iclamp waveform shape '{}' not known".format(shape_key))  # falls through to a KeyError below
+
+ Constructor = shape_classes[shape_key]
+ return Constructor(iclamp_waveform_conf)
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/morphology.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/morphology.py
new file mode 100644
index 0000000..b0085fc
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/morphology.py
@@ -0,0 +1,245 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+from neuron import h
+
+
+pc = h.ParallelContext() # object to access MPI methods
+
+
+class Morphology(object):
+ """Methods for processing morphological data"""
+ def __init__(self, hobj):
+ """reuse hoc object from one of the cells which share the same morphology/model"""
+ self.hobj = hobj
+ self.sec_type_swc = {'soma': 1, 'somatic': 1, # convert section name and section list names
+ 'axon': 2, 'axonal': 2, # into a consistent swc notation
+ 'dend': 3, 'basal': 3,
+ 'apic': 4, 'apical': 4}
+ self.nseg = self.get_nseg()
+ self._segments = {}
+
+ def get_nseg(self):
+ nseg = 0
+ for sec in self.hobj.all:
+ nseg += sec.nseg # get the total # of segments in the cell
+ return nseg
+
+ def get_soma_pos(self):
+ n3dsoma = 0
+ r3dsoma = np.zeros(3)
+        for sec in self.hobj.somatic:
+            n3d = int(h.n3d(sec=sec))  # number of 3d points in this section
+            n3dsoma += n3d
+
+            for i in range(n3d):
+                # pass sec explicitly so each call reads from the right section
+                r3dsoma[0] += h.x3d(i, sec=sec)
+                r3dsoma[1] += h.y3d(i, sec=sec)
+                r3dsoma[2] += h.z3d(i, sec=sec)
+
+ r3dsoma /= n3dsoma
+ return r3dsoma
+
+ def calc_seg_coords(self):
+ """Calculate segment coordinates from 3d point coordinates"""
+ ix = 0 # segment index
+
+ p3dsoma = self.get_soma_pos()
+ self.psoma = p3dsoma
+
+ p0 = np.zeros((3, self.nseg)) # hold the coordinates of segment starting points
+ p1 = np.zeros((3, self.nseg)) # hold the coordinates of segment end points
+ p05 = np.zeros((3, self.nseg))
+ d0 = np.zeros(self.nseg)
+ d1 = np.zeros(self.nseg)
+
+        for sec in self.hobj.all:
+            n3d = int(h.n3d(sec=sec))  # number of 3d points in this section
+            p3d = np.zeros((3, n3d))  # to hold 3d coordinates of the current section
+            l3d = np.zeros(n3d)  # to hold arc-length positions of the 3d points
+            diam3d = np.zeros(n3d)  # to hold diameters at the 3d points
+
+            for i in range(n3d):
+                p3d[0, i] = h.x3d(i, sec=sec) - p3dsoma[0]
+                p3d[1, i] = h.y3d(i, sec=sec) - p3dsoma[1]  # shift coordinates to place the soma at the origin
+                p3d[2, i] = h.z3d(i, sec=sec) - p3dsoma[2]
+                diam3d[i] = h.diam3d(i, sec=sec)
+                l3d[i] = h.arc3d(i, sec=sec)
+
+ l3d /= sec.L # normalize
+ nseg = sec.nseg
+
+ l0 = np.zeros(nseg) # keep range of segment starting point
+ l1 = np.zeros(nseg) # keep range of segment ending point
+ l05 = np.zeros(nseg)
+
+ for iseg, seg in enumerate(sec):
+ l0[iseg] = seg.x - 0.5*1/nseg # x (normalized distance along the section) for the beginning of the segment
+ l1[iseg] = seg.x + 0.5*1/nseg # x for the end of the segment
+ l05[iseg] = seg.x
+
+ if n3d != 0:
+ p0[0, ix:ix+nseg] = np.interp(l0, l3d, p3d[0, :])
+ p0[1, ix:ix+nseg] = np.interp(l0, l3d, p3d[1, :])
+ p0[2, ix:ix+nseg] = np.interp(l0, l3d, p3d[2, :])
+ d0[ix:ix+nseg] = np.interp(l0, l3d, diam3d[:])
+
+ p1[0, ix:ix+nseg] = np.interp(l1, l3d, p3d[0, :])
+ p1[1, ix:ix+nseg] = np.interp(l1, l3d, p3d[1, :])
+ p1[2, ix:ix+nseg] = np.interp(l1, l3d, p3d[2, :])
+ d1[ix:ix+nseg] = np.interp(l1, l3d, diam3d[:])
+
+ p05[0,ix:ix+nseg] = np.interp(l05, l3d, p3d[0,:])
+ p05[1,ix:ix+nseg] = np.interp(l05, l3d, p3d[1,:])
+ p05[2,ix:ix+nseg] = np.interp(l05, l3d, p3d[2,:])
+ else:
+ # If we are dealing with a stub axon, this compartment
+ # will be zero'd out in the calculation of transfer
+ # resistance in modules/ecp.py
+
+ if sec not in self.hobj.axonal:
+ raise Exception("Non-axonal section with 0 3d points (stub)")
+
+ if nseg != 1:
+ raise Exception("in calc_seg_coords(), n3d = 0, but nseg != 1")
+
+ ix += nseg
+
+ self.seg_coords = {}
+
+ self.seg_coords['p0'] = p0
+ self.seg_coords['p1'] = p1
+ self.seg_coords['p05'] = p05
+
+ self.seg_coords['d0'] = d0
+ self.seg_coords['d1'] = d1
+
+ return self.seg_coords
+
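+    # A sketch of the interpolation above (hypothetical values): for a section with
+    # arc-length samples l3d = [0, 0.5, 1] and x-coordinates p3d[0] = [0, 10, 20], a
+    # segment midpoint at seg.x = 0.25 maps to np.interp(0.25, l3d, p3d[0]) == 5.0,
+    # i.e. coordinates vary linearly with normalized arc length.
+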
+ def set_seg_props(self):
+ """Set segment properties which are invariant for all cell using this morphology"""
+ seg_type = []
+ seg_area = []
+ seg_x = []
+ seg_dist = []
+ seg_length = []
+
+ h.distance(sec=self.hobj.soma[0]) # measure distance relative to the soma
+
+ for sec in self.hobj.all:
+ fullsecname = sec.name()
+ sec_type = fullsecname.split(".")[1][:4] # get sec name type without the cell name
+ sec_type_swc = self.sec_type_swc[sec_type] # convert to swc code
+
+            for seg in sec:
+                seg_area.append(h.area(seg.x, sec=sec))
+                seg_x.append(seg.x)
+                seg_length.append(sec.L/sec.nseg)
+                seg_type.append(sec_type_swc)  # record section type in a list
+                seg_dist.append(h.distance(seg.x, sec=sec))  # distance to the center of the segment
+
+ self.seg_prop = {}
+ self.seg_prop['type'] = np.array(seg_type)
+ self.seg_prop['area'] = np.array(seg_area)
+ self.seg_prop['x'] = np.array(seg_x)
+ self.seg_prop['dist'] = np.array(seg_dist)
+ self.seg_prop['length'] = np.array(seg_length)
+ self.seg_prop['dist0'] = self.seg_prop['dist'] - self.seg_prop['length']/2
+ self.seg_prop['dist1'] = self.seg_prop['dist'] + self.seg_prop['length']/2
+
+ def get_target_segments(self, edge_type):
+ # Determine the target segments and their probabilities of connections for each new edge-type. Save the
+ # information for each additional time a given edge-type is used on this morphology
+ # TODO: Don't rely on edge-type-table, just use the edge?
+ if edge_type in self._segments:
+ return self._segments[edge_type]
+
+ else:
+ tar_seg_ix, tar_seg_prob = self.find_sections(edge_type.target_sections, edge_type.target_distance)
+ self._segments[edge_type] = (tar_seg_ix, tar_seg_prob)
+ return tar_seg_ix, tar_seg_prob
+
+ """
+ tar_sec_labels = edge_type.target_sections
+ drange = edge_type.target_distance
+ dmin, dmax = drange[0], drange[1]
+
+ seg_d0 = self.seg_prop['dist0'] # use a more compact variables
+ seg_d1 = self.seg_prop['dist1']
+ seg_length = self.seg_prop['length']
+ seg_area = self.seg_prop['area']
+ seg_type = self.seg_prop['type']
+
+ # Find the fractional overlap between the segment and the distance range:
+ # this is done by finding the overlap between [d0,d1] and [dmin,dmax]
+ # np.minimum(seg_d1,dmax) find the smaller of the two end locations
+ # np.maximum(seg_d0,dmin) find the larger of the two start locations
+ # np.maximum(0,overlap) is used to return zero when segments do not overlap
+ # and then dividing by the segment length
+ frac_overlap = np.maximum(0, (np.minimum(seg_d1, dmax) - np.maximum(seg_d0, dmin))) / seg_length
+ ix_drange = np.where(frac_overlap > 0) # find indexes with non-zero overlap
+ ix_labels = np.array([], dtype=np.int)
+
+ for tar_sec_label in tar_sec_labels: # find indexes within sec_labels
+ sec_type = self.sec_type_swc[tar_sec_label] # get swc code for the section label
+ ix_label = np.where(seg_type == sec_type)
+ ix_labels = np.append(ix_labels, ix_label) # target segment indexes
+
+ tar_seg_ix = np.intersect1d(ix_drange, ix_labels) # find intersection between indexes for range and labels
+ tar_seg_length = seg_length[tar_seg_ix] * frac_overlap[tar_seg_ix] # weighted length of targeted segments
+ tar_seg_prob = tar_seg_length / np.sum(tar_seg_length) # probability of targeting segments
+
+ self._segments[edge_type] = (tar_seg_ix, tar_seg_prob)
+ return tar_seg_ix, tar_seg_prob
+ """
+
+ def find_sections(self, target_sections, distance_range):
+ dmin, dmax = distance_range[0], distance_range[1]
+
+        seg_d0 = self.seg_prop['dist0']  # use more compact variable names
+ seg_d1 = self.seg_prop['dist1']
+ seg_length = self.seg_prop['length']
+ seg_area = self.seg_prop['area']
+ seg_type = self.seg_prop['type']
+
+ # Find the fractional overlap between the segment and the distance range:
+ # this is done by finding the overlap between [d0,d1] and [dmin,dmax]
+ # np.minimum(seg_d1,dmax) find the smaller of the two end locations
+ # np.maximum(seg_d0,dmin) find the larger of the two start locations
+ # np.maximum(0,overlap) is used to return zero when segments do not overlap
+ # and then dividing by the segment length
+ frac_overlap = np.maximum(0, (np.minimum(seg_d1, dmax) - np.maximum(seg_d0, dmin))) / seg_length
+ ix_drange = np.where(frac_overlap > 0) # find indexes with non-zero overlap
+        ix_labels = np.array([], dtype=int)  # builtin int: the np.int alias was removed from NumPy
+
+ for tar_sec_label in target_sections: # find indexes within sec_labels
+ sec_type = self.sec_type_swc[tar_sec_label] # get swc code for the section label
+ ix_label = np.where(seg_type == sec_type)
+ ix_labels = np.append(ix_labels, ix_label) # target segment indexes
+
+ tar_seg_ix = np.intersect1d(ix_drange, ix_labels) # find intersection between indexes for range and labels
+ tar_seg_length = seg_length[tar_seg_ix] * frac_overlap[tar_seg_ix] # weighted length of targeted segments
+ tar_seg_prob = tar_seg_length / np.sum(tar_seg_length) # probability of targeting segments
+ return tar_seg_ix, tar_seg_prob
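+
+# A numeric sketch of the fractional-overlap weighting above (hypothetical values):
+# a 20 um segment spanning [90, 110] um from the soma, with distance_range = [100, 150],
+# overlaps by min(110, 150) - max(90, 100) = 10 um, so frac_overlap = 10/20 = 0.5 and
+# that segment's targeting probability is weighted by half of its length.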
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/nml_reader.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/nml_reader.py
new file mode 100644
index 0000000..c64b9cd
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/nml_reader.py
@@ -0,0 +1,168 @@
+class NMLTree(object):
+ nml_ns = '{http://www.neuroml.org/schema/neuroml2}'
+ element_registry = {}
+
+ def __init__(self, nml_path):
+ from xml.etree import ElementTree
+ self._nml_path = nml_path
+ self._nml_root = ElementTree.parse(nml_path).getroot()
+ #self._relevant_elements = {
+ # NMLTree.ns_name('channelDensity'): ChannelDensity,
+ # NMLTree.ns_name('resistivity'): Resistivity
+ #}
+
+        # For each section store a list of all the NML elements included
+ self._soma_props = {}
+ self._axon_props = {}
+ self._dend_props = {}
+ self._apic_props = {}
+ # For lookup by segmentGroup attribute, include common synonyms for diff sections
+ self._section_maps = {
+ 'soma': self._soma_props, 'somatic': self._soma_props,
+ 'axon': self._axon_props, 'axonal': self._axon_props,
+ 'dend': self._dend_props, 'basal': self._dend_props, 'dendritic': self._dend_props,
+ 'apic': self._apic_props, 'apical': self._apic_props
+ }
+
+ self._parse_root(self._nml_root)
+
+ @classmethod
+ def ns_name(cls, name):
+ return '{}{}'.format(cls.nml_ns, name)
+
+ @staticmethod
+ def common_name(elem):
+ if '}' in elem:
+ return elem.split('}')[-1]
+ else:
+ return elem
+
+ @staticmethod
+ def parse_value(value):
+ val_list = value.split(' ')
+ if len(val_list) == 2:
+ return float(val_list[0]), val_list[1]
+ elif len(val_list) == 1:
+ return float(val_list[0]), 'NONE'
+ else:
+ raise Exception('Cannot parse value {}'.format(value))
+
+ @classmethod
+ def register_module(cls, element_cls):
+ cls.element_registry[cls.ns_name(element_cls.element_tag())] = element_cls
+ return element_cls
+
+ def _parse_root(self, root):
+ for elem in root.iter():
+ if elem.tag in NMLTree.element_registry:
+ nml_element = NMLTree.element_registry[elem.tag](elem)
+ self._add_param(nml_element)
+
+ def _add_param(self, nml_element):
+ seggroup_str = nml_element.section
+ if seggroup_str is None:
+ raise Exception('Error: tag {} in {} is missing segmentGroup'.format(nml_element.id, self._nml_path))
+ elif seggroup_str.lower() == 'all':
+ sections = ['soma', 'axon', 'apic', 'dend']
+ else:
+ sections = [seggroup_str.lower()]
+
+ for sec_name in sections:
+ param_table = self._section_maps[sec_name]
+ if sec_name in param_table:
+ raise Exception('Error: {} already has a {} element in {}.'.format(nml_element.id, sec_name,
+ self._nml_path))
+
+ self._section_maps[sec_name][nml_element.id] = nml_element
+
+ def __getitem__(self, section_name):
+ return self._section_maps[section_name]
+
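+# A usage sketch ('cell.nml' is a hypothetical file defining biophysical parameters
+# with segmentGroup attributes):
+#
+#   tree = NMLTree('cell.nml')
+#   for elem_id, elem in tree['soma'].items():
+#       print(elem_id, elem.tag_name)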
+
+class NMLElement(object):
+ def __init__(self, nml_element):
+ self._elem = nml_element
+ self._attribs = nml_element.attrib
+
+ self.tag_name = NMLTree.common_name(self._elem.tag)
+ self.section = self._attribs.get('segmentGroup', None)
+ self.id = self._attribs.get('id', self.tag_name)
+
+ @staticmethod
+ def element_tag():
+ raise NotImplementedError()
+
+
+@NMLTree.register_module
+class ChannelDensity(NMLElement):
+ def __init__(self, nml_element):
+ super(ChannelDensity, self).__init__(nml_element)
+ self.ion = self._attribs['ion']
+ self.ion_channel = self._attribs['ionChannel']
+
+ if 'erev' in self._attribs:
+ v_list = NMLTree.parse_value(self._attribs['erev'])
+ self.erev = v_list[0]
+ self.erev_units = v_list[1]
+ else:
+ self.erev = None
+
+ v_list = NMLTree.parse_value(self._attribs['condDensity'])
+ self.cond_density = v_list[0]
+ self.cond_density_units = v_list[1]
+
+ @staticmethod
+ def element_tag():
+ return 'channelDensity'
+
+
+@NMLTree.register_module
+class ChannelDensityNernst(ChannelDensity):
+
+ @staticmethod
+ def element_tag():
+ return 'channelDensityNernst'
+
+
+@NMLTree.register_module
+class Resistivity(NMLElement):
+ def __init__(self, nml_element):
+ super(Resistivity, self).__init__(nml_element)
+ v_list = NMLTree.parse_value(self._attribs['value'])
+ self.value = v_list[0]
+ self.value_units = v_list[1]
+
+ @staticmethod
+ def element_tag():
+ return 'resistivity'
+
+
+@NMLTree.register_module
+class SpecificCapacitance(NMLElement):
+ def __init__(self, nml_element):
+ super(SpecificCapacitance, self).__init__(nml_element)
+ v_list = NMLTree.parse_value(self._attribs['value'])
+ self.value = v_list[0]
+ self.value_units = v_list[1]
+
+ @staticmethod
+ def element_tag():
+ return 'specificCapacitance'
+
+
+@NMLTree.register_module
+class ConcentrationModel(NMLElement):
+ def __init__(self, nml_element):
+ super(ConcentrationModel, self).__init__(nml_element)
+ self.type = self._attribs['type']
+ v_list = NMLTree.parse_value(self._attribs['decay'])
+ self.decay = v_list[0]
+ self.decay_units = v_list[1]
+
+ v_list = NMLTree.parse_value(self._attribs['gamma'])
+ self.gamma = v_list[0]
+ self.gamma_units = v_list[1]
+
+ @staticmethod
+ def element_tag():
+ return 'concentrationModel'
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/nrn.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/nrn.py
new file mode 100644
index 0000000..c5f8419
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/nrn.py
@@ -0,0 +1,82 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import sys
+import os
+import glob
+import neuron
+from neuron import h
+
+from bmtk.simulator.bionet.pyfunction_cache import py_modules
+from bmtk.simulator.bionet.pyfunction_cache import load_py_modules
+from bmtk.simulator.bionet.pyfunction_cache import synapse_model, synaptic_weight, cell_model
+
+
+pc = h.ParallelContext()
+
+
+def quit_execution(): # quit the execution with a message
+ pc.done()
+ sys.exit()
+ return
+
+
+def clear_gids():
+ pc.gid_clear()
+ pc.barrier()
+
+
+def load_neuron_modules(mechanisms_dir, templates_dir, default_templates=True):
+ """
+
+ :param mechanisms_dir:
+ :param templates_dir:
+ :param default_templates:
+ """
+ h.load_file('stdgui.hoc')
+
+ bionet_dir = os.path.dirname(__file__)
+ # h.load_file(os.path.join(bionet_dir, 'import3d.hoc')) # customized import3d.hoc to supress warnings
+ # h.load_file('import3d.hoc')
+ h.load_file(os.path.join(bionet_dir,'default_templates', 'advance.hoc'))
+
+ if mechanisms_dir is not None:
+ neuron.load_mechanisms(str(mechanisms_dir))
+
+ # if default_templates:
+ # load_templates(os.path.join(bionet_dir, 'default_templates'))
+
+ # if templates_dir:
+ # load_templates(templates_dir)
+
+
+def load_templates(template_dir):
+ """Load all templates to be available in the hoc namespace for instantiating cells"""
+ cwd = os.getcwd()
+ os.chdir(template_dir)
+
+ hoc_templates = glob.glob("*.hoc")
+
+ for hoc_template in hoc_templates:
+ h.load_file(str(hoc_template))
+
+ os.chdir(cwd)
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/pointprocesscell.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/pointprocesscell.py
new file mode 100644
index 0000000..8d0d893
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/pointprocesscell.py
@@ -0,0 +1,85 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from neuron import h
+import six
+from bmtk.simulator.bionet.cell import Cell
+
+
+pc = h.ParallelContext() # object to access MPI methods
+
+
+class PointProcessCell(Cell):
+ """Implimentation of a Leaky Integrate-and-file neuron type cell."""
+ def __init__(self, node, bionetwork):
+ super(PointProcessCell, self).__init__(node)
+ self.set_spike_detector()
+ self._src_gids = []
+ self._src_nets = []
+ self._edge_type_ids = []
+
+ def set_spike_detector(self):
+ nc = h.NetCon(self.hobj, None)
+ pc.cell(self.gid, nc)
+
+ def set_im_ptr(self):
+ pass
+
+ def set_syn_connection(self, edge_prop, src_node, stim=None):
+ syn_params = edge_prop.dynamics_params
+ nsyns = edge_prop.nsyns
+ delay = edge_prop.delay
+
+ syn_weight = edge_prop.syn_weight(src_node, self._node)
+ if not edge_prop.preselected_targets:
+ # TODO: this is not very robust, need some other way
+ syn_weight *= syn_params['sign'] * nsyns
+
+ if stim is not None:
+ src_gid = -1
+ nc = h.NetCon(stim.hobj, self.hobj)
+ else:
+ src_gid = src_node.node_id
+ nc = pc.gid_connect(src_gid, self.hobj)
+
+ weight = syn_weight
+ nc.weight[0] = weight
+ nc.delay = delay
+ self._netcons.append(nc)
+ self._src_gids.append(src_gid)
+ self._src_nets.append(-1)
+ self._edge_type_ids.append(edge_prop.edge_type_id)
+ return nsyns
+
+ def get_connection_info(self):
+        # TODO: There should be a more efficient and robust way to return synapse information.
+        return [[self.gid, self._src_gids[i], self.network_name, self._src_nets[i], 'NaN', 'NaN',
+                 self.netcons[i].weight[0], self.netcons[i].delay, self._edge_type_ids[i], 1]
+ for i in range(len(self._src_gids))]
+
+ def print_synapses(self):
+ rstr = ''
+ for i in six.moves.range(len(self._src_gids)):
+ rstr += '{}> <-- {} ({}, {})\n'.format(i, self._src_gids[i], self.netcons[i].weight[0],
+ self.netcons[i].delay)
+
+ return rstr
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/pointsomacell.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/pointsomacell.py
new file mode 100644
index 0000000..0c96594
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/pointsomacell.py
@@ -0,0 +1,12 @@
+from neuron import h
+from bmtk.simulator.bionet.cell import Cell
+
+
+pc = h.ParallelContext() # object to access MPI methods
+
+
+class PointSomaCell(Cell):
+ """Used to represent single compartment cells with neural mechanisms"""
+ def __init__(self):
+ # TODO: Implement
+ raise NotImplementedError('Point Soma cell types are not currently implemented.')
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/pyfunction_cache.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/pyfunction_cache.py
new file mode 100644
index 0000000..1fa5a26
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/pyfunction_cache.py
@@ -0,0 +1,252 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import types
+import warnings
+from functools import wraps
+
+
+class _PyFunctions(object):
+ """Structure for holding custom user-defined python functions.
+
+    Stores a set of functions created by the user. Do not access this class directly; rather use the
+    decorators or setter functions, and use the module-level py_modules instance to access individual functions.
+    It is divided into:
+        synaptic_weight: functions for calculating synaptic weight.
+        cell_model: should return a NEURON cell hobj.
+        synapse_model: should return a NEURON synapse object.
+ """
+ def __init__(self):
+ self.__syn_weights = {}
+ self.__cell_models = {}
+ self.__synapse_models = {}
+ self.__cell_processors = {}
+
+ def clear(self):
+ self.__syn_weights.clear()
+ self.__cell_models.clear()
+ self.__synapse_models.clear()
+ self.__cell_processors.clear()
+
+ def add_synaptic_weight(self, name, func, overwrite=True):
+ """stores synpatic fuction for given name"""
+ if overwrite or name not in self.__syn_weights:
+ self.__syn_weights[name] = func
+
+ @property
+ def synaptic_weights(self):
+ """return list of the names of all available synaptic weight functions"""
+ return self.__syn_weights.keys()
+
+ def synaptic_weight(self, name):
+ """return the synpatic weight function"""
+ return self.__syn_weights[name]
+
+ def has_synaptic_weight(self, name):
+ return name in self.__syn_weights
+
+ def __cell_model_key(self, directive, model_type):
+ return (directive, model_type)
+
+ def add_cell_model(self, directive, model_type, func, overwrite=True):
+ key = self.__cell_model_key(directive, model_type)
+ if overwrite or key not in self.__cell_models:
+ self.__cell_models[key] = func
+
+ @property
+ def cell_models(self):
+ return self.__cell_models.keys()
+
+ def cell_model(self, directive, model_type):
+ return self.__cell_models[self.__cell_model_key(directive, model_type)]
+
+ def has_cell_model(self, directive, model_type):
+ return self.__cell_model_key(directive, model_type) in self.__cell_models
+
+ def add_synapse_model(self, name, func, overwrite=True):
+ if overwrite or name not in self.__synapse_models:
+ self.__synapse_models[name] = func
+
+ @property
+ def synapse_models(self):
+ return self.__synapse_models.keys()
+
+ def synapse_model(self, name):
+ return self.__synapse_models[name]
+
+ @property
+ def cell_processors(self):
+ return self.__cell_processors.keys()
+
+ def cell_processor(self, name):
+ return self.__cell_processors[name]
+
+ def add_cell_processor(self, name, func, overwrite=True):
+        if overwrite or name not in self.__cell_processors:
+ self.__cell_processors[name] = func
+
+ def __repr__(self):
+ rstr = '{}: {}\n'.format('cell_models', self.cell_models)
+ rstr += '{}: {}\n'.format('synapse_models', self.synapse_models)
+ rstr += '{}: {}'.format('synaptic_weights', self.synaptic_weights)
+ return rstr
+
+py_modules = _PyFunctions()
+
+
+def synaptic_weight(*wargs, **wkwargs):
+ """A decorator for registering a function as a synaptic weight function.
+ To use either
+ @synaptic_weight
+ def weight_function(): ...
+
+ or
+ @synaptic_weight(name='name_in_edge_types')
+ def weight_function(): ...
+
+ Once the decorator has been attached and imported the functions will automatically be added to py_modules.
+ """
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+ py_modules.add_synaptic_weight(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_synaptic_weight(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
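+# A registration sketch (hypothetical function name; the exact weight-function
+# signature is an assumption, it is not defined in this file):
+#
+#   @synaptic_weight(name='gaussian_weights')
+#   def gaussian_weights(edge_props, src_props, trg_props):
+#       return edge_props['syn_weight']
+#
+# py_modules.synaptic_weight('gaussian_weights') then returns the registered function.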
+
+def cell_model(*wargs, **wkwargs):
+ """A decorator for registering NEURON cell loader functions."""
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
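+        # NOTE: _PyFunctions.add_cell_model() expects (directive, model_type, func); this
+        # legacy call passes only (name, func) and would raise a TypeError if this path ran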
+ py_modules.add_cell_model(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_cell_model(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
+
+def synapse_model(*wargs, **wkwargs):
+ """A decorator for registering NEURON synapse loader functions."""
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+ py_modules.add_synapse_model(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_synapse_model(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
+
+def add_weight_function(func, name=None, overwrite=True):
+ assert(callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_synaptic_weight(func_name, func, overwrite)
+
+
+def add_cell_model(func, directive, model_type, overwrite=True):
+ assert(callable(func))
+ # func_name = name if name is not None else func.__name__
+ py_modules.add_cell_model(directive, model_type, func, overwrite)
+
+
+def add_cell_processor(func, name=None, overwrite=True):
+ assert(callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_cell_processor(func_name, func, overwrite)
+
+
+def add_synapse_model(func, name=None, overwrite=True):
+ assert (callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_synapse_model(func_name, func, overwrite)
+
+
+def load_py_modules(cell_models=None, syn_models=None, syn_weights=None, cell_processors=None):
+ # py_modules.clear()
+ warnings.warn('Do not call this method directly', DeprecationWarning)
+ if cell_models is not None:
+ assert(isinstance(cell_models, types.ModuleType))
+ for f in [cell_models.__dict__.get(f) for f in dir(cell_models)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_cell_model(f.__name__, f)
+
+ if syn_models is not None:
+ assert(isinstance(syn_models, types.ModuleType))
+ for f in [syn_models.__dict__.get(f) for f in dir(syn_models)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_synapse_model(f.__name__, f)
+
+ if syn_weights is not None:
+ assert(isinstance(syn_weights, types.ModuleType))
+ for f in [syn_weights.__dict__.get(f) for f in dir(syn_weights)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_synaptic_weight(f.__name__, f)
+
+ if cell_processors is not None:
+ assert(isinstance(cell_processors, types.ModuleType))
+ for f in [cell_processors.__dict__.get(f) for f in dir(cell_processors)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_cell_processor(f.__name__, f)
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/config_schema.json b/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/config_schema.json
new file mode 100644
index 0000000..cd63b71
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/config_schema.json
@@ -0,0 +1,130 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "type": "object",
+
+ "properties": {
+ "target_simulator": {"$ref": "#/definitions/target_simulator"},
+ "components": {"$ref": "#/definitions/components"},
+ "networks": {
+ "type": "object",
+ "properties": {
+ "node_files": {"$ref": "#/definitions/nodes_files"},
+ "edge_files": {"$ref": "#/definitions/edges_files"}
+ }
+ },
+ "run": {"$ref": "#/definitions/run"},
+ "groups": {"$ref": "#/definitions/groups"},
+ "output": {"$ref": "#/definitions/output"},
+ "conditions": {"$ref": "#/definitions/conditions"},
+ "input": {
+ "type": "array",
+ "items": {
+ "oneOf": [
+ {"$ref": "#/definitions/input_file"}
+ ]
+ }
+ }
+ },
+
+ "definitions": {
+ "target_simulator": {
+ "type": "string"
+ },
+
+ "components": {
+ "type": "object",
+ "properties": {
+ "synaptic_models_dir": {"type": "directory", "exists": true},
+ "mechanisms_dir": {"type": "directory", "exists": true},
+ "morphologies_dir": {"type": "directory", "exists": true},
+ "biophysical_neuron_models_dir": {"type": "directory", "exists": true},
+ "point_neuron_models_dir": {"type": "directory", "exists": true},
+ "templates_dir": {"type": "directory", "exists": true}
+ }
+ },
+
+ "edges": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properites": {
+ "edges_file": {"type": "file", "exists": true},
+ "edge_types_file": {"type": "file", "exists": true}
+ }
+ }
+ },
+
+ "nodes": {
+ "type": "array",
+ "items": {
+ "type": "object",
+
+ "properties": {
+ "nodes_file": {"type": "file", "exists": true},
+ "node_types_file": {"type": "file", "exists": true}
+ }
+ }
+ },
+
+ "run": {
+ "type": "object",
+ "properties": {
+ "tstop": {"type": "number", "minimum": 0},
+ "dt": {"type": "number", "minimum": 0},
+ "dL": {"type": "number", "minimum": 0},
+ "overwrite_output_dir": {"type": "boolean"},
+ "spike_threshold": {"type": "number"},
+ "save_state": {"type": "boolean"},
+ "start_from_state": {"type": "boolean"},
+ "nsteps_block": {"type": "number", "minimum": 0},
+ "save_cell_vars": {"type": "array"},
+ "calc_ecp": {"type": "boolean"}
+ }
+ },
+
+ "node_id_selections": {
+ "type": "object",
+ "properties": {
+ "save_cell_vars": {"type": "array", "items": {"type": "number"}}
+ }
+ },
+
+ "output": {
+ "type": "object",
+ "properties": {
+ "log_file": {"type": "file"},
+ "spikes_ascii": {"type": "file"},
+ "spikes_h5": {"type": "file"},
+ "cell_vars_dir": {"type": "file"},
+ "extra_cell_vars": {"type": "file"},
+ "ecp_file": {"type": "file"},
+ "state_dir": {"type": "directory"},
+ "output_dir": {"type": "directory"}
+ }
+ },
+
+ "conditions": {
+ "type": "object",
+ "properties": {
+ "celsius": {"type": "number"},
+ "v_init": {"type": "number"},
+ "cao0": {"type": "number"}
+ }
+ },
+
+ "extracellular_electrode": {
+ "type": "object",
+ "properties": {
+ "positions": {"type": "file"}
+ }
+ },
+
+ "input_file": {
+ "type": "object",
+ "properties": {
+ "format": {"type": "string", "enum": ["nwb", "csv"]},
+ "file": {"type": "file", "exists": true}
+ }
+ }
+ }
+}
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_edge_types.json b/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_edge_types.json
new file mode 100644
index 0000000..b3e6f59
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_edge_types.json
@@ -0,0 +1,20 @@
+{
+ "file_type": "csv",
+ "file_properties": {
+ "sep": " "
+ },
+
+ "columns": {
+ "edge_type_id": {"required": true},
+ "target_query": {"required": false},
+ "source_query": {"required": false},
+ "weight_max": {"required": true},
+ "weight_function": {"required": false},
+ "weight_sigma": {"required": false},
+ "distance_range": {"required": true},
+ "target_sections": {"required": true},
+ "delay": {"required": true},
+ "params_file": {"required": true},
+ "set_params_function": {"required": true}
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_node_types_external.json b/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_node_types_external.json
new file mode 100644
index 0000000..5dd3a08
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_node_types_external.json
@@ -0,0 +1,11 @@
+{
+ "file_type": "csv",
+ "file_properties": {
+ "sep": " "
+ },
+
+ "columns": {
+ "node_type_id": {"required": true},
+ "level_of_detail": {"required": true}
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_node_types_internal.json b/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_node_types_internal.json
new file mode 100644
index 0000000..6dc2188
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_node_types_internal.json
@@ -0,0 +1,15 @@
+{
+ "file_type": "csv",
+ "file_properties": {
+ "sep": " "
+ },
+
+ "columns": {
+ "node_type_id": {"required": true},
+ "params_file": {"required": true},
+ "level_of_detail": {"required": true},
+ "morphology_file": {"required": true},
+ "rotation_angle_zaxis": {"required": true},
+ "set_params_function": {"required": true}
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_nodes_external.json b/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_nodes_external.json
new file mode 100644
index 0000000..e7240b0
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_nodes_external.json
@@ -0,0 +1,11 @@
+{
+ "file_type": "csv",
+ "file_properties": {
+ "sep": " "
+ },
+
+ "columns": {
+ "node_id": {"required": true},
+ "node_type_id": {"required": true}
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_nodes_internal.json b/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_nodes_internal.json
new file mode 100644
index 0000000..f2287b0
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/schemas/csv_nodes_internal.json
@@ -0,0 +1,19 @@
+{
+ "file_type": "csv",
+ "file_properties": {
+ "sep": " "
+ },
+
+ "columns": {
+ "node_id": {"required": true},
+ "node_type_id": {"required": true},
+ "x_soma": {"required": true},
+ "y_soma": {"required": true},
+ "z_soma": {"required": true},
+ "rotation_angle_yaxis": {"required": true},
+ "pop_name": {"required": true},
+ "ei": {"required": true},
+ "location": {"required": false},
+ "tuning_angle": {"required": false}
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/sonata_adaptors.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/sonata_adaptors.py
new file mode 100644
index 0000000..91982c6
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/sonata_adaptors.py
@@ -0,0 +1,142 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import types
+import numpy as np
+
+from bmtk.simulator.core.sonata_reader import NodeAdaptor, SonataBaseNode, EdgeAdaptor, SonataBaseEdge
+from bmtk.simulator.bionet import nrn
+
+
+class BioNode(SonataBaseNode):
+ @property
+ def position(self):
+ return self._prop_adaptor.position(self._node)
+
+ @property
+ def morphology_file(self):
+ return self._node['morphology']
+
+ @property
+ def rotation_angle_xaxis(self):
+ return self._prop_adaptor.rotation_angle_xaxis(self._node)
+
+ @property
+ def rotation_angle_yaxis(self):
+        # TODO: Combine rotation angles into a single property
+ return self._prop_adaptor.rotation_angle_yaxis(self._node)
+
+ @property
+ def rotation_angle_zaxis(self):
+ return self._prop_adaptor.rotation_angle_zaxis(self._node)
+
+ def load_cell(self):
+ model_template = self.model_template
+ template_name = model_template[1]
+ model_type = self.model_type
+ if nrn.py_modules.has_cell_model(self['model_template'], model_type):
+ cell_fnc = nrn.py_modules.cell_model(self['model_template'], model_type)
+ else:
+ cell_fnc = nrn.py_modules.cell_model(model_template[0], model_type)
+
+ dynamics_params = self.dynamics_params
+ hobj = cell_fnc(self, template_name, dynamics_params)
+
+ for model_processing_str in self.model_processing:
+ processing_fnc = nrn.py_modules.cell_processor(model_processing_str)
+ hobj = processing_fnc(hobj, self, dynamics_params)
+
+ return hobj
+
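+# model_template in SONATA is typically a "directive:resource" string (the values
+# 'ctdb:Biophys1.hoc' and 'nml:cell.nml' are illustrative), split upstream into a
+# (directive, template_name) pair; hence the model_template[1] lookup in load_cell().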
+
+class BioNodeAdaptor(NodeAdaptor):
+ def get_node(self, sonata_node):
+ return BioNode(sonata_node, self)
+
+ @classmethod
+ def patch_adaptor(cls, adaptor, node_group, network):
+ node_adaptor = NodeAdaptor.patch_adaptor(adaptor, node_group, network)
+
+ # Position
+ if 'positions' in node_group.all_columns:
+ node_adaptor.position = types.MethodType(positions, adaptor)
+ elif 'position' in node_group.all_columns:
+ node_adaptor.position = types.MethodType(position, adaptor)
+ else:
+ node_adaptor.position = types.MethodType(positions_default, adaptor)
+
+ # Rotation angles
+ if 'rotation_angle_xaxis' in node_group.all_columns:
+ node_adaptor.rotation_angle_xaxis = types.MethodType(rotation_angle_x, node_adaptor)
+ else:
+ node_adaptor.rotation_angle_xaxis = types.MethodType(rotation_angle_default, node_adaptor)
+
+ if 'rotation_angle_yaxis' in node_group.all_columns:
+ node_adaptor.rotation_angle_yaxis = types.MethodType(rotation_angle_y, node_adaptor)
+ else:
+ node_adaptor.rotation_angle_yaxis = types.MethodType(rotation_angle_default, node_adaptor)
+
+ if 'rotation_angle_zaxis' in node_group.all_columns:
+ node_adaptor.rotation_angle_zaxis = types.MethodType(rotation_angle_z, node_adaptor)
+ else:
+ node_adaptor.rotation_angle_zaxis = types.MethodType(rotation_angle_default, node_adaptor)
+
+ return node_adaptor
+
+
+def positions_default(self, node):
+ return np.array([0.0, 0.0, 0.0])
+
+
+def positions(self, node):
+ return node['positions']
+
+
+def position(self, node):
+ return node['position']
+
+
+def rotation_angle_default(self, node):
+ return 0.0
+
+
+def rotation_angle_x(self, node):
+ return node['rotation_angle_xaxis']
+
+
+def rotation_angle_y(self, node):
+ return node['rotation_angle_yaxis']
+
+
+def rotation_angle_z(self, node):
+ return node['rotation_angle_zaxis']
+
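+# patch_adaptor() binds these accessors once per node group, so per-node lookups skip
+# repeated column checks. The resulting dispatch, as a sketch (node is hypothetical):
+#   adaptor.position(node)             -> node['position'] when that column exists
+#   adaptor.rotation_angle_xaxis(node) -> 0.0 when the column is absent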
+
+class BioEdge(SonataBaseEdge):
+ def load_synapses(self, section_x, section_id):
+ synapse_fnc = nrn.py_modules.synapse_model(self.model_template)
+ return synapse_fnc(self.dynamics_params, section_x, section_id)
+
+
+class BioEdgeAdaptor(EdgeAdaptor):
+ def get_edge(self, sonata_edge):
+ return BioEdge(sonata_edge, self)
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/utils.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/utils.py
new file mode 100644
index 0000000..ca63bc6
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/utils.py
@@ -0,0 +1,84 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import numpy as np
+import math
+import json
+import pandas as pd
+import h5py
+
+from neuron import h
+
+
+def rotation_matrix(axis, theta):
+ """Return the rotation matrix associated with counterclockwise rotation about the given axis by theta radians.
+ """
+ axis = np.asarray(axis)
+ theta = np.asarray(theta)
+ axis = axis/math.sqrt(np.dot(axis, axis))
+ a = math.cos(theta/2.0)
+ b, c, d = -axis*math.sin(theta/2.0)
+ aa, bb, cc, dd = a*a, b*b, c*c, d*d
+ bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d
+
+ return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],
+ [2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab)],
+ [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])
+
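+# A quick sanity check of the construction above: a counterclockwise rotation of the
+# x unit vector by pi/2 about the z axis should yield the y unit vector.
+#
+#   >>> np.allclose(rotation_matrix([0, 0, 1], np.pi / 2).dot([1, 0, 0]), [0, 1, 0])
+#   True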
+
+def edge_converter_csv(output_dir, csv_file):
+ """urrently being used by BioNetwork.write_connections(), need to refactor
+
+ :param output_dir:
+ :param csv_file:
+ :return:
+ """
+ syns_df = pd.read_csv(csv_file, sep=' ')
+ for name, group in syns_df.groupby(['trg_network', 'src_network']):
+ trg_net, src_net = name
+ group_len = len(group.index)
+ with h5py.File(os.path.join(output_dir, '{}_{}_edges.h5'.format(trg_net, src_net)), 'w') as conns_h5:
+ conns_h5.create_dataset('edges/target_gid', data=group['trg_gid'])
+ conns_h5.create_dataset('edges/source_gid', data=group['src_gid'])
+ conns_h5.create_dataset('edges/edge_type_id', data=group['edge_type_id'])
+ conns_h5.create_dataset('edges/edge_group', data=group['connection_group'])
+
+ group_counters = {group_id: 0 for group_id in group.connection_group.unique()}
+            edge_group_indices = np.zeros(group_len, dtype=np.uint)
+            for i, group_id in enumerate(group['connection_group']):
+                edge_group_indices[i] = group_counters[group_id]
+                group_counters[group_id] += 1
+            # the dataset name keeps the original spelling for file-format compatibility
+            conns_h5.create_dataset('edges/edge_group_indicies', data=edge_group_indices)
+
+ for group_class, sub_group in group.groupby('connection_group'):
+ grp = conns_h5.create_group('edges/{}'.format(group_class))
+ if group_class == 0:
+ grp.create_dataset('sec_id', data=sub_group['segment'], dtype='int')
+ grp.create_dataset('sec_x', data=sub_group['section'])
+ grp.create_dataset('syn_weight', data=sub_group['weight'])
+ grp.create_dataset('delay', data=sub_group['delay'])
+ elif group_class == 1:
+ grp.create_dataset('syn_weight', data=sub_group['weight'])
+ grp.create_dataset('delay', data=sub_group['delay'])
+ else:
+ print('Unknown cell group {}'.format(group_class))
diff --git a/bmtk-vb/build/lib/bmtk/simulator/bionet/virtualcell.py b/bmtk-vb/build/lib/bmtk/simulator/bionet/virtualcell.py
new file mode 100644
index 0000000..64b3929
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/bionet/virtualcell.py
@@ -0,0 +1,51 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from neuron import h
+
+
+class VirtualCell(object):
+ """Representation of a Virtual/External node"""
+
+ def __init__(self, node, spike_train_dataset):
+ # VirtualCell is currently not a subclass of bionet.Cell class b/c the parent has a bunch of properties that
+ # just don't apply to a virtual cell. May want to make bionet.Cell more generic in the future.
+ self._node_id = node.node_id
+ self._hobj = None
+ self._spike_train_dataset = spike_train_dataset
+ self._train_vec = []
+ self.set_stim(node, self._spike_train_dataset)
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def hobj(self):
+ return self._hobj
+
+ def set_stim(self, stim_prop, spike_train):
+ """Gets the spike trains for each individual cell."""
+ self._train_vec = h.Vector(spike_train.get_spikes(self.node_id))
+ vecstim = h.VecStim()
+ vecstim.play(self._train_vec)
+ self._hobj = vecstim
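+
+# A wiring sketch (hypothetical names; VecStim is assumed to be available among the
+# compiled NEURON mechanisms):
+#
+#   vcell = VirtualCell(node, spike_trains)  # spike_trains.get_spikes(node_id) -> spike times
+#   nc = h.NetCon(vcell.hobj, target_syn)    # deliver the spike train to a synapse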
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/core/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/config.py b/bmtk-vb/build/lib/bmtk/simulator/core/config.py
new file mode 100644
index 0000000..8a36dc7
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/config.py
@@ -0,0 +1,436 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+from bmtk.simulator.utils.config import ConfigDict
+
+'''
+import os
+import json
+import re
+import copy
+import datetime
+from six import string_types
+
+
+from bmtk.simulator.core.io_tools import io
+
+
+def from_json(config_file, validator=None):
+ """Builds and validates a configuration json file.
+
+ :param config_file: File object or path to a json file.
+ :param validator: A SimConfigValidator object to validate json file. Won't validate if set to None
+ :return: A dictionary, verified against json validator and with manifest variables resolved.
+ """
+ #print(config_file)
+ #if os.path.isfile(config_file):
+ #if isinstance(config_file, file):
+ # conf = json.load(config_file)
+ if isinstance(config_file, string_types):
+ conf = json.load(open(config_file, 'r'))
+ elif isinstance(config_file, dict):
+ conf = config_file.copy()
+ else:
+        raise Exception('{} is not a file path or config dictionary.'.format(config_file))
+
+ # insert file path into dictionary
+ if 'config_path' not in conf:
+ conf['config_path'] = os.path.abspath(config_file)
+ conf['config_dir'] = os.path.dirname(conf['config_path'])
+
+ # Will resolve manifest variables and validate
+ return from_dict(conf, validator)
+
+
+def from_dict(config_dict, validator=None):
+ """Builds and validates a configuration json dictionary object. Best to directly use from_json when possible.
+
+ :param config_dict: Dictionary object
+ :param validator: A SimConfigValidator object to validate json file. Won't validate if set to None
+ :return: A dictionary, verified against json validator and with manifest variables resolved.
+ """
+ assert(isinstance(config_dict, dict))
+ conf = copy.deepcopy(config_dict) # Since the functions will mutate the dictionary we will copy just-in-case.
+
+ if 'config_path' not in conf:
+ conf['config_path'] = os.path.join(os.getcwd(), 'tmp_cfg.dict')
+ conf['config_dir'] = os.path.dirname(conf['config_path'])
+
+ # Build the manifest and resolve variables.
+ # TODO: Check that manifest exists
+ manifest = __build_manifest(conf)
+ conf['manifest'] = manifest
+ __recursive_insert(conf, manifest)
+
+    # In our work with Blue-Brain it was agreed that the 'network' and 'simulation' parts of the config may be split
+    # into separate files. If this is the case we build each sub-file separately and merge it into this one
+ for childconfig in ['network', 'simulation']:
+ if childconfig in conf and isinstance(conf[childconfig], string_types):
+ # Try to resolve the path of the network/simulation config files. If an absolute path isn't used find
+ # the file relative to the current config file. TODO: test if this will work on windows?
+ conf_str = conf[childconfig]
+ conf_path = conf_str if conf_str.startswith('/') else os.path.join(conf['config_dir'], conf_str)
+
+ # Build individual json file and merge into parent.
+ child_json = from_json(conf_path)
+ del child_json['config_path'] # we don't want 'config_path' of parent being overwritten.
+ conf.update(child_json)
+
+ # Run the validator
+ if validator is not None:
+ validator.validate(conf)
+
+ return conf
+
+
+def copy_config(conf):
+ """Copy configuration file to different directory, with manifest variables resolved.
+
+ :param conf: configuration dictionary
+ """
+ output_dir = conf.output_dir
+ config_name = os.path.basename(conf['config_path'])
+ output_path = os.path.join(output_dir, config_name)
+ with open(output_path, 'w') as fp:
+ out_cfg = conf.copy()
+ if 'manifest' in out_cfg:
+ del out_cfg['manifest']
+ json.dump(out_cfg, fp, indent=2)
+
+
+def __special_variables(conf):
+ """A list of preloaded variables to insert into the manifest, containing things like path to run-time directory,
+ configuration directory, etc.
+ """
+ pre_manifest = dict()
+ pre_manifest['$workingdir'] = os.path.dirname(os.getcwd())
+ if 'config_path' in conf:
+ pre_manifest['$configdir'] = os.path.dirname(conf['config_path']) # path of configuration file
+ pre_manifest['$configfname'] = conf['config_path']
+
+ dt_now = datetime.datetime.now()
+ pre_manifest['$time'] = dt_now.strftime('%H-%M-%S')
+ pre_manifest['$date'] = dt_now.strftime('%Y-%m-%d')
+ pre_manifest['$datetime'] = dt_now.strftime('%Y-%m-%d_%H-%M-%S')
+
+ return pre_manifest
+
+
+def __build_manifest(conf):
+ """Resolves the manifest section and resolve any internal variables"""
+ if 'manifest' not in conf:
+ return __special_variables(conf)
+
+ manifest = conf["manifest"]
+ resolved_manifest = __special_variables(conf)
+ resolved_keys = set()
+ unresolved_keys = set(manifest.keys())
+
+    # No longer using recursion since that can lead to an infinite loop if the person who writes the config file isn't
+    # careful. Also added code to allow for the ${VAR} format in case the user wants to use "$.../some_${MODEL}_here/..."
+ while unresolved_keys:
+ for key in unresolved_keys:
+ # Find all variables in manifest and see if they can be replaced by the value in resolved_manifest
+ value = __find_variables(manifest[key], resolved_manifest)
+
+            # If value no longer has variables, add the key-value pair to resolved_manifest and remove from unresolved_keys
+ if value.find('$') < 0:
+ resolved_manifest[key] = value
+ resolved_keys.add(key)
+
+ # remove resolved key-value pairs from set, and make sure at every iteration unresolved_keys shrinks to prevent
+ # infinite loops
+ n_unresolved = len(unresolved_keys)
+ unresolved_keys -= resolved_keys
+ if n_unresolved == len(unresolved_keys):
+ msg = "Unable to resolve manifest variables: {}".format(unresolved_keys)
+ raise Exception(msg)
+
+ return resolved_manifest
+
+
+def __recursive_insert(json_obj, manifest):
+ """Loop through the config and substitute the path variables (e.g.: $MY_DIR) with the values from the manifest
+
+ :param json_obj: A json dictionary object that may contain variables needing to be resolved.
+ :param manifest: A dictionary of variable values
+    :return: A new json dictionary config with variables resolved
+ """
+ if isinstance(json_obj, string_types):
+ return __find_variables(json_obj, manifest)
+
+ elif isinstance(json_obj, list):
+ new_list = []
+ for itm in json_obj:
+ new_list.append(__recursive_insert(itm, manifest))
+ return new_list
+
+ elif isinstance(json_obj, dict):
+ for key, val in json_obj.items():
+ if key == 'manifest':
+ continue
+ json_obj[key] = __recursive_insert(val, manifest)
+
+ return json_obj
+
+ else:
+ return json_obj
+
+
+def __find_variables(json_str, manifest):
+ """Replaces variables (i.e. $VAR, ${VAR}) with their values from the manifest.
+
+ :param json_str: a json string that may contain none, one or multiple variable
+ :param manifest: dictionary of variable lookup values
+ :return: json_str with resolved variables. Won't resolve variables that don't exist in manifest.
+ """
+    variables = [m for m in re.finditer(r'\$\{?\w+\}?', json_str)]
+ for var in variables:
+ var_lookup = var.group()
+ if var_lookup.startswith('${') and var_lookup.endswith('}'):
+ # replace ${VAR} with $VAR
+ var_lookup = "$" + var_lookup[2:-1]
+ if var_lookup in manifest:
+ json_str = json_str.replace(var.group(), manifest[var_lookup])
+
+ return json_str
+
+
+class ConfigDict(dict):
+ def __init__(self, *args, **kwargs):
+ self.update(*args, **kwargs)
+ self._env_built = False
+ self._io = None
+
+ self._node_set = {}
+ self._load_node_set()
+
+ @property
+ def io(self):
+ if self._io is None:
+ self._io = io
+ return self._io
+
+ @io.setter
+ def io(self, io):
+ self._io = io
+
+ @property
+ def run(self):
+ return self['run']
+
+ @property
+ def tstart(self):
+ return self.run.get('tstart', 0.0)
+
+ @property
+ def tstop(self):
+ return self.run['tstop']
+
+ @property
+ def dt(self):
+ return self.run.get('dt', 0.1)
+
+ @property
+ def spike_threshold(self):
+ return self.run.get('spike_threshold', -15.0)
+
+ @property
+ def dL(self):
+ return self.run.get('dL', 20.0)
+
+ @property
+ def gid_mappings(self):
+ return self.get('gid_mapping_file', None)
+
+ @property
+ def block_step(self):
+ return self.run.get('nsteps_block', 5000)
+
+ @property
+ def conditions(self):
+ return self['conditions']
+
+ @property
+ def celsius(self):
+ return self.conditions['celsius']
+
+ @property
+ def v_init(self):
+ return self.conditions['v_init']
+
+ @property
+ def path(self):
+ return self['config_path']
+
+ @property
+ def output(self):
+ return self['output']
+
+ @property
+ def output_dir(self):
+ return self.output['output_dir']
+
+ @property
+ def overwrite_output(self):
+ return self.output.get('overwrite_output_dir', False)
+
+ @property
+ def log_file(self):
+ return self.output['log_file']
+
+ @property
+ def components(self):
+ return self.get('components', {})
+
+ @property
+ def morphologies_dir(self):
+ return self.components['morphologies_dir']
+
+ @property
+ def synaptic_models_dir(self):
+ return self.components['synaptic_models_dir']
+
+ @property
+ def point_neuron_models_dir(self):
+ return self.components['point_neuron_models_dir']
+
+ @property
+ def mechanisms_dir(self):
+ return self.components['mechanisms_dir']
+
+ @property
+ def biophysical_neuron_models_dir(self):
+ return self.components['biophysical_neuron_models_dir']
+
+ @property
+ def templates_dir(self):
+ return self.components.get('templates_dir', None)
+
+ @property
+ def with_networks(self):
+ return 'networks' in self and len(self.nodes) > 0
+
+ @property
+ def networks(self):
+ return self['networks']
+
+ @property
+ def nodes(self):
+ return self.networks.get('nodes', [])
+
+ @property
+ def edges(self):
+ return self.networks.get('edges', [])
+
+ @property
+ def reports(self):
+ return self.get('reports', {})
+
+ @property
+ def inputs(self):
+ return self.get('inputs', {})
+
+ @property
+ def node_sets(self):
+ return self._node_set
+
+ def _load_node_set(self):
+ if 'node_sets_file' in self.keys():
+ node_set_val = self['node_sets_file']
+ elif 'node_sets' in self.keys():
+ node_set_val = self['node_sets']
+ else:
+ self._node_set = {}
+ return
+
+ if isinstance(node_set_val, dict):
+ self._node_set = node_set_val
+ else:
+ try:
+ self._node_set = json.load(open(node_set_val, 'r'))
+ except Exception as e:
+ io.log_exception('Unable to load node_sets_file {}'.format(node_set_val))
+
+ def copy_to_output(self):
+ copy_config(self)
+
+ def get_modules(self, module_name):
+ return [report for report in self.reports.values() if report['module'] == module_name]
+
+ def _set_logging(self):
+ """Check if log-level and/or log-format string is being changed through the config"""
+ output_sec = self.output
+ if 'log_format' in output_sec:
+ self._io.set_log_format(output_sec['log_format'])
+
+ if 'log_level' in output_sec:
+ self._io.set_log_level(output_sec['log_level'])
+
+ if 'log_to_console' in output_sec:
+ self._io.log_to_console = output_sec['log_to_console']
+
+ if 'quiet_simulator' in output_sec and output_sec['quiet_simulator']:
+ self._io.quiet_simulator()
+
+ def build_env(self):
+ if self._env_built:
+ return
+
+ self._set_logging()
+ self.io.setup_output_dir(self.output_dir, self.log_file, self.overwrite_output)
+ self.copy_to_output()
+ self._env_built = True
+
+ @staticmethod
+ def get_validator():
+ raise NotImplementedError
+
+ @classmethod
+ def from_json(cls, config_file, validate=False):
+ validator = cls.get_validator() if validate else None
+ return cls(from_json(config_file, validator))
+
+ @classmethod
+ def from_dict(cls, config_dict, validate=False):
+ validator = cls.get_validator() if validate else None
+ return cls(from_dict(config_dict, validator))
+
+ @classmethod
+ def from_yaml(cls, config_file, validate=False):
+ raise NotImplementedError
+
+ @classmethod
+ def load(cls, config_file, validate=False):
+ # Implement factory method that can resolve the format/type of input configuration.
+ if isinstance(config_file, dict):
+ return cls.from_dict(config_file, validate)
+ elif isinstance(config_file, string_types):
+ if config_file.endswith('yml') or config_file.endswith('yaml'):
+ return cls.from_yaml(config_file, validate)
+ else:
+ return cls.from_json(config_file, validate)
+ else:
+ raise Exception
+'''
+
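A minimal sketch of the manifest convention the (currently disabled) resolver above implements; all keys and paths here are hypothetical:

    # Variables declared in 'manifest' may reference earlier entries and are
    # substituted throughout the rest of the config, in both spellings.
    conf = {
        'manifest': {
            '$BASE_DIR': '.',
            '$OUTPUT_DIR': '$BASE_DIR/output',
        },
        'output': {
            'output_dir': '$OUTPUT_DIR',          # $VAR form
            'log_file': '${OUTPUT_DIR}/log.txt',  # ${VAR} form is also accepted
        },
    }

After __build_manifest() and __recursive_insert() run, 'output_dir' would read './output' and 'log_file' './output/log.txt'. Note that in this build the module simply re-exports ConfigDict from bmtk.simulator.utils.config, so the live resolution happens there.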
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/edge_population.py b/bmtk-vb/build/lib/bmtk/simulator/core/edge_population.py
new file mode 100644
index 0000000..5dfa06c
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/edge_population.py
@@ -0,0 +1,21 @@
+class SimEdge(object):
+ @property
+ def node_id(self):
+ raise NotImplementedError()
+
+ @property
+ def gid(self):
+ raise NotImplementedError()
+
+
+class EdgePopulation(object):
+ @property
+ def source_nodes(self):
+ raise NotImplementedError()
+
+ @property
+ def target_nodes(self):
+ raise NotImplementedError()
+
+ def initialize(self, network):
+ raise NotImplementedError()
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/graph.py b/bmtk-vb/build/lib/bmtk/simulator/core/graph.py
new file mode 100644
index 0000000..1e56ef1
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/graph.py
@@ -0,0 +1,435 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+import ast
+import numpy as np
+
+from bmtk.simulator.core.config import ConfigDict
+#import config as cfg
+from bmtk.simulator.utils.property_maps import NodePropertyMap, EdgePropertyMap
+from bmtk.utils import sonata
+from bmtk.simulator.core.io_tools import io
+
+from bmtk.simulator.core.node_sets import NodeSet, NodeSetAll
+
+
+"""Creates a graph of nodes and edges from multiple network files for all simulators.
+
+Consists of edges and nodes. All classes are abstract and should be reimplemented by a specific simulator. Also
+contains base factor methods for building a network from a config file (or other).
+"""
+
+
+class SimEdge(object):
+ def __init__(self, original_params, dynamics_params):
+ self._orig_params = original_params
+ self._dynamics_params = dynamics_params
+ self._updated_params = {'dynamics_params': self._dynamics_params}
+
+ @property
+ def edge_type_id(self):
+ return self._orig_params['edge_type_id']
+
+ def __getitem__(self, item):
+ if item in self._updated_params:
+ return self._updated_params[item]
+ else:
+ return self._orig_params[item]
+
+
+class SimNode(object):
+ def __init__(self, node_id, graph, network, params):
+ self._node_id = node_id
+ self._graph = graph
+ self._graph_params = params
+ self._node_type_id = params['node_type_id']
+ self._network = network
+ self._updated_params = {}
+
+ self._model_params = {}
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def node_type_id(self):
+ return self._node_type_id
+
+ @property
+ def network(self):
+ """Name of network node belongs too."""
+ return self._network
+
+ @property
+ def model_params(self):
+ """Parameters (json file, nml, dictionary) that describe a specific node"""
+ return self._model_params
+
+ @model_params.setter
+ def model_params(self, value):
+ self._model_params = value
+
+ def __contains__(self, item):
+ return item in self._updated_params or item in self._graph_params
+
+ def __getitem__(self, item):
+ if item in self._updated_params:
+ return self._updated_params[item]
+ else:
+ return self._graph_params[item]
+
+
+class SimGraph(object):
+ model_type_col = 'model_type'
+
+ def __init__(self):
+ self._components = {} # components table, i.e. paths to model files.
+ self._io = io
+
+ self._node_property_maps = {}
+ self._edge_property_maps = {}
+
+ self._node_populations = {}
+ self._internal_populations_map = {}
+ self._virtual_populations_map = {}
+
+ self._virtual_cells_nid = {}
+
+ self._recurrent_edges = {}
+ self._external_edges = {}
+
+ self._node_sets = {}
+ self._using_gids = False
+
+ @property
+ def io(self):
+ return self._io
+
+ '''
+ @property
+ def internal_pop_names(self):
+ return self
+ '''
+
+ @property
+ def node_populations(self):
+ return list(self._node_populations.keys())
+
+ def get_node_set(self, node_set):
+        # dict/list filters are unhashable and would raise a TypeError if tested
+        # for membership in the node-sets dictionary (Python 3), so check them first
+        if isinstance(node_set, (dict, list)):
+            return NodeSet(node_set, self)
+
+        elif node_set in self._node_sets:
+            return self._node_sets[node_set]
+
+ else:
+ self.io.log_exception('Unable to load or find node_set "{}"'.format(node_set))
+
+ def get_node_populations(self):
+ return self._node_populations.values()
+
+ def get_node_population(self, population_name):
+ return self._node_populations[population_name]
+
+ def get_component(self, key):
+ """Get the value of item in the components dictionary.
+
+ :param key: name of component
+ :return: value assigned to component
+ """
+ return self._components[key]
+
+ def add_component(self, key, value):
+ """Add a component key-value pair
+
+ :param key: name of component
+ :param value: value
+ """
+ self._components[key] = value
+
+ '''
+ def _from_json(self, file_name):
+ return cfg.from_json(file_name)
+ '''
+
+ def _validate_components(self):
+ """Make sure various components (i.e. paths) exists before attempting to build the graph."""
+ return True
+
+ def _create_nodes_prop_map(self, grp):
+ return NodePropertyMap()
+
+ def _create_edges_prop_map(self, grp):
+ return EdgePropertyMap()
+
+ def __avail_model_types(self, population):
+ model_types = set()
+ for grp in population.groups:
+ if self.model_type_col not in grp.all_columns:
+ self.io.log_exception('model_type is missing from nodes.')
+
+ model_types.update(set(np.unique(grp.get_values(self.model_type_col))))
+ return model_types
+
+ def _preprocess_node_types(self, node_population):
+        # TODO: The following figures out the node-type-ids actually in use. For memory and speed it may be better
+        # to just process them all
+ node_type_ids = node_population.type_ids
+ # TODO: Verify all the node_type_ids are in the table
+ node_types_table = node_population.types_table
+
+        # TODO: Convert model_type to an enum
+ morph_dir = self.get_component('morphologies_dir')
+ if morph_dir is not None and 'morphology' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ if node_type['morphology'] is None:
+ continue
+                # TODO: Check the file exists
+ # TODO: See if absolute path is stored in csv
+ node_type['morphology'] = os.path.join(morph_dir, node_type['morphology'])
+
+ if 'dynamics_params' in node_types_table.columns and 'model_type' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ dynamics_params = node_type['dynamics_params']
+ if isinstance(dynamics_params, dict):
+ continue
+
+ model_type = node_type['model_type']
+ if model_type == 'biophysical':
+ params_dir = self.get_component('biophysical_neuron_models_dir')
+ elif model_type == 'point_process':
+ params_dir = self.get_component('point_neuron_models_dir')
+ elif model_type == 'point_soma':
+ params_dir = self.get_component('point_neuron_models_dir')
+ else:
+ # Not sure what to do in this case, throw Exception?
+ params_dir = self.get_component('custom_neuron_models')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ node_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ self.io.log_exception('Could not find node dynamics_params file {}.'.format(params_path))
+
+ def _preprocess_edge_types(self, edge_pop):
+ edge_types_table = edge_pop.types_table
+ edge_type_ids = np.unique(edge_pop.type_ids)
+
+ for et_id in edge_type_ids:
+ edge_type = edge_types_table[et_id]
+ if 'dynamics_params' in edge_types_table.columns:
+ dynamics_params = edge_type['dynamics_params']
+ params_dir = self.get_component('synaptic_models_dir')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ edge_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ self.io.log_exception('Could not find edge dynamics_params file {}.'.format(params_path))
+
+ # Split target_sections
+ if 'target_sections' in edge_type:
+ trg_sec = edge_type['target_sections']
+ if trg_sec is not None:
+ try:
+ edge_type['target_sections'] = ast.literal_eval(trg_sec)
+ except Exception as exc:
+ self.io.log_warning('Unable to split target_sections list {}'.format(trg_sec))
+ edge_type['target_sections'] = None
+
+ # Split target distances
+ if 'distance_range' in edge_type:
+ dist_range = edge_type['distance_range']
+ if dist_range is not None:
+ try:
+                        # TODO: Make sure the distance range has at most two values
+ edge_type['distance_range'] = json.loads(dist_range)
+ except Exception as e:
+ try:
+ edge_type['distance_range'] = [0.0, float(dist_range)]
+ except Exception as e:
+ self.io.log_warning('Unable to parse distance_range {}'.format(dist_range))
+ edge_type['distance_range'] = None
+
+ def external_edge_populations(self, src_pop, trg_pop):
+ return self._external_edges.get((src_pop, trg_pop), [])
+
+ def add_nodes(self, sonata_file, populations=None):
+ """Add nodes from a network to the graph.
+
+ :param sonata_file: A NodesFormat type object containing list of nodes.
+ :param populations: name/identifier of network. If none will attempt to retrieve from nodes object
+ """
+ nodes = sonata_file.nodes
+
+ selected_populations = nodes.population_names if populations is None else populations
+ for pop_name in selected_populations:
+ if pop_name not in nodes:
+                # when the user wants to simulate only a few of the populations in the file
+ continue
+
+ if pop_name in self.node_populations:
+                # Make sure there aren't any collisions
+ self.io.log_exception('There are multiple node populations with name {}.'.format(pop_name))
+
+ node_pop = nodes[pop_name]
+ self._preprocess_node_types(node_pop)
+ self._node_populations[pop_name] = node_pop
+
+ # Segregate into virtual populations and non-virtual populations
+ model_types = self.__avail_model_types(node_pop)
+ if 'virtual' in model_types:
+ self._virtual_populations_map[pop_name] = node_pop
+ self._virtual_cells_nid[pop_name] = {}
+ model_types -= set(['virtual'])
+ if model_types:
+ # We'll allow a population to have virtual and non-virtual nodes but it is not ideal
+                    self.io.log_warning(('Node population {} contains both virtual and non-virtual nodes which can ' +
+                                         'cause memory and build-time inefficiency. Consider separating virtual ' +
+                                         'nodes into their own population').format(pop_name))
+
+ if model_types:
+ self._internal_populations_map[pop_name] = node_pop
+
+ self._node_sets[pop_name] = NodeSet({'population': pop_name}, self)
+ self._node_property_maps[pop_name] = {grp.group_id: self._create_nodes_prop_map(grp)
+ for grp in node_pop.groups}
+
+ def build_nodes(self):
+ raise NotImplementedError
+
+ def build_recurrent_edges(self):
+ raise NotImplementedError
+
+ def add_edges(self, sonata_file, populations=None, source_pop=None, target_pop=None):
+ """
+
+ :param sonata_file:
+ :param populations:
+ :param source_pop:
+ :param target_pop:
+ :return:
+ """
+ edges = sonata_file.edges
+ selected_populations = edges.population_names if populations is None else populations
+
+ for pop_name in selected_populations:
+ if pop_name not in edges:
+ continue
+
+ edge_pop = edges[pop_name]
+ self._preprocess_edge_types(edge_pop)
+
+ # Check the source nodes exists
+ src_pop = source_pop if source_pop is not None else edge_pop.source_population
+ is_internal_src = src_pop in self._internal_populations_map.keys()
+ is_external_src = src_pop in self._virtual_populations_map.keys()
+
+ trg_pop = target_pop if target_pop is not None else edge_pop.target_population
+ is_internal_trg = trg_pop in self._internal_populations_map.keys()
+
+ if not is_internal_trg:
+                self.io.log_exception(('Node population {} does not exist (or consists of only virtual nodes). ' +
+ '{} edges cannot create connections.').format(trg_pop, pop_name))
+
+ if not (is_internal_src or is_external_src):
+ self.io.log_exception('Source node population {} not found. Please update {} edges'.format(src_pop,
+ pop_name))
+ if is_internal_src:
+ if trg_pop not in self._recurrent_edges:
+ self._recurrent_edges[trg_pop] = []
+ self._recurrent_edges[trg_pop].append(edge_pop)
+
+ if is_external_src:
+                if (src_pop, trg_pop) not in self._external_edges:
+ self._external_edges[(src_pop, trg_pop)] = []
+ self._external_edges[(src_pop, trg_pop)].append(edge_pop)
+
+ self._edge_property_maps[pop_name] = {grp.group_id: self._create_edges_prop_map(grp)
+ for grp in edge_pop.groups}
+
+ @classmethod
+ def from_config(cls, conf, **properties):
+ """Generates a graph structure from a json config file or dictionary.
+
+ :param conf: name of json config file, or a dictionary with config parameters
+ :param properties: optional properties.
+ :return: A graph object of type cls
+ """
+ graph = cls(**properties)
+
+ # The simulation run script should create a config-dict since it's likely to vary based on the simulator engine,
+ # however in the case the user doesn't we will try a generic conversion from dict/json to ConfigDict
+ if isinstance(conf, ConfigDict):
+ config = conf
+ else:
+ try:
+ config = ConfigDict.load(conf)
+ except Exception as e:
+                graph.io.log_exception('Could not convert {} (type "{}") to a config dictionary.'.format(conf, type(conf)))
+
+ if not config.with_networks:
+ graph.io.log_exception('Could not find any network files. Unable to build network.')
+
+ # TODO: These are simulator specific
+ graph.spike_threshold = config.spike_threshold
+ graph.dL = config.dL
+
+ # load components
+ for name, value in config.components.items():
+ graph.add_component(name, value)
+ graph._validate_components()
+
+ # load nodes
+ gid_map = config.gid_mappings
+ for node_dict in config.nodes:
+ nodes_net = sonata.File(data_files=node_dict['nodes_file'], data_type_files=node_dict['node_types_file'],
+ gid_table=gid_map)
+ graph.add_nodes(nodes_net)
+
+ # load edges
+ for edge_dict in config.edges:
+ target_network = edge_dict['target'] if 'target' in edge_dict else None
+ source_network = edge_dict['source'] if 'source' in edge_dict else None
+ edge_net = sonata.File(data_files=edge_dict['edges_file'], data_type_files=edge_dict['edge_types_file'])
+            graph.add_edges(edge_net, source_pop=source_network, target_pop=target_network)
+
+ graph._node_sets['all'] = NodeSetAll(graph)
+        for ns_name, ns_filter in config.node_sets.items():
+ graph._node_sets[ns_name] = NodeSet(ns_filter, graph)
+
+ return graph
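A small, self-contained illustration of the parameter layering used by SimEdge.__getitem__ above: values placed in _updated_params (here, the resolved dynamics_params) shadow the originals read from the edge-types table. The literal values are made up:

    edge = SimEdge({'edge_type_id': 7, 'delay': 2.0}, dynamics_params={'tau': 5.0})
    print(edge['delay'])            # 2.0, falls through to the original params
    print(edge['dynamics_params'])  # {'tau': 5.0}, served from the updated layer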
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/io_tools.py b/bmtk-vb/build/lib/bmtk/simulator/core/io_tools.py
new file mode 100644
index 0000000..3750015
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/io_tools.py
@@ -0,0 +1,111 @@
+import os
+import sys
+import shutil
+import logging
+
+from six import string_types
+
+
+class IOUtils(object):
+ """
+ For logging/mkdir commands we sometimes need to use different MPI classes depending on the simulator being used
+ (NEST and NEURON have their own barrier functions that don't work well with mpi). We also need to be able to
+ adjust the logging levels/format at run-time depending on the simulator/configuration options.
+
+ Thus the bulk of the io and logging functions are put into their own class and can be overwritten by specific
+ simulator modules
+ """
+ def __init__(self):
+ self.mpi_rank = 0
+ self.mpi_size = 1
+
+ self._log_format = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")
+ self._log_level = logging.DEBUG
+ self._log_to_console = True
+ self._logger = None
+
+ @property
+ def log_to_console(self):
+ return self._log_to_console
+
+ @log_to_console.setter
+ def log_to_console(self, flag):
+ assert(isinstance(flag, bool))
+ self._log_to_console = flag
+
+ @property
+ def logger(self):
+ if self._logger is None:
+ # Create the logger the first time it is accessed
+ self._logger = logging.getLogger(self.__class__.__name__)
+ self._logger.setLevel(self._log_level)
+ self._set_console_logging()
+
+ return self._logger
+
+ def _set_console_logging(self):
+ if not self._log_to_console:
+ return
+
+ console_handler = logging.StreamHandler(sys.stdout)
+ console_handler.setFormatter(self._log_format)
+ self._logger.addHandler(console_handler)
+
+ def set_log_format(self, format_str):
+ self._log_format = logging.Formatter(format_str)
+
+ def set_log_level(self, loglevel):
+ if isinstance(loglevel, int):
+ self._log_level = loglevel
+
+        elif isinstance(loglevel, string_types):  # str on Py3; str/unicode on Py2
+ self._log_level = logging.getLevelName(loglevel)
+
+ else:
+ raise Exception('Error: cannot set logging levels to {}'.format(loglevel))
+
+ def barrier(self):
+ pass
+
+ def quiet_simulator(self):
+ pass
+
+ def setup_output_dir(self, output_dir, log_file, overwrite=True):
+ if self.mpi_rank == 0:
+ # Create output directory
+ if os.path.exists(output_dir):
+ if overwrite:
+ shutil.rmtree(output_dir)
+ else:
+ self.log_exception('Directory already exists (remove or set to overwrite).')
+ os.makedirs(output_dir)
+
+ # Create log file
+ if log_file is not None:
+ log_path = log_file if os.path.isabs(log_file) else os.path.join(output_dir, log_file)
+ file_logger = logging.FileHandler(log_path)
+ file_logger.setFormatter(self._log_format)
+ self.logger.addHandler(file_logger)
+ self.log_info('Created log file')
+
+ self.barrier()
+
+ def log_info(self, message, all_ranks=False):
+ if all_ranks is False and self.mpi_rank != 0:
+ return
+
+ self.logger.info(message)
+
+ def log_warning(self, message, all_ranks=False):
+ if all_ranks is False and self.mpi_rank != 0:
+ return
+
+ self.logger.warning(message)
+
+ def log_exception(self, message):
+ if self.mpi_rank == 0:
+ self.logger.error(message)
+
+ self.barrier()
+ raise Exception(message)
+
+
+io = IOUtils()
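A hedged usage sketch for the module-level singleton; the directory and file names are hypothetical:

    import logging
    from bmtk.simulator.core.io_tools import io

    io.set_log_level(logging.INFO)     # int level, or a level-name string
    io.setup_output_dir('output', 'log.txt', overwrite=True)
    io.log_info('simulation starting')  # rank 0 only by default
    io.log_warning('non-fatal issue', all_ranks=True)

Simulator-specific subclasses can replace the barrier()/quiet_simulator() no-ops and set mpi_rank/mpi_size so the same calls become MPI-safe.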
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/network_reader.py b/bmtk-vb/build/lib/bmtk/simulator/core/network_reader.py
new file mode 100644
index 0000000..8089e32
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/network_reader.py
@@ -0,0 +1,73 @@
+
+
+class NodesReader(object):
+ def __init__(self):
+ self._has_internal_nodes = False
+ self._has_virtual_nodes = False
+
+ @property
+ def name(self):
+ raise NotImplementedError()
+
+ @property
+ def internal_nodes_only(self):
+ return self._has_internal_nodes and not self._has_virtual_nodes
+
+ @property
+ def virtual_nodes_only(self):
+ return self._has_virtual_nodes and not self._has_internal_nodes
+
+ @property
+ def mixed_nodes(self):
+ return self._has_internal_nodes and self._has_virtual_nodes
+
+ def initialize(self, network):
+ raise NotImplementedError()
+
+ @classmethod
+ def load(cls, **properties):
+ raise NotImplementedError()
+
+
+class EdgesReader(object):
+ unknown = 0
+ recurrent = 0
+ virtual = 1
+ mixed = 2
+
+ def __init__(self):
+ self._connection_type = -1
+
+ @property
+ def recurrent_connections(self):
+ return self._connection_type == self.recurrent
+
+ @property
+ def virtual_connections(self):
+ return self._connection_type == self.virtual
+
+ @property
+ def mixed_connections(self):
+ return self._connection_type == self.mixed
+
+ @property
+ def source_nodes(self):
+ raise NotImplementedError()
+
+ @property
+ def target_nodes(self):
+ raise NotImplementedError()
+
+ def set_connection_type(self, src_pop, trg_pop):
+ if src_pop.internal_nodes_only and trg_pop.internal_nodes_only:
+ self._connection_type = self.recurrent
+
+ elif src_pop.virtual_nodes_only and trg_pop.internal_nodes_only:
+ self._connection_type = self.virtual
+
+ else:
+ self._connection_type = self.mixed
+
+ def initialize(self, network):
+ raise NotImplementedError()
+
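A minimal sketch of how set_connection_type() classifies an edge population. The node-reader stand-ins below exist only for this demonstration; real readers are built by the sonata adaptors:

    class _StubNodes(NodesReader):
        def __init__(self, internal, virtual):
            super(_StubNodes, self).__init__()
            self._has_internal_nodes = internal
            self._has_virtual_nodes = virtual

    edges = EdgesReader()
    edges.set_connection_type(src_pop=_StubNodes(False, True),  # virtual only
                              trg_pop=_StubNodes(True, False))  # internal only
    print(edges.virtual_connections)  # True: external spikes onto real cells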
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/node_population.py b/bmtk-vb/build/lib/bmtk/simulator/core/node_population.py
new file mode 100644
index 0000000..353bd7a
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/node_population.py
@@ -0,0 +1,37 @@
+class SimNode(object):
+ @property
+ def node_id(self):
+ raise NotImplementedError()
+
+ @property
+ def gid(self):
+ raise NotImplementedError()
+
+
+class NodePopulation(object):
+ def __init__(self):
+ self._has_internal_nodes = False
+ self._has_virtual_nodes = False
+
+ @property
+ def name(self):
+ raise NotImplementedError()
+
+ @property
+ def internal_nodes_only(self):
+ return self._has_internal_nodes and not self._has_virtual_nodes
+
+ @property
+ def virtual_nodes_only(self):
+ return self._has_virtual_nodes and not self._has_internal_nodes
+
+ @property
+ def mixed_nodes(self):
+ return self._has_internal_nodes and self._has_virtual_nodes
+
+ def initialize(self, network):
+ raise NotImplementedError()
+
+ @classmethod
+ def load(cls, **properties):
+ raise NotImplementedError()
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/node_sets.py b/bmtk-vb/build/lib/bmtk/simulator/core/node_sets.py
new file mode 100644
index 0000000..5a67f95
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/node_sets.py
@@ -0,0 +1,57 @@
+from .io_tools import io
+
+
+class NodeSet(object):
+ def __init__(self, filter_params, network):
+ self._network = network
+ self._populations = []
+ self._preselected_gids = None
+
+ if isinstance(filter_params, list):
+ self._preselected_gids = filter_params
+ elif isinstance(filter_params, dict):
+ self._filter = filter_params.copy()
+ self._populations = self._find_populations()
+ else:
+ io.log_exception('Unknown node set params type {}'.format(type(filter_params)))
+
+ def _find_populations(self):
+ for k in ['population', 'populations']:
+ if k in self._filter:
+ node_pops = []
+ for pop_name in to_list(self._filter[k]):
+ node_pops.append(self._network.get_node_population(pop_name))
+ del self._filter[k]
+ return node_pops
+
+ return self._network.get_node_populations()
+
+ def populations(self):
+ return self._populations
+
+ def population_names(self):
+ return [p.name for p in self._populations]
+
+ def gids(self):
+ if self._preselected_gids is not None:
+ for gid in self._preselected_gids:
+ yield gid
+ else:
+ for pop in self._populations:
+ for node in pop.filter(self._filter):
+ yield node.node_id
+
+ def nodes(self):
+ return None
+
+
+class NodeSetAll(NodeSet):
+ def __init__(self, network):
+ super(NodeSetAll, self).__init__({}, network)
+
+
+def to_list(val):
+ if isinstance(val, list):
+ return val
+ else:
+ return [val]
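The two filter_params shapes NodeSet accepts, as a runnable sketch. The stub network below stands in for a SimNetwork-like object; a dict filter such as {'population': 'V1', 'ei': 'e'} would instead be matched against real node populations:

    class _StubNetwork(object):
        def get_node_populations(self):
            return []

    preselected = NodeSet([0, 1, 2, 5], _StubNetwork())  # explicit gid list
    print(list(preselected.gids()))                      # [0, 1, 2, 5]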
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/simulator.py b/bmtk-vb/build/lib/bmtk/simulator/core/simulator.py
new file mode 100644
index 0000000..4a84174
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/simulator.py
@@ -0,0 +1,9 @@
+class Simulator(object):
+ def __init__(self):
+ self._sim_mods = []
+
+ def add_mod(self, module):
+ self._sim_mods.append(module)
+
+ def run(self):
+ raise NotImplementedError()
\ No newline at end of file
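The Simulator base is just a contract; a toy subclass shows the intended shape. Treating modules as plain callables is an assumption of this sketch (real bmtk modules expose richer initialize/step/finalize hooks):

    class LoopSimulator(Simulator):
        def __init__(self, n_steps):
            super(LoopSimulator, self).__init__()
            self._n_steps = n_steps

        def run(self):
            for step in range(self._n_steps):
                for mod in self._sim_mods:
                    mod(step)  # call each registered module once per step

    sim = LoopSimulator(3)
    sim.add_mod(lambda step: print('step', step))
    sim.run()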
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/simulator_network.py b/bmtk-vb/build/lib/bmtk/simulator/core/simulator_network.py
new file mode 100644
index 0000000..e1da1b3
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/simulator_network.py
@@ -0,0 +1,200 @@
+from six import string_types
+
+from bmtk.simulator.core.io_tools import io
+#from bmtk.simulator.core.config import ConfigDict
+from bmtk.simulator.utils.config import ConfigDict
+from bmtk.simulator.core.node_sets import NodeSet, NodeSetAll
+from bmtk.simulator.core import sonata_reader
+
+
+class SimNetwork(object):
+ def __init__(self):
+ self._components = {}
+ self._io = io
+
+ self._node_adaptors = {}
+ self._edge_adaptors = {}
+ self._register_adaptors()
+
+ self._node_populations = {}
+ self._node_sets = {}
+
+ self._edge_populations = []
+
+ @property
+ def io(self):
+ return self._io
+
+ @property
+ def node_populations(self):
+ return self._node_populations.values()
+
+ @property
+ def recurrent_edges(self):
+ return [ep for ep in self._edge_populations if ep.recurrent_connections]
+
+ @property
+ def py_function_caches(self):
+ return None
+
+ def _register_adaptors(self):
+ self._node_adaptors['sonata'] = sonata_reader.NodeAdaptor
+ self._edge_adaptors['sonata'] = sonata_reader.EdgeAdaptor
+
+ def get_node_adaptor(self, name):
+ return self._node_adaptors[name]
+
+ def get_edge_adaptor(self, name):
+ return self._edge_adaptors[name]
+
+ def add_component(self, name, path):
+ self._components[name] = path
+
+ def get_component(self, name):
+ if name not in self._components:
+ self.io.log_exception('No network component set with name {}'.format(name))
+ else:
+ return self._components[name]
+
+ def has_component(self, name):
+ return name in self._components
+
+ def get_node_population(self, name):
+ return self._node_populations[name]
+
+ def get_node_populations(self):
+ return self._node_populations.values()
+
+ def add_node_set(self, name, node_set):
+ self._node_sets[name] = node_set
+
+ def get_node_set(self, node_set):
+ if isinstance(node_set, string_types) and node_set in self._node_sets:
+ return self._node_sets[node_set]
+
+ elif isinstance(node_set, (dict, list)):
+ return NodeSet(node_set, self)
+
+ else:
+ self.io.log_exception('Unable to load or find node_set "{}"'.format(node_set))
+
+ def add_nodes(self, node_population):
+ pop_name = node_population.name
+ if pop_name in self._node_populations:
+            # Make sure there aren't any collisions
+ self.io.log_exception('There are multiple node populations with name {}.'.format(pop_name))
+
+ node_population.initialize(self)
+ self._node_populations[pop_name] = node_population
+ if node_population.mixed_nodes:
+ # We'll allow a population to have virtual and non-virtual nodes but it is not ideal
+            self.io.log_warning(('Node population {} contains both virtual and non-virtual nodes which can cause ' +
+                                 'memory and build-time inefficiency. Consider separating virtual nodes into their ' +
+                                 'own population').format(pop_name))
+
+ # Used in inputs/reports when needed to get all gids belonging to a node population
+ self._node_sets[pop_name] = NodeSet({'population': pop_name}, self)
+
+ def add_edges(self, edge_population):
+ edge_population.initialize(self)
+ pop_name = edge_population.name
+
+ # Check that source_population exists
+ src_pop_name = edge_population.source_nodes
+ if src_pop_name not in self._node_populations:
+ self.io.log_exception('Source node population {} not found. Please update {} edges'.format(src_pop_name,
+ pop_name))
+
+ # Check that the target population exists and contains non-virtual nodes (we cannot synapse onto virt nodes)
+ trg_pop_name = edge_population.target_nodes
+ if trg_pop_name not in self._node_populations or self._node_populations[trg_pop_name].virtual_nodes_only:
+            self.io.log_exception(('Node population {} does not exist (or consists of only virtual nodes). ' +
+ '{} edges cannot create connections.').format(trg_pop_name, pop_name))
+
+ edge_population.set_connection_type(src_pop=self._node_populations[src_pop_name],
+                                            trg_pop=self._node_populations[trg_pop_name])
+ self._edge_populations.append(edge_population)
+
+ def build(self):
+ self.build_nodes()
+ self.build_recurrent_edges()
+
+ def build_nodes(self):
+ raise NotImplementedError()
+
+ def build_recurrent_edges(self):
+ raise NotImplementedError()
+
+ def build_virtual_connections(self):
+ raise NotImplementedError()
+
+ @classmethod
+ def from_config(cls, conf, **properties):
+ """Generates a graph structure from a json config file or dictionary.
+
+ :param conf: name of json config file, or a dictionary with config parameters
+ :param properties: optional properties.
+ :return: A graph object of type cls
+ """
+ network = cls(**properties)
+
+ # The simulation run script should create a config-dict since it's likely to vary based on the simulator engine,
+ # however in the case the user doesn't we will try a generic conversion from dict/json to ConfigDict
+ if isinstance(conf, ConfigDict):
+ config = conf
+ else:
+ try:
+ config = ConfigDict.load(conf)
+ except Exception as e:
+                network.io.log_exception('Could not convert {} (type "{}") to a config dictionary.'.format(conf, type(conf)))
+
+ if not config.with_networks:
+ network.io.log_exception('Could not find any network files. Unable to build network.')
+
+ # TODO: These are simulator specific
+ network.spike_threshold = config.spike_threshold
+ network.dL = config.dL
+
+ # load components
+ for name, value in config.components.items():
+ network.add_component(name, value)
+
+ # load nodes
+ gid_map = config.gid_mappings
+ node_adaptor = network.get_node_adaptor('sonata')
+ for node_dict in config.nodes:
+ nodes = sonata_reader.load_nodes(node_dict['nodes_file'], node_dict['node_types_file'], gid_map,
+ adaptor=node_adaptor)
+ for node_pop in nodes:
+ network.add_nodes(node_pop)
+
+ # TODO: Raise a warning if more than one internal population and no gids (node_id collision)
+
+ # load edges
+ edge_adaptor = network.get_edge_adaptor('sonata')
+ for edge_dict in config.edges:
+ if not edge_dict.get('enabled', True):
+ continue
+
+ edges = sonata_reader.load_edges(edge_dict['edges_file'], edge_dict['edge_types_file'],
+ adaptor=edge_adaptor)
+ for edge_pop in edges:
+ network.add_edges(edge_pop)
+
+ # Add nodeset section
+ network.add_node_set('all', NodeSetAll(network))
+ for ns_name, ns_filter in config.node_sets.items():
+ network.add_node_set(ns_name, NodeSet(ns_filter, network))
+
+ return network
+
+ @classmethod
+ def from_manifest(cls, manifest_json):
+ # TODO: Add adaptors to build a simulation network from model files downloaded celltypes.brain-map.org
+ raise NotImplementedError()
+
+ @classmethod
+ def from_builder(cls, network):
+ # TODO: Add adaptors to build a simulation network from a bmtk.builder Network object
+ raise NotImplementedError()
+
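A hedged sketch of how a concrete engine uses the factory above; the subclass name and config path are hypothetical, and the config file is not provided here:

    class MyNetwork(SimNetwork):
        def build_nodes(self):
            pass  # engine-specific cell instantiation

        def build_recurrent_edges(self):
            pass  # engine-specific synapse instantiation

    net = MyNetwork.from_config('config.json')  # resolves components, nodes, edges
    net.build()                                 # build_nodes() + build_recurrent_edges()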
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/sonata_reader/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/core/sonata_reader/__init__.py
new file mode 100644
index 0000000..9b09281
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/sonata_reader/__init__.py
@@ -0,0 +1,3 @@
+from .node_adaptor import NodeAdaptor, SonataBaseNode
+from .edge_adaptor import EdgeAdaptor, SonataBaseEdge
+from .network_reader import load_nodes, load_edges
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/sonata_reader/edge_adaptor.py b/bmtk-vb/build/lib/bmtk/simulator/core/sonata_reader/edge_adaptor.py
new file mode 100644
index 0000000..01ebace
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/sonata_reader/edge_adaptor.py
@@ -0,0 +1,206 @@
+import os
+import ast
+import json
+import types
+
+import numpy as np
+
+
+class SonataBaseEdge(object):
+ def __init__(self, sonata_edge, edge_adaptor):
+ self._edge = sonata_edge
+ self._prop_adaptor = edge_adaptor
+
+ @property
+ def source_node_id(self):
+ return self._edge.source_node_id
+
+ @property
+ def target_node_id(self):
+ return self._edge.target_node_id
+
+ @property
+ def dynamics_params(self):
+ return self._prop_adaptor.dynamics_params(self._edge)
+
+ @property
+ def delay(self):
+ return self._edge['delay']
+
+ @property
+ def weight_function(self):
+ return self._prop_adaptor.weight_function(self._edge)
+
+ @property
+ def preselected_targets(self):
+ return self._prop_adaptor.preselected_targets
+
+ @property
+ def target_sections(self):
+ return self._edge['target_sections']
+
+ @property
+ def target_distance(self):
+ return self._edge['distance_range']
+
+ @property
+ def edge_type_id(self):
+ return self._edge.edge_type_id
+
+ @property
+ def nsyns(self):
+ return self._prop_adaptor.nsyns(self._edge)
+
+ @property
+ def model_template(self):
+ return self._edge['model_template']
+
+ def syn_weight(self, src_node, trg_node):
+ return self._prop_adaptor.syn_weight(self, src_node=src_node, trg_node=trg_node)
+
+ def __getitem__(self, item):
+ return self._edge[item]
+
+
+class EdgeAdaptor(object):
+ def __init__(self, network):
+ self._network = network
+ self._func_caches = self._network.py_function_caches
+
+ @property
+ def batch_process(self):
+ return False
+
+ @batch_process.setter
+ def batch_process(self, flag):
+ pass
+
+ def get_edge(self, sonata_node):
+ return SonataBaseEdge(sonata_node, self)
+
+ @staticmethod
+ def preprocess_edge_types(network, edge_population):
+ edge_types_table = edge_population.types_table
+ edge_type_ids = np.unique(edge_population.type_ids)
+
+ for et_id in edge_type_ids:
+ edge_type = edge_types_table[et_id]
+ if 'dynamics_params' in edge_types_table.columns:
+ dynamics_params = edge_type['dynamics_params']
+ params_dir = network.get_component('synaptic_models_dir')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ edge_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ network.io.log_exception('Could not find edge dynamics_params file {}.'.format(params_path))
+
+ # Split target_sections
+ if 'target_sections' in edge_type:
+ trg_sec = edge_type['target_sections']
+ if trg_sec is not None:
+ try:
+ edge_type['target_sections'] = ast.literal_eval(trg_sec)
+ except Exception as exc:
+ network.io.log_warning('Unable to split target_sections list {}'.format(trg_sec))
+ edge_type['target_sections'] = None
+
+ # Split target distances
+ if 'distance_range' in edge_type:
+ dist_range = edge_type['distance_range']
+ if dist_range is not None:
+ try:
+                        # TODO: Make sure the distance range has at most two values
+ edge_type['distance_range'] = json.loads(dist_range)
+ except Exception as e:
+ try:
+ edge_type['distance_range'] = [0.0, float(dist_range)]
+ except Exception as e:
+ network.io.log_warning('Unable to parse distance_range {}'.format(dist_range))
+ edge_type['distance_range'] = None
+
+ @classmethod
+ def create_adaptor(cls, edge_group, network):
+ prop_map = cls(network)
+ return cls.patch_adaptor(prop_map, edge_group)
+
+ @staticmethod
+ def patch_adaptor(adaptor, edge_group):
+ # dynamics_params
+ if edge_group.has_dynamics_params:
+ adaptor.dynamics_params = types.MethodType(group_dynamics_params, adaptor)
+ else: # 'dynamics_params' in node_group.all_columns:
+ adaptor.dynamics_params = types.MethodType(types_dynamics_params, adaptor)
+
+ # For fetching/calculating synaptic weights
+ if 'weight_function' in edge_group.all_columns:
+ # Customized function for user to calculate the synaptic weight
+ adaptor.weight_function = types.MethodType(weight_function, adaptor)
+ adaptor.syn_weight = types.MethodType(syn_weight_function, adaptor)
+ elif 'syn_weight' in edge_group.all_columns:
+ # Just return the synaptic weight
+ adaptor.weight_function = types.MethodType(ret_none_function, adaptor)
+ adaptor.syn_weight = types.MethodType(syn_weight, adaptor)
+ else:
+ raise Exception('Could not find syn_weight or weight_function properties. Cannot create connections.')
+
+ # For determining the synapse placement
+ if 'sec_id' in edge_group.all_columns:
+ adaptor.preselected_targets = True
+ adaptor.nsyns = types.MethodType(no_nsyns, adaptor)
+ elif 'nsyns' in edge_group.all_columns:
+ adaptor.preselected_targets = False
+ adaptor.nsyns = types.MethodType(nsyns, adaptor)
+ else:
+ # It will get here for connections onto point neurons
+ adaptor.preselected_targets = True
+ adaptor.nsyns = types.MethodType(no_nsyns, adaptor)
+
+ return adaptor
+
+
+def ret_none_function(self, edge):
+ return None
+
+
+def weight_function(self, edge):
+ return edge['weight_function']
+
+
+def syn_weight(self, edge, src_node, trg_node):
+ return edge['syn_weight']
+
+
+def syn_weight_function(self, edge, src_node, trg_node):
+ weight_fnc_name = edge.weight_function
+ if weight_fnc_name is None:
+ weight_fnc = self._func_caches.py_modules.synaptic_weight('default_weight_fnc')
+ return weight_fnc(edge, src_node, trg_node)
+
+ elif self._func_caches.py_modules.has_synaptic_weight(weight_fnc_name):
+ weight_fnc = self._func_caches.py_modules.synaptic_weight(weight_fnc_name)
+ return weight_fnc(edge, src_node, trg_node)
+
+ else:
+ self._network.io.log_exception('weight_function {} is not defined.'.format(weight_fnc_name))
+
+
+def nsyns(self, edge):
+ return edge['nsyns']
+
+
+def no_nsyns(self, edge):
+ return 1
+
+
+def types_dynamics_params(self, node):
+ return node['dynamics_params']
+
+
+def group_dynamics_params(self, node):
+ return node.dynamics_params
\ No newline at end of file
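The patch_adaptor() logic above relies on binding free functions as methods of a single instance. A minimal demonstration of that trick, bypassing __init__ since no network is needed for it:

    import types

    def nsyns_of(self, edge):
        return edge['nsyns']

    adaptor = EdgeAdaptor.__new__(EdgeAdaptor)  # bare instance, no __init__
    adaptor.nsyns = types.MethodType(nsyns_of, adaptor)
    print(adaptor.nsyns({'nsyns': 4}))          # 4

Because the binding is per-instance, each edge group gets exactly the accessors its columns support, without subclassing EdgeAdaptor for every combination.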
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/sonata_reader/network_reader.py b/bmtk-vb/build/lib/bmtk/simulator/core/sonata_reader/network_reader.py
new file mode 100644
index 0000000..648d9ad
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/sonata_reader/network_reader.py
@@ -0,0 +1,241 @@
+import os
+import numpy as np
+import json
+import ast
+
+from bmtk.simulator.core.network_reader import NodesReader, EdgesReader
+from bmtk.simulator.core.sonata_reader.node_adaptor import NodeAdaptor
+from bmtk.simulator.core.sonata_reader.edge_adaptor import EdgeAdaptor
+from bmtk.utils import sonata
+
+
+def load_nodes(nodes_h5, node_types_csv, gid_table=None, selected_nodes=None, adaptor=NodeAdaptor):
+ return SonataNodes.load(nodes_h5, node_types_csv, gid_table, selected_nodes, adaptor)
+
+
+def load_edges(edges_h5, edge_types_csv, selected_populations=None, adaptor=EdgeAdaptor):
+ return SonataEdges.load(edges_h5, edge_types_csv, selected_populations, adaptor)
+
+
+class SonataNodes(NodesReader):
+ def __init__(self, sonata_node_population, prop_adaptor):
+ super(SonataNodes, self).__init__()
+ self._node_pop = sonata_node_population
+ self._pop_name = self._node_pop.name
+ self._prop_adaptors = {}
+ self._adaptor = prop_adaptor
+
+ @property
+ def name(self):
+ return self._pop_name
+
+ @property
+ def adaptor(self):
+ return self._adaptor
+
+ def initialize(self, network):
+        # Determine the various model-types available in the Node Population; whether a population of nodes
+        # contains virtual/external nodes, internal nodes, or a mix of both affects how the nodes are built
+ model_types = set()
+ for grp in self._node_pop.groups:
+ if self._adaptor.COL_MODEL_TYPE not in grp.all_columns:
+ network.io.log_exception('property {} is missing from nodes.'.format(self._adaptor.COL_MODEL_TYPE))
+
+ model_types.update(set(np.unique(grp.get_values(self._adaptor.COL_MODEL_TYPE))))
+
+ if 'virtual' in model_types:
+ self._has_virtual_nodes = True
+ model_types -= set(['virtual'])
+ else:
+ self._has_virtual_nodes = False
+
+ if model_types:
+ self._has_internal_nodes = True
+
+ self._adaptor.preprocess_node_types(network, self._node_pop)
+ #self._preprocess_node_types(network)
+ self._prop_adaptors = {grp.group_id: self._create_adaptor(grp, network) for grp in self._node_pop.groups}
+
+ def _create_adaptor(self, grp, network):
+ return self._adaptor.create_adaptor(grp, network)
+
+ '''
+ def _preprocess_node_types(self, network):
+ # TODO: The following figures out the actually used node-type-ids. For mem and speed may be better to just
+ # process them all
+ node_type_ids = self._node_pop.type_ids
+ # TODO: Verify all the node_type_ids are in the table
+ node_types_table = self._node_pop.types_table
+
+        # TODO: Convert model_type to an enum
+ if network.has_component('morphologies_dir'):
+ morph_dir = network.get_component('morphologies_dir')
+ if morph_dir is not None and 'morphology_file' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ if node_type['morphology_file'] is None:
+ continue
+                    # TODO: Check the file exists
+ # TODO: See if absolute path is stored in csv
+ node_type['morphology_file'] = os.path.join(morph_dir, node_type['morphology_file'])
+
+ if 'dynamics_params' in node_types_table.columns and 'model_type' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ dynamics_params = node_type['dynamics_params']
+ if isinstance(dynamics_params, dict):
+ continue
+
+ model_type = node_type['model_type']
+ if model_type == 'biophysical':
+ params_dir = network.get_component('biophysical_neuron_models_dir')
+ elif model_type == 'point_process':
+ params_dir = network.get_component('point_neuron_models_dir')
+ elif model_type == 'point_soma':
+ params_dir = network.get_component('point_neuron_models_dir')
+ else:
+ # Not sure what to do in this case, throw Exception?
+ params_dir = network.get_component('custom_neuron_models')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ node_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ network.io.log_exception('Could not find node dynamics_params file {}.'.format(params_path))
+
+ # TODO: Use adaptor to validate model_type and model_template values
+ '''
+
+ @classmethod
+ def load(cls, nodes_h5, node_types_csv, gid_table=None, selected_nodes=None, adaptor=NodeAdaptor):
+ sonata_file = sonata.File(data_files=nodes_h5, data_type_files=node_types_csv, gid_table=gid_table)
+ node_populations = []
+ for node_pop in sonata_file.nodes.populations:
+ node_populations.append(cls(node_pop, adaptor))
+
+ return node_populations
+
+ def get_node(self, node_id):
+ return self._node_pop.get_node_id(node_id)
+
+ def __getitem__(self, item):
+ for base_node in self._node_pop[item]:
+ snode = self._prop_adaptors[base_node.group_id].get_node(base_node)
+ yield snode
+
+    def __iter__(self):
+        # Delegate to the get_nodes() generator; returning self would break the
+        # iterator protocol since no __next__ is defined.
+        return self.get_nodes()
+
+ def filter(self, filter_conditons):
+ for node in self._node_pop.filter(**filter_conditons):
+ yield node
+
+ def get_nodes(self):
+ for node_group in self._node_pop.groups:
+ node_adaptor = self._prop_adaptors[node_group.group_id]
+ if node_adaptor.batch_process:
+ for batch in node_adaptor.get_batches(node_group):
+ yield batch
+ else:
+ for node in node_group:
+ yield node_adaptor.get_node(node)
+
+
+class SonataEdges(EdgesReader):
+ def __init__(self, edge_population, adaptor):
+ self._edge_pop = edge_population
+ self._adaptor_cls = adaptor
+ self._edge_adaptors = {}
+
+ @property
+ def name(self):
+ return self._edge_pop.name
+
+ @property
+ def source_nodes(self):
+ return self._edge_pop.source_population
+
+ @property
+ def target_nodes(self):
+ return self._edge_pop.target_population
+
+ def initialize(self, network):
+ self._adaptor_cls.preprocess_edge_types(network, self._edge_pop)
+ # self._preprocess_edge_types(network)
+ self._edge_adaptors = {grp.group_id: self._adaptor_cls.create_adaptor(grp, network)
+ for grp in self._edge_pop.groups}
+
+ def get_target(self, node_id):
+ for edge in self._edge_pop.get_target(node_id):
+ yield self._edge_adaptors[edge.group_id].get_edge(edge)
+
+ def get_edges(self):
+ for edge_group in self._edge_pop.groups:
+ edge_adaptor = self._edge_adaptors[edge_group.group_id]
+ if edge_adaptor.batch_process:
+ for edge in edge_adaptor.get_batches(edge_group):
+ yield edge
+ else:
+                for edge in edge_group:  # only this group's edges, not the whole population
+ yield edge_adaptor.get_edge(edge)
+
+ '''
+ def _preprocess_edge_types(self, network):
+ edge_types_table = self._edge_pop.types_table
+ edge_type_ids = np.unique(self._edge_pop.type_ids)
+
+ for et_id in edge_type_ids:
+ edge_type = edge_types_table[et_id]
+ if 'dynamics_params' in edge_types_table.columns:
+ dynamics_params = edge_type['dynamics_params']
+ params_dir = network.get_component('synaptic_models_dir')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ edge_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ self.io.log_exception('Could not find edge dynamics_params file {}.'.format(params_path))
+
+ # Split target_sections
+ if 'target_sections' in edge_type:
+ trg_sec = edge_type['target_sections']
+ if trg_sec is not None:
+ try:
+ edge_type['target_sections'] = ast.literal_eval(trg_sec)
+ except Exception as exc:
+ self.io.log_warning('Unable to split target_sections list {}'.format(trg_sec))
+ edge_type['target_sections'] = None
+
+ # Split target distances
+ if 'distance_range' in edge_type:
+ dist_range = edge_type['distance_range']
+ if dist_range is not None:
+ try:
+                    # TODO: Make sure the distance_range has at most two values
+ edge_type['distance_range'] = json.loads(dist_range)
+ except Exception as e:
+ try:
+ edge_type['distance_range'] = [0.0, float(dist_range)]
+ except Exception as e:
+ self.io.log_warning('Unable to parse distance_range {}'.format(dist_range))
+ edge_type['distance_range'] = None
+ '''
+
+ @classmethod
+ def load(cls, edges_h5, edge_types_csv, selected_populations=None, adaptor=EdgeAdaptor):
+ sonata_file = sonata.File(data_files=edges_h5, data_type_files=edge_types_csv)
+ edge_populations = []
+ for edge_pop in sonata_file.edges.populations:
+ edge_populations.append(cls(edge_pop, adaptor))
+
+ return edge_populations
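+
+
+# Example (sketch, hypothetical file names): the edge loader mirrors the node
+# loader.
+#
+#   edge_pops = SonataEdges.load('network/v1_v1_edges.h5', 'network/v1_v1_edge_types.csv')
+#   for pop in edge_pops:
+#       pop.initialize(net)   # `net` is the owning simulator network
+#       for edge in pop.get_edges():
+#           pass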
diff --git a/bmtk-vb/build/lib/bmtk/simulator/core/sonata_reader/node_adaptor.py b/bmtk-vb/build/lib/bmtk/simulator/core/sonata_reader/node_adaptor.py
new file mode 100644
index 0000000..f8d980c
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/core/sonata_reader/node_adaptor.py
@@ -0,0 +1,207 @@
+import os
+import json
+import types
+import numpy as np
+
+
+class SonataBaseNode(object):
+ def __init__(self, node, prop_adaptor):
+ self._node = node
+ self._prop_adaptor = prop_adaptor
+
+ @property
+ def node_id(self):
+ return self._prop_adaptor.node_id(self._node)
+
+ @property
+ def gid(self):
+ return self._prop_adaptor.gid(self._node)
+
+ @property
+ def dynamics_params(self):
+ return self._prop_adaptor.dynamics_params(self._node)
+
+ @property
+ def model_type(self):
+ return self._prop_adaptor.model_type(self._node)
+
+ @property
+ def model_template(self):
+ return self._prop_adaptor.model_template(self._node)
+
+ @property
+ def model_processing(self):
+ return self._prop_adaptor.model_processing(self._node)
+
+ @property
+ def network(self):
+ return self._prop_adaptor.network
+
+ @property
+ def population(self):
+ return self._prop_adaptor.network
+
+ def __getitem__(self, prop_key):
+ return self._node[prop_key]
+
+
+class NodeAdaptor(object):
+ COL_MODEL_TYPE = 'model_type'
+ COL_GID = 'gid'
+ COL_DYNAMICS_PARAM = 'dynamics_params'
+ COL_MODEL_TEMPLATE = 'model_template'
+ COL_MODEL_PROCESSING = 'model_processing'
+
+ def __init__(self, network):
+ self._network = network
+ self._model_template_cache = {}
+ self._model_processing_cache = {}
+
+ @property
+ def batch_process(self):
+ return False
+
+ @batch_process.setter
+ def batch_process(self, flag):
+ pass
+
+ def node_id(self, node):
+ return node.node_id
+
+ def model_type(self, node):
+ return node[self.COL_MODEL_TYPE]
+
+ def model_template(self, node):
+ # TODO: If model-template comes from the types table we should split it in _preprocess_types
+ model_template_str = node[self.COL_MODEL_TEMPLATE]
+ if model_template_str is None:
+ return None
+ elif model_template_str in self._model_template_cache:
+ return self._model_template_cache[model_template_str]
+ else:
+ template_parts = model_template_str.split(':')
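+            # e.g. 'nrn:IntFire1' -> ('nrn', 'IntFire1'): the directive picks
+            # the loader mechanism, the remainder names the template.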
+ directive, template = template_parts[0], template_parts[1]
+ self._model_template_cache[model_template_str] = (directive, template)
+ return directive, template
+
+ def model_processing(self, node):
+ model_processing_str = node[self.COL_MODEL_PROCESSING]
+ if model_processing_str is None:
+ return []
+ else:
+ # TODO: Split in the node_types_table when possible
+ return model_processing_str.split(',')
+
+ @staticmethod
+ def preprocess_node_types(network, node_population):
+        # TODO: The following figures out the actually-used node-type-ids. For memory and speed it may be
+        # better to just process them all.
+ #node_type_ids = node_population.type_ids
+ node_type_ids = np.unique(node_population.type_ids)
+ # TODO: Verify all the node_type_ids are in the table
+ node_types_table = node_population.types_table
+
+ # TODO: Convert model_type to a enum
+ if network.has_component('morphologies_dir'):
+ morph_dir = network.get_component('morphologies_dir')
+ if morph_dir is not None and 'morphology' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ if node_type['morphology'] is None:
+ continue
+
+ # TODO: See if absolute path is stored in csv
+ swc_path = os.path.join(morph_dir, node_type['morphology'])
+
+                # According to the SONATA format the .swc extension is optional, so add it if required.
+ if not os.path.exists(swc_path) and not swc_path.endswith('.swc'):
+ swc_path += '.swc'
+ if not os.path.exists(swc_path):
+ network.io.log_exception('Could not find node morphology file {}.'.format(swc_path))
+
+ node_type['morphology'] = swc_path
+
+ if 'dynamics_params' in node_types_table.columns and 'model_type' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ dynamics_params = node_type['dynamics_params']
+ if isinstance(dynamics_params, dict):
+ continue
+
+ if dynamics_params is None:
+ continue
+
+ model_type = node_type['model_type']
+ if model_type == 'biophysical':
+ params_dir = network.get_component('biophysical_neuron_models_dir')
+ elif model_type == 'point_process':
+ params_dir = network.get_component('point_neuron_models_dir')
+ elif model_type == 'point_soma':
+ params_dir = network.get_component('point_neuron_models_dir')
+ elif model_type == 'population':
+ params_dir = network.get_component('population_models_dir')
+ else:
+ # Not sure what to do in this case, throw Exception?
+ params_dir = network.get_component('custom_neuron_models')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+                try:
+                    with open(params_path, 'r') as params_file:
+                        node_type['dynamics_params'] = json.load(params_file)
+ except Exception:
+ # TODO: Check dynamics_params before
+ network.io.log_exception('Could not find node dynamics_params file {}.'.format(params_path))
+
+ # TODO: Use adaptor to validate model_type and model_template values
+
+ @classmethod
+ def create_adaptor(cls, node_group, network):
+ prop_map = cls(network)
+ return cls.patch_adaptor(prop_map, node_group, network)
+
+ @classmethod
+ def patch_adaptor(cls, adaptor, node_group, network):
+ adaptor.network = network
+
+ # Use node_id if the user hasn't specified a gid table
+ if not node_group.has_gids:
+ adaptor.gid = types.MethodType(NodeAdaptor.node_id, adaptor)
+
+ # dynamics_params
+ if node_group.has_dynamics_params:
+ adaptor.dynamics_params = types.MethodType(group_dynamics_params, adaptor)
+ elif 'dynamics_params' in node_group.all_columns:
+ adaptor.dynamics_params = types.MethodType(types_dynamics_params, adaptor)
+ else:
+ adaptor.dynamics_params = types.MethodType(none_function, adaptor)
+
+ if 'model_template' not in node_group.all_columns:
+ adaptor.model_template = types.MethodType(none_function, adaptor)
+
+ if 'model_processing' not in node_group.all_columns:
+ adaptor.model_processing = types.MethodType(empty_list, adaptor)
+
+ return adaptor
+
+ def get_node(self, sonata_node):
+ return SonataBaseNode(sonata_node, self)
+
+
+def none_function(self, node):
+ return None
+
+
+def empty_list(self, node):
+ return []
+
+
+def types_dynamics_params(self, node):
+ return node['dynamics_params']
+
+
+def group_dynamics_params(self, node):
+ return node.dynamics_params
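+
+
+# Example (sketch): patch_adaptor swaps the per-column accessors above onto an
+# adaptor instance at runtime via types.MethodType, so a population whose
+# node_types table lacks e.g. 'model_template' cheaply returns None instead of
+# raising KeyError. `node_group`, `network` and `raw_node` are placeholders.
+#
+#   adaptor = NodeAdaptor.create_adaptor(node_group, network)
+#   snode = adaptor.get_node(raw_node)
+#   snode.model_template   # None, or a (directive, template) tuple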
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/__init__.py
new file mode 100644
index 0000000..9e6712b
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/__init__.py
@@ -0,0 +1,5 @@
+from bmtk.simulator.filternet.filternetwork import FilterNetwork
+from bmtk.simulator.filternet.filtersimulator import FilterSimulator
+from bmtk.simulator.filternet.config import Config
+
+import bmtk.simulator.filternet.default_setters
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/cell.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/cell.py
new file mode 100644
index 0000000..240cab5
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/cell.py
@@ -0,0 +1,28 @@
+from bmtk.simulator.filternet.pyfunction_cache import py_modules
+
+
+class Cell(object):
+ def __init__(self, node):
+ self._node = node
+ self._gid = node.gid
+ self._node_id = node.node_id
+ self._lgn_cell_obj = None
+
+ @property
+ def gid(self):
+ return self._gid
+
+ @property
+ def lgn_cell_obj(self):
+ return self._lgn_cell_obj
+
+ def build(self):
+ cell_loaders = self._node.model_processing
+        if len(cell_loaders) > 1:
+            raise Exception('Cannot use more than one model_processing method per cell. Exiting.')
+ elif len(cell_loaders) == 1:
+ model_processing_fnc = py_modules.cell_processor(cell_loaders[0])
+ else:
+ model_processing_fnc = py_modules.cell_processor('default')
+
+ self._lgn_cell_obj = model_processing_fnc(self._node)
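+
+
+# Example (sketch): a node whose model_processing list is empty is built with
+# the registered 'default' processor.
+#
+#   cell = Cell(node)          # `node` is a SonataBaseNode
+#   cell.build()               # dispatches through py_modules.cell_processor
+#   unit = cell.lgn_cell_obj   # whatever object the processor returned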
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/cell_models.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/cell_models.py
new file mode 100644
index 0000000..a415e9f
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/cell_models.py
@@ -0,0 +1 @@
+from bmtk.simulator.filternet.lgnmodel.cellmodel import *
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/config.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/config.py
new file mode 100644
index 0000000..b10ee10
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/config.py
@@ -0,0 +1,8 @@
+import os
+import json
+
+from bmtk.simulator.core.config import ConfigDict
+
+
+class Config(ConfigDict):
+ pass
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/default_setters/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/default_setters/__init__.py
new file mode 100644
index 0000000..6ec46cc
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/default_setters/__init__.py
@@ -0,0 +1 @@
+from .cell_loaders import *
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/default_setters/cell_loaders.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/default_setters/cell_loaders.py
new file mode 100644
index 0000000..c0c74ad
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/default_setters/cell_loaders.py
@@ -0,0 +1,9 @@
+from bmtk.simulator.filternet.pyfunction_cache import add_cell_processor
+
+
+def default_cell_loader(node):
+ print(node.model_template)
+ print('DEFAULT')
+ exit()
+
+add_cell_processor(default_cell_loader, 'default', overwrite=False)
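+
+
+# Example (sketch): replace the placeholder above by registering your own
+# processor; it receives the node and returns the cell object used for rate
+# evaluation. `my_cell_loader` is hypothetical.
+#
+#   def my_cell_loader(node):
+#       ...  # build an lgnmodel cell from node properties
+#
+#   add_cell_processor(my_cell_loader, 'default', overwrite=True)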
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/filternetwork.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/filternetwork.py
new file mode 100644
index 0000000..170a9e7
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/filternetwork.py
@@ -0,0 +1,28 @@
+from bmtk.simulator.core.simulator_network import SimNetwork
+from bmtk.simulator.filternet.cell import Cell
+from bmtk.simulator.filternet.pyfunction_cache import py_modules
+
+
+class FilterNetwork(SimNetwork):
+ def __init__(self):
+ super(FilterNetwork, self).__init__()
+
+ self._local_cells = []
+
+ def cells(self):
+ return self._local_cells
+
+ def build(self):
+ self.build_nodes()
+
+ def set_default_processing(self, processing_fnc):
+ py_modules.add_cell_processor('default', processing_fnc)
+
+ def build_nodes(self):
+ for node_pop in self.node_populations:
+ for node in node_pop.get_nodes():
+ cell = Cell(node)
+ cell.build()
+ self._local_cells.append(cell)
+
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/filters.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/filters.py
new file mode 100644
index 0000000..ae53df5
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/filters.py
@@ -0,0 +1,3 @@
+from bmtk.simulator.filternet.lgnmodel.temporalfilter import *
+from bmtk.simulator.filternet.lgnmodel.spatialfilter import *
+from bmtk.simulator.filternet.lgnmodel.linearfilter import *
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/filtersimulator.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/filtersimulator.py
new file mode 100644
index 0000000..7d6742a
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/filtersimulator.py
@@ -0,0 +1,193 @@
+import csv
+
+from bmtk.simulator.core.simulator import Simulator
+import bmtk.simulator.utils.simulation_inputs as inputs
+from bmtk.simulator.filternet.config import Config
+from bmtk.simulator.filternet.lgnmodel.movie import *
+from bmtk.simulator.filternet import modules as mods
+from bmtk.simulator.filternet.io_tools import io
+from six import string_types
+
+
+class FilterSimulator(Simulator):
+ def __init__(self, network, dt, tstop):
+ super(FilterSimulator, self).__init__()
+ self._network = network
+ self._dt = dt
+ self._tstop = tstop/1000.0
+
+ self.rates_csv = None
+ self._movies = []
+
+ def add_movie(self, movie_type, params):
+ # TODO: Move this into its own factory
+ movie_type = movie_type.lower() if isinstance(movie_type, string_types) else 'movie'
+ if movie_type == 'movie' or not movie_type:
+ raise NotImplementedError
+
+ elif movie_type == 'full_field':
+ raise NotImplementedError
+
+ elif movie_type == 'full_field_flash':
+ raise NotImplementedError
+
+        elif movie_type == 'graiting':
+            # Note: 'graiting' (sic) is kept as-is; it is the key existing
+            # config files use for grating movies.
+            init_params = FilterSimulator.find_params(['row_size', 'col_size', 'frame_rate'], **params)
+            create_params = FilterSimulator.find_params(['gray_screen_dur', 'cpd', 'temporal_f', 'theta', 'contrast'],
+                                                        **params)
+            gm = GratingMovie(**init_params)
+            grating_movie = gm.create_movie(t_min=0.0, t_max=self._tstop, **create_params)
+            self._movies.append(grating_movie)
+
+ else:
+ raise Exception('Unknown movie type {}'.format(movie_type))
+
+ def run(self):
+ for mod in self._sim_mods:
+ mod.initialize(self)
+
+ io.log_info('Evaluating rates.')
+ for cell in self._network.cells():
+ for movie in self._movies:
+ ts, f_rates = cell.lgn_cell_obj.evaluate(movie, downsample=1, separable=True)
+
+ for mod in self._sim_mods:
+ mod.save(self, cell.gid, ts, f_rates)
+
+ """
+ if self.rates_csv is not None:
+ print 'saving {}'.format(cell.gid)
+ for t, f in zip(t, f_tot):
+ csv_writer.writerow([t, f, cell.gid])
+ csv_fhandle.flush()
+ """
+ io.log_info('Done.')
+ for mod in self._sim_mods:
+ mod.finalize(self)
+
+ """
+ def generate_spikes(LGN, trials, duration, output_file_name):
+ # f_tot = np.loadtxt(output_file_name + "_f_tot.csv", delimiter=" ")
+ # t = f_tot[0, :]
+
+ f = h5.File(output_file_name + "_f_tot.h5", 'r')
+ f_tot = np.array(f.get('firing_rates_Hz'))
+
+ t = np.array(f.get('time'))
+ # For h5 files that don't have time explicitly saved
+ t = np.linspace(0, duration, f_tot.shape[1])
+
+
+ #create output file
+ f = nwb.create_blank_file(output_file_name + '_spikes.nwb', force=True)
+
+ for trial in range(trials):
+ for counter in range(len(LGN.nodes())):
+ try:
+ spike_train = np.array(f_rate_to_spike_train(t*1000., f_tot[counter, :], np.random.randint(10000), 1000.*min(t), 1000.*max(t), 0.1))
+ except:
+ spike_train = 1000.*np.array(pg.generate_inhomogenous_poisson(t, f_tot[counter, :], seed=np.random.randint(10000))) #convert to milliseconds and hence the multiplication by 1000
+
+ nwb.SpikeTrain(spike_train, unit='millisecond').add_to_processing(f, 'trial_%s' % trial)
+ f.close()
+
+ """
+
+
+ @staticmethod
+ def find_params(param_names, **kwargs):
+ ret_dict = {}
+ for pn in param_names:
+ if pn in kwargs:
+ ret_dict[pn] = kwargs[pn]
+
+ return ret_dict
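+
+    # Example: find_params(['row_size', 'frame_rate'], row_size=120, theta=45.0)
+    # returns {'row_size': 120}; requested keys that are absent and unrequested
+    # keys are both silently dropped.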
+
+ @classmethod
+ def from_config(cls, config, network):
+ if not isinstance(config, Config):
+ try:
+ config = Config.load(config, False)
+ except Exception as e:
+ network.io.log_exception('Could not convert {} (type "{}") to json.'.format(config, type(config)))
+
+ if not config.with_networks:
+ network.io.log_exception('Could not find any network files. Unable to build network.')
+
+ sim = cls(network=network, dt=config.dt, tstop=config.tstop)
+
+ network.io.log_info('Building cells.')
+ network.build_nodes()
+
+ # TODO: Need to create a gid selector
+ for sim_input in inputs.from_config(config):
+ if sim_input.input_type == 'movie':
+ sim.add_movie(sim_input.module, sim_input.params)
+ else:
+ raise Exception('Unable to load input type {}'.format(sim_input.input_type))
+
+
+ """
+ node_set = network.get_node_set(sim_input.node_set)
+ if sim_input.input_type == 'spikes':
+ spikes = spike_trains.SpikesInput.load(name=sim_input.name, module=sim_input.module,
+ input_type=sim_input.input_type, params=sim_input.params)
+ io.log_info('Build virtual cell stimulations for {}'.format(sim_input.name))
+ network.add_spike_trains(spikes, node_set)
+
+ elif sim_input.module == 'IClamp':
+ # TODO: Parse from csv file
+ amplitude = sim_input.params['amp']
+ delay = sim_input.params['delay']
+ duration = sim_input.params['duration']
+ gids = sim_input.params['node_set']
+ sim.attach_current_clamp(amplitude, delay, duration, node_set)
+
+ elif sim_input.module == 'xstim':
+ sim.add_mod(mods.XStimMod(**sim_input.params))
+
+ else:
+ io.log_exception('Can not parse input format {}'.format(sim_input.name))
+ """
+
+
+ rates_csv = config.output.get('rates_csv', None)
+ rates_h5 = config.output.get('rates_h5', None)
+ if rates_csv or rates_h5:
+ sim.add_mod(mods.RecordRates(rates_csv, rates_h5, config.output_dir))
+
+ spikes_csv = config.output.get('spikes_csv', None)
+ spikes_h5 = config.output.get('spikes_h5', None)
+ spikes_nwb = config.output.get('spikes_nwb', None)
+ if spikes_csv or spikes_h5 or spikes_nwb:
+ sim.add_mod(mods.SpikesGenerator(spikes_csv, spikes_h5, spikes_nwb, config.output_dir))
+
+ # Parse the "reports" section of the config and load an associated output module for each report
+ """
+ sim_reports = reports.from_config(config)
+ for report in sim_reports:
+ if isinstance(report, reports.SpikesReport):
+ mod = mods.SpikesMod(**report.params)
+
+ elif isinstance(report, reports.MembraneReport):
+ if report.params['sections'] == 'soma':
+ mod = mods.SomaReport(**report.params)
+
+ else:
+ #print report.params
+ mod = mods.MembraneReport(**report.params)
+
+ elif isinstance(report, reports.ECPReport):
+ mod = mods.EcpMod(**report.params)
+ # Set up the ability for ecp on all relevant cells
+ # TODO: According to spec we need to allow a different subset other than only biophysical cells
+ for gid, cell in network.cell_type_maps('biophysical').items():
+ cell.setup_ecp()
+ else:
+ # TODO: Allow users to register customized modules using pymodules
+ io.log_warning('Unrecognized module {}, skipping.'.format(report.module))
+ continue
+
+ sim.add_mod(mod)
+ """
+ return sim
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/io_tools.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/io_tools.py
new file mode 100644
index 0000000..dfdcfaa
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/io_tools.py
@@ -0,0 +1 @@
+from bmtk.simulator.core.io_tools import io
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/__init__.py
new file mode 100644
index 0000000..72b9443
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/__init__.py
@@ -0,0 +1,7 @@
+__version__ = '0.1.0'
+
+# from lgnmodel import lgnmodel
+# from lgnmodel.dev import mask
+# from lgnmodel.dev import movie
+# from lgnmodel import cellmodel
+# from lgnmodel.dev import boundcellmodel
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/cellmodel.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/cellmodel.py
new file mode 100644
index 0000000..bc64495
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/cellmodel.py
@@ -0,0 +1,358 @@
+#import isee_engine
+import os
+import itertools
+import matplotlib.pyplot as plt
+import numpy as np
+from . import utilities as util
+import importlib
+from .kernel import Kernel2D, Kernel3D
+from .linearfilter import SpatioTemporalFilter
+import json
+from .spatialfilter import GaussianSpatialFilter
+from .transferfunction import ScalarTransferFunction
+from .temporalfilter import TemporalFilterCosineBump
+from .cursor import LNUnitCursor, MultiLNUnitCursor
+from .movie import Movie
+from .lgnmodel1 import LGNModel, heat_plot
+from .transferfunction import MultiTransferFunction
+from .lnunit import LNUnit, MultiLNUnit
+from sympy.abc import x as symbolic_x
+from sympy.abc import y as symbolic_y
+
+
+
+class OnUnit(LNUnit):
+
+ def __init__(self, linear_filter, transfer_function):
+ assert linear_filter.amplitude > 0
+ super(OnUnit, self).__init__(linear_filter, transfer_function)
+
+class OffUnit(LNUnit):
+
+ def __init__(self, linear_filter, transfer_function):
+ assert linear_filter.amplitude < 0
+ super(OffUnit, self).__init__(linear_filter, transfer_function)
+
+class LGNOnOffCell(MultiLNUnit):
+    """A cell model for an OnOff cell"""
+    def __init__(self, on_filter, off_filter, transfer_function=MultiTransferFunction((symbolic_x, symbolic_y), 'Heaviside(x)*(x)+Heaviside(y)*(y)')):
+        """A cell whose response sums rectified ON and OFF linear subunits.
+
+        :param on_filter: SpatioTemporalFilter for the ON subunit (amplitude > 0)
+        :param off_filter: SpatioTemporalFilter for the OFF subunit (amplitude < 0)
+        :param transfer_function: MultiTransferFunction combining the two subunit outputs
+        """
+ self.on_filter = on_filter
+ self.off_filter = off_filter
+ self.on_unit = OnUnit(self.on_filter, ScalarTransferFunction('s'))
+ self.off_unit = OffUnit(self.off_filter, ScalarTransferFunction('s'))
+ super(LGNOnOffCell, self).__init__([self.on_unit, self.off_unit], transfer_function)
+
+class TwoSubfieldLinearCell(MultiLNUnit):
+
+    def __init__(self, dominant_filter, nondominant_filter, subfield_separation=10, onoff_axis_angle=45,
+                 dominant_subfield_location=(30, 40),
+                 transfer_function=MultiTransferFunction((symbolic_x, symbolic_y),
+                                                         'Heaviside(x)*(x)+Heaviside(y)*(y)')):
+
+ self.subfield_separation = subfield_separation
+ self.onoff_axis_angle = onoff_axis_angle
+ self.dominant_subfield_location = dominant_subfield_location
+ self.dominant_filter = dominant_filter
+ self.nondominant_filter = nondominant_filter
+        self.transfer_function = transfer_function
+
+ self.dominant_unit = LNUnit(self.dominant_filter, ScalarTransferFunction('s'), amplitude=self.dominant_filter.amplitude)
+ self.nondominant_unit = LNUnit(self.nondominant_filter, ScalarTransferFunction('s'), amplitude=self.dominant_filter.amplitude)
+
+ super(TwoSubfieldLinearCell, self).__init__([self.dominant_unit, self.nondominant_unit], self.transfer_function)
+
+ self.dominant_filter.spatial_filter.translate = self.dominant_subfield_location
+ hor_offset = np.cos(self.onoff_axis_angle*np.pi/180.)*self.subfield_separation + self.dominant_subfield_location[0]
+ vert_offset = np.sin(self.onoff_axis_angle*np.pi/180.)*self.subfield_separation+ self.dominant_subfield_location[1]
+ rel_translation = (hor_offset,vert_offset)
+ self.nondominant_filter.spatial_filter.translate = rel_translation
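+
+        # Worked example: with the defaults dominant_subfield_location=(30, 40),
+        # subfield_separation=10 and onoff_axis_angle=45, the nondominant
+        # subfield is centered at (30 + 10*cos(45 deg), 40 + 10*sin(45 deg))
+        # ~= (37.07, 47.07).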
+
+
+class LGNOnCell(object):
+
+ def __init__(self, **kwargs):
+
+ self.position = kwargs.pop('position', None)
+ self.weights = kwargs.pop('weights', None)
+ self.kpeaks = kwargs.pop('kpeaks', None)
+ self.amplitude = kwargs.pop('amplitude', None)
+ self.sigma = kwargs.pop('sigma', None)
+ self.transfer_function_str = kwargs.pop('transfer_function_str', 's') # 'Heaviside(s)*s')
+ self.metadata = kwargs.pop('metadata', {})
+
+ temporal_filter = TemporalFilterCosineBump(self.weights, self.kpeaks)
+ spatial_filter = GaussianSpatialFilter(translate=self.position, sigma=self.sigma, origin=(0,0)) # all distances measured from BOTTOM LEFT
+ spatiotemporal_filter = SpatioTemporalFilter(spatial_filter, temporal_filter, amplitude=self.amplitude)
+ transfer_function = ScalarTransferFunction(self.transfer_function_str)
+ self.unit = OnUnit(spatiotemporal_filter, transfer_function)
+
+class LGNOffCell(OffUnit):
+
+ def __init__(self, **kwargs):
+
+ lattice_unit_center = kwargs.pop('lattice_unit_center', None)
+ weights = kwargs.pop('weights', None)
+ kpeaks = kwargs.pop('kpeaks', None)
+ amplitude = kwargs.pop('amplitude', None)
+ sigma = kwargs.pop('sigma', None)
+ width = kwargs.pop('width', 5)
+ transfer_function_str = kwargs.pop('transfer_function_str', 'Heaviside(s)*s')
+
+ dxi = np.random.uniform(-width*1./2,width*1./2)
+ dyi = np.random.uniform(-width*1./2,width*1./2)
+ temporal_filter = TemporalFilterCosineBump(weights, kpeaks)
+ spatial_filter = GaussianSpatialFilter(translate=(dxi,dyi), sigma=sigma, origin=lattice_unit_center) # all distances measured from BOTTOM LEFT
+ spatiotemporal_filter = SpatioTemporalFilter(spatial_filter, temporal_filter, amplitude=amplitude)
+ transfer_function = ScalarTransferFunction(transfer_function_str)
+        super(LGNOffCell, self).__init__(spatiotemporal_filter, transfer_function)
+
+if __name__ == "__main__":
+
+ movie_file = '/data/mat/iSee_temp_shared/movies/TouchOfEvil.npy'
+ m_data = np.load(movie_file, 'r')
+ m = Movie(m_data[1000:], frame_rate=30.)
+
+ # Create second cell:
+ transfer_function = ScalarTransferFunction('s')
+ temporal_filter = TemporalFilterCosineBump((.4,-.3), (20,60))
+ cell_list = []
+ for xi in np.linspace(0,m.data.shape[2], 5):
+ for yi in np.linspace(0,m.data.shape[1], 5):
+ spatial_filter_on = GaussianSpatialFilter(sigma=(2,2), origin=(0,0), translate=(xi, yi))
+ on_linear_filter = SpatioTemporalFilter(spatial_filter_on, temporal_filter, amplitude=20)
+ spatial_filter_off = GaussianSpatialFilter(sigma=(4,4), origin=(0,0), translate=(xi, yi))
+ off_linear_filter = SpatioTemporalFilter(spatial_filter_off, temporal_filter, amplitude=-20)
+ on_off_cell = LGNOnOffCell(on_linear_filter, off_linear_filter)
+ cell_list.append(on_off_cell)
+
+ lgn = LGNModel(cell_list) #Here include a list of all cells
+ y = lgn.evaluate(m, downsample=100) #Does the filtering + non-linearity on movie object m
+ heat_plot(y, interpolation='none', colorbar=True)
+
+
+
+
+
+#
+# def imshow(self, ii, image_shape, fps, ax=None, show=True, relative_spatial_location=(0,0)):
+#
+# if ax is None:
+# _, ax = plt.subplots(1,1)
+#
+# curr_kernel = self.get_spatio_temporal_kernel(image_shape, fps, relative_spatial_location=relative_spatial_location)
+#
+# cax = curr_kernel.imshow(ii, ax=ax, show=False)
+#
+# if show == True:
+# plt.show()
+#
+# return ax
+#
+#
+# class OnOffCellModel(CellModel):
+#
+# def __init__(self, dc_offset=0, on_subfield=None, off_subfield=None, on_weight = 1, off_weight = -1, t_max=None):
+#
+# super(self.__class__, self).__init__(dc_offset, t_max)
+#
+# if isinstance(on_subfield, dict):
+# curr_module, curr_class = on_subfield.pop('class')
+# self.on_subfield = getattr(importlib.import_module(curr_module), curr_class)(**on_subfield)
+# else:
+# self.on_subfield = on_subfield
+#
+# super(self.__class__, self).add_subfield(on_subfield, on_weight)
+#
+# if isinstance(off_subfield, dict):
+# curr_module, curr_class = off_subfield.pop('class')
+# self.off_subfield = getattr(importlib.import_module(curr_module), curr_class)(**off_subfield)
+# else:
+# self.off_subfield = off_subfield
+#
+# super(self.__class__, self).add_subfield(off_subfield, off_weight)
+#
+#
+# def to_dict(self):
+#
+# return {'dc_offset':self.dc_offset,
+# 'on_subfield':self.on_subfield.to_dict(),
+# 'off_subfield':self.off_subfield.to_dict(),
+# 't_max':self.t_max,
+# 'class':(__name__, self.__class__.__name__)}
+#
+# class SingleSubfieldCellModel(CellModel):
+#
+# def __init__(self, subfield, weight = 1, dc_offset=0, t_max=None):
+#
+# super(SingleSubfieldCellModel, self).__init__(dc_offset, t_max)
+#
+# if isinstance(subfield, dict):
+# curr_module, curr_class = subfield.pop('class')
+# subfield = getattr(importlib.import_module(curr_module), curr_class)(**subfield)
+#
+# super(self.__class__, self).add_subfield(subfield, weight)
+#
+# def to_dict(self):
+#
+# assert len(self.subfield_list) == 1
+# subfield = self.subfield_list[0]
+# weight = self.subfield_weight_dict[subfield]
+#
+# return {'dc_offset':self.dc_offset,
+# 'subfield':subfield.to_dict(),
+# 'weight':weight,
+# 't_max':self.t_max,
+# 'class':(__name__, self.__class__.__name__)}
+#
+# class OnCellModel(SingleSubfieldCellModel):
+#
+# def __init__(self, on_subfield, weight = 1, dc_offset=0 , t_max=None):
+# assert weight > 0
+# super(OnCellModel, self).__init__(on_subfield, weight, dc_offset, t_max)
+#
+# def to_dict(self):
+# data_dict = super(OnCellModel, self).to_dict()
+# data_dict['on_subfield'] = data_dict.pop('subfield')
+# return data_dict
+#
+# class OffCellModel(SingleSubfieldCellModel):
+#
+# def __init__(self, on_subfield, weight = -1, dc_offset=0 , t_max=None):
+# assert weight < 0
+# super(OffCellModel, self).__init__(on_subfield, weight, dc_offset, t_max)
+#
+# def to_dict(self):
+# data_dict = super(OffCellModel, self).to_dict()
+# data_dict['off_subfield'] = data_dict.pop('subfield')
+# return data_dict
+
+
+# class OffCellModel(CellModel):
+#
+# def __init__(self, off_subfield, dc_offset=0, off_weight = 1, t_max=None):
+#
+# assert off_weight < 0.
+# self.weight = off_weight
+#
+#
+#
+#
+# super(self.__class__, self).__init__(dc_offset, t_max)
+#
+# if isinstance(on_subfield, dict):
+# curr_module, curr_class = on_subfield.pop('class')
+# self.subfield = getattr(importlib.import_module(curr_module), curr_class)(**on_subfield)
+# else:
+# self.subfield = on_subfield
+#
+# super(self.__class__, self).add_subfield(self.subfield, self.weight)
+#
+# def to_dict(self):
+#
+# return {'dc_offset':self.dc_offset,
+# 'on_subfield':self.subfield.to_dict(),
+# 'on_weight':self.weight,
+# 't_max':self.t_max,
+# 'class':(__name__, self.__class__.__name__)}
+
+
+
+
+
+
+# if __name__ == "__main__":
+#
+# t = np.arange(0,.5,.001)
+# example_movie = movie.Movie(file_name=os.path.join(isee_engine.movie_directory, 'TouchOfEvil.npy'), frame_rate=30.1, memmap=True)
+#
+# temporal_filter_on = TemporalFilterExponential(weight=1, tau=.05)
+# on_subfield = Subfield(scale=(5,15), weight=.5, rotation=30, temporal_filter=temporal_filter_on, translation=(0,0))
+#
+# temporal_filter_off = TemporalFilterExponential(weight=2, tau=.01)
+# off_subfield = Subfield(scale=(5,15), weight=.5, rotation=-30, temporal_filter=temporal_filter_off)
+#
+# cell = OnOffCellModel(on_subfield=on_subfield, off_subfield=off_subfield, dc_offset=0., t_max=.5)
+# curr_kernel = cell.get_spatio_temporal_kernel((100,150), 30.1)
+# curr_kernel.imshow(0)
+#
+# print cell.to_dict()
+
+
+
+# f = cell.get_spatio_temporal_filter(example_movie.movie_data.shape[1:], t,threshold=.5)
+# print len(f.t_ind_list)
+#
+#
+
+# for ii in range(example_movie.number_of_frames-curr_filter.t_max):
+# print ii, example_movie.number_of_frames, curr_filter.map(example_movie, ii)
+
+
+# off_subfield = Subfield(scale=(15,15), weight=.2, translation=(30,30))
+
+
+#
+# curr_filter = cell.get_spatio_temporal_filter((100,150))
+#
+
+#
+# # print touch_of_evil(40.41, mask=m)
+# print curr_filter.t_max
+# for ii in range(example_movie.number_of_frames-curr_filter.t_max):
+# print ii, example_movie.number_of_frames, curr_filter.map(example_movie, ii)
+
+# cell.visualize_spatial_filter((100,150))
+# show_volume(spatio_temporal_filter, vmin=spatio_temporal_filter.min(), vmax=spatio_temporal_filter.max())
+
+
+
+# def get_spatial_filter(self, image_shape, relative_spatial_location=(0,0), relative_threshold=default_relative_threshold):
+#
+# # Initialize:
+# translation_matrix = util.get_translation_matrix(relative_spatial_location)
+#
+# # On-subunit:
+# on_filter_pre_spatial = self.on_subfield.get_spatial_filter(image_shape)
+# on_filter_spatial = util.apply_transformation_matrix(on_filter_pre_spatial, translation_matrix)
+#
+# # Off-subunit:
+# off_filter_pre_spatial = self.off_subfield.get_spatial_filter(image_shape)
+# off_filter_spatial = util.apply_transformation_matrix(off_filter_pre_spatial, translation_matrix)
+#
+# spatial_filter = on_filter_spatial - off_filter_spatial
+#
+# tmp = np.abs(spatial_filter)
+# spatial_filter[np.where(tmp/tmp.max() < relative_threshold )] = 0
+#
+# return spatial_filter
+
+# kernel = float(self.dc_offset)/len(nonzero_ind_tuple[0])+spatio_temporal_filter[nonzero_ind_tuple]
+
+# def rectifying_filter_factory(kernel, movie, dc_offset=0):
+#
+# def rectifying_filter(t):
+#
+# fi = movie.frame_rate*float(t)
+# fim, fiM = np.floor(fi), np.ceil(fi)
+#
+# print t, fim, fiM
+#
+# try:
+# s1 = (movie.movie_data[int(fim)+kernel.t_ind_list, kernel.row_ind_list, kernel.col_ind_list]*kernel.kernel).sum()
+# s2 = (movie.movie_data[int(fiM)+kernel.t_ind_list, kernel.row_ind_list, kernel.col_ind_list]*kernel.kernel).sum()
+# except IndexError:
+# return None
+#
+# # Linear interpolation:
+# s_pre = dc_offset + s1*((1-(fi-fim))*.5) + s2*((fi-fim)*.5)
+#
+# if s_pre < 0:
+# return 0
+# else:
+# return float(s_pre)
+#
+# return rectifying_filter
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/cursor.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/cursor.py
new file mode 100644
index 0000000..8406fd1
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/cursor.py
@@ -0,0 +1,266 @@
+from .movie import Movie
+import numpy as np
+from .linearfilter import SpatioTemporalFilter
+from .spatialfilter import GaussianSpatialFilter
+from .temporalfilter import TemporalFilterCosineBump
+from .utilities import convert_tmin_tmax_framerate_to_trange
+import matplotlib.pyplot as plt
+from .kernel import Kernel3D
+import scipy.signal as spsig
+import time
+
+class KernelCursor(object):
+
+
+ def __init__(self, kernel, movie):
+
+ self.movie = movie
+ self.kernel = kernel
+ self.cache = {}
+
+ # print self.kernel.t_range.min(), self.kernel.t_range.max(), type(kernel), len(self.kernel)
+
+ # This ensures that the kernel frame rate matches the movie frame rate:
+ np.testing.assert_almost_equal(np.diff(self.kernel.t_range), np.ones_like(self.kernel.t_range[1:])*(1./movie.frame_rate))
+
+ @property
+ def row_range(self):
+ return self.movie.row_range
+
+ @property
+ def col_range(self):
+ return self.movie.col_range
+
+ @property
+ def t_range(self):
+ return self.movie.t_range
+
+ @property
+ def frame_rate(self):
+ return self.movie.frame_rate
+
+    def evaluate(self, t_min=None, t_max=None, downsample=1):
+
+
+ # print 'EVALUATE'
+ if t_max is None:
+ t_max = self.t_range[-1]
+
+ if t_min is None:
+ t_min = self.t_range[0]
+
+ t_range = convert_tmin_tmax_framerate_to_trange(t_min, t_max, self.movie.frame_rate)[::int(downsample)]
+ y_vals = np.array([self(t) for t in t_range])
+
+ return t_range, y_vals
+
+ def __call__(self, t):
+
+
+
+ if t < self.t_range[0] or t > self.t_range[-1]:
+ curr_rate = 0
+ else:
+# print 'zero'
+
+ ti = t*self.frame_rate
+ til, tir = int(np.floor(ti)), int(np.ceil(ti))
+
+ tl, tr = float(til)/self.frame_rate, float(tir)/self.frame_rate
+ if np.abs(tl-t)<1e-12:
+ curr_rate = self.apply_dot_product(til)
+ # print 'a'
+
+ elif np.abs(tr-t)<1e-12:
+ curr_rate = self.apply_dot_product(tir)
+ # print 'b'
+ else:
+ wa, wb = (1-(t-tl)/(tr-tl)), (1-(tr-t)/(tr-tl))
+ cl = self.apply_dot_product(til)
+ cr = self.apply_dot_product(tir)
+ curr_rate = cl*wa+cr*wb
+ # print 'c'
+
+        if np.isnan(curr_rate):
+            raise RuntimeError('NaN firing rate in KernelCursor.__call__')
+
+ return curr_rate
+
+ def apply_dot_product(self, ti_offset):
+
+ try:
+ return self.cache[ti_offset]
+
+ except KeyError:
+ t_inds = self.kernel.t_inds + ti_offset + 1 # Offset by one nhc 14 Apr '17
+ min_ind, max_ind = 0, self.movie.data.shape[0]
+ allowed_inds = np.where(np.logical_and(min_ind <= t_inds, t_inds < max_ind))
+ t_inds = t_inds[allowed_inds]
+ row_inds = self.kernel.row_inds[allowed_inds]
+ col_inds = self.kernel.col_inds[allowed_inds]
+ kernel_vector = self.kernel.kernel[allowed_inds]
+ result = np.dot(self.movie[t_inds, row_inds, col_inds],kernel_vector)
+            self.cache[ti_offset] = result
+ return result
+
+class FilterCursor(KernelCursor):
+
+ def __init__(self, spatiotemporal_filter, movie, threshold=0):
+
+ self.spatiotemporal_filter = spatiotemporal_filter
+ kernel = self.spatiotemporal_filter.get_spatiotemporal_kernel(movie.row_range, movie.col_range, t_range=movie.t_range, threshold=threshold, reverse=True)
+
+ super(FilterCursor, self).__init__(kernel, movie)
+
+class LNUnitCursor(KernelCursor):
+
+ def __init__(self, lnunit, movie, threshold=0):
+
+ # print 'LNUnitCursor'
+
+ self.lnunit = lnunit
+
+ kernel = lnunit.get_spatiotemporal_kernel(movie.row_range, movie.col_range, movie.t_range, reverse=True, threshold=threshold)
+
+ kernel.apply_threshold(threshold)
+
+ super(LNUnitCursor, self).__init__(kernel, movie)
+
+ def __call__(self, t):
+ return self.lnunit.transfer_function(super(LNUnitCursor, self).__call__(t))
+
+class MultiLNUnitCursor(object):
+
+ def __init__(self, multi_lnunit, movie, threshold=0):
+
+ self.multi_lnunit = multi_lnunit
+ self.lnunit_cursor_list = [LNUnitCursor(lnunit, movie, threshold=threshold) for lnunit in multi_lnunit.lnunit_list]
+ self.movie = movie
+
+ def evaluate(self, **kwargs):
+
+# print len(self.lnunit_cursor_list)
+# for ii, x in enumerate(self.lnunit_cursor_list):
+#
+# print ii, self.multi_lnunit, self.multi_lnunit.transfer_function, x
+# print ii, x.evaluate(**kwargs), kwargs
+# print 'done'
+# # print lnunit, movie, curr_cursor
+
+
+
+ multi_e = [unit_cursor.evaluate(**kwargs) for unit_cursor in self.lnunit_cursor_list]
+ t_list, y_list = zip(*multi_e)
+
+# plt.figure()
+# plt.plot(t_list[0],y_list[0])
+# plt.plot(t_list[0],y_list[1],'r')
+# plt.show()
+
+ #sys.exit()
+
+# print len(y_list)
+
+ return t_list[0], self.multi_lnunit.transfer_function(*y_list)
+
+class MultiLNUnitMultiMovieCursor(MultiLNUnitCursor):
+
+ def __init__(self, multi_lnunit, movie_list, threshold=0.):
+
+ assert len(multi_lnunit.lnunit_list) == len(movie_list)
+
+ self.multi_lnunit = multi_lnunit
+ self.lnunit_movie_list = movie_list
+ self.lnunit_cursor_list = [lnunit.get_cursor(movie, threshold=threshold) for lnunit, movie in zip(multi_lnunit.lnunit_list, movie_list)]
+# for lnunit, movie, curr_cursor in zip(multi_lnunit.lnunit_list, movie_list, self.lnunit_cursor_list):
+# print lnunit, movie, curr_cursor
+
+class SeparableKernelCursor(object):
+
+ def __init__(self, spatial_kernel, temporal_kernel, movie):
+ '''Assumes temporal kernel is not reversed'''
+
+ self.movie = movie
+ self.spatial_kernel = spatial_kernel
+ self.temporal_kernel = temporal_kernel
+
+ def evaluate(self, threshold=0):
+
+ full_spatial_kernel = np.array([self.spatial_kernel.full()])
+ full_temporal_kernel = self.temporal_kernel.full()
+
+ nonzero_inds = np.where(np.abs(full_spatial_kernel[0,:,:])>=threshold)
+ rm, rM = nonzero_inds[0].min(), nonzero_inds[0].max()
+ cm, cM = nonzero_inds[1].min(), nonzero_inds[1].max()
+
+ convolution_answer_sep_spatial = (self.movie.data[:,rm:rM+1, cm:cM+1] * full_spatial_kernel[:,rm:rM+1, cm:cM+1]).sum(axis=1).sum(axis=1)
+ sig_tmp = np.zeros(len(full_temporal_kernel) + len(convolution_answer_sep_spatial) - 1)
+ sig_tmp[len(full_temporal_kernel)-1:] = convolution_answer_sep_spatial
+ convolution_answer_sep = spsig.convolve(sig_tmp, full_temporal_kernel[::-1], mode='valid')
+ t = np.arange(len(convolution_answer_sep))/self.movie.frame_rate
+ return t, convolution_answer_sep
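+
+    # Separability is what makes this cheap: each movie frame collapses to a
+    # scalar via a spatial dot product restricted to the kernel's nonzero
+    # bounding box, and the resulting 1-D trace is convolved with the temporal
+    # kernel. For T frames on an RxC grid and a K-tap temporal kernel this is
+    # roughly O(T*R*C + T*K) work, versus O(T*R*C*K) for a full 3-D kernel.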
+
+
+class SeparableSpatioTemporalFilterCursor(SeparableKernelCursor):
+
+ def __init__(self, spatiotemporal_filter, movie):
+
+ self.spatial_filter = spatiotemporal_filter.spatial_filter
+ self.temporal_filter = spatiotemporal_filter.temporal_filter
+
+ spatial_kernel = self.spatial_filter.get_kernel(movie.row_range, movie.col_range, threshold=-1)
+ temporal_kernel = self.temporal_filter.get_kernel(t_range=movie.t_range, threshold=0, reverse=True)
+ spatial_kernel.kernel *= spatiotemporal_filter.amplitude
+
+ super(SeparableSpatioTemporalFilterCursor, self).__init__(spatial_kernel,
+ temporal_kernel,
+ movie)
+
+
+class SeparableLNUnitCursor(SeparableSpatioTemporalFilterCursor):
+ def __init__(self, lnunit, movie):
+ self.lnunit = lnunit
+
+ super(SeparableLNUnitCursor, self).__init__(self.lnunit.linear_filter, movie)
+
+    def evaluate(self, downsample=1):
+
+ assert downsample == 1
+
+ t, y = super(SeparableLNUnitCursor, self).evaluate()
+
+ return t, [self.lnunit.transfer_function(yi) for yi in y]
+
+class SeparableMultiLNUnitCursor(object):
+
+ def __init__(self, multilnunit, movie):
+
+ self.multilnunit = multilnunit
+
+ self.lnunit_cursor_list = []
+ for lnunit in self.multilnunit.lnunit_list:
+ self.lnunit_cursor_list.append(SeparableLNUnitCursor(lnunit, movie))
+
+ def evaluate(self, *args, **kwargs):
+
+ assert kwargs.get('downsample', 1) == 1
+
+ y_list = []
+ for cursor in self.lnunit_cursor_list:
+ t, y = cursor.evaluate(*args, **kwargs)
+ y_list.append(y)
+
+ return t, self.multilnunit.transfer_function(*y_list)
+
+# if __name__ == "__main__":
+# spatial_filter_1 = GaussianSpatialFilter(sigma=(2.,2.), amplitude=10)
+# temporal_filter = TemporalFilterCosineBump((.4,-.3), (40,80))
+# curr_filter = SpatioTemporalFilter(spatial_filter_1, temporal_filter)
+#
+# movie_file = '/data/mat/iSee_temp_shared/movies/TouchOfEvil.npy'
+# m_data = np.load(movie_file, 'r')
+# movie = Movie(m_data[:,:,:], frame_rate=30.)
+# cursor = FilterCursor(curr_filter, movie, threshold=-1)
+# cursor.evaluate()
+
+
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/fitfuns.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/fitfuns.py
new file mode 100644
index 0000000..5b67919
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/fitfuns.py
@@ -0,0 +1,190 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Thu Nov 13 17:07:50 2014
+
+@author: rami
+"""
+import os
+from math import *
+import numpy as np
+import numpy.fft as npft
+from random import *
+import scipy.io as sio
+#import statsmodels.api as sm
+from scipy import stats
+import matplotlib.pyplot as plt
+
+def makeFitStruct_GLM(dtsim,kbasprs,nkt,flag_exp):
+
+ gg = {}
+ gg['k'] = []
+ gg['dc'] = 0
+ gg['kt'] = np.zeros((nkt,1))
+ gg['ktbas'] = []
+ gg['kbasprs'] = kbasprs
+ gg['dt'] = dtsim
+
+ nkt = nkt
+ if flag_exp==0:
+ ktbas = makeBasis_StimKernel(kbasprs,nkt)
+ else:
+ ktbas = makeBasis_StimKernel_exp(kbasprs,nkt)
+
+ gg['ktbas'] = ktbas
+ gg['k'] = gg['ktbas']*gg['kt']
+
+ return gg
+
+def makeBasis_StimKernel(kbasprs,nkt):
+
+ neye = kbasprs['neye']
+ ncos = kbasprs['ncos']
+ kpeaks = kbasprs['kpeaks']
+ kdt = 1
+ b = kbasprs['b']
+ delays_raw = kbasprs['delays']
+ delays = delays_raw[0].astype(int)
+
+ ylim = np.array([100.,200.]) # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!HARD-CODED FOR NOW
+# yrnge = nlin(kpeaks + b*np.ones(np.shape(kpeaks)))
+ yrnge = nlin(ylim + b*np.ones(np.shape(kpeaks)))
+ db = (yrnge[-1]-yrnge[0])/(ncos-1)
+ ctrs = nlin(np.array(kpeaks))#yrnge
+ mxt = invnl(yrnge[ncos-1]+2*db)-b
+ kt0 = np.arange(0,mxt,kdt) #-delay
+ nt = len(kt0)
+ e1 = np.tile(nlin(kt0+b*np.ones(np.shape(kt0))),(ncos,1))
+ e2 = np.transpose(e1)
+ e3 = np.tile(ctrs,(nt,1))
+
+ kbasis0 = []
+ for kk in range(ncos):
+ kbasis0.append(ff(e2[:,kk],e3[:,kk],db))
+
+
+ #Concatenate identity vectors
+ nkt0 = np.size(kt0,0)
+ a1 = np.concatenate((np.eye(neye), np.zeros((nkt0,neye))),axis=0)
+ a2 = np.concatenate((np.zeros((neye,ncos)),np.array(kbasis0).T),axis=0)
+ kbasis = np.concatenate((a1,a2),axis=1)
+ kbasis = np.flipud(kbasis)
+ nkt0 = np.size(kbasis,0)
+
+ if nkt0 < nkt:
+ kbasis = np.concatenate((np.zeros((nkt-nkt0,ncos+neye)),kbasis),axis=0)
+ elif nkt0 > nkt:
+ kbasis = kbasis[-1-nkt:-1,:]
+
+
+ kbasis = normalizecols(kbasis)
+
+# plt.figure()
+# plt.plot(kbasis[:,0],'b')
+# plt.plot(kbasis[:,1],'r')
+# plt.show()
+#
+# print kpeaks
+# print nkt0, nkt
+# print delays[0][0], delays[0][1]
+# print sev
+ kbasis2_0 = np.concatenate((kbasis[:,0],np.zeros((delays[0],))),axis=0)
+ kbasis2_1 = np.concatenate((kbasis[:,1],np.zeros((delays[1],))),axis=0)
+
+# plt.figure()
+# plt.plot(kbasis2_0,'b')
+# plt.plot(kbasis2_1,'r')
+# plt.show(block=False)
+
+ len_diff = delays[1]-delays[0]
+ kbasis2_1 = kbasis2_1[len_diff:]
+
+ kbasis2 = np.zeros((len(kbasis2_0),2))
+ kbasis2[:,0] = kbasis2_0
+ kbasis2[:,1] = kbasis2_1
+ # print(np.shape(kbasis2_0))
+ # print(len(kbasis2_0), len(kbasis2_1))
+
+
+# plt.figure()
+# plt.plot(kbasis[:,0],'b')
+# plt.plot(kbasis[:,1],'r')
+# plt.plot(kbasis2_0,'m')
+# plt.plot(kbasis2_1,'k')
+# plt.show(block=False)
+
+ kbasis2 = normalizecols(kbasis2)
+
+ return kbasis2
+
+
+def makeBasis_StimKernel_exp(kbasprs,nkt):
+ ks = kbasprs['ks']
+ b = kbasprs['b']
+ x0 = np.arange(0,nkt)
+ kbasis = np.zeros((nkt,len(ks)))
+ for ii in range(len(ks)):
+ kbasis[:,ii] = invnl(-ks[ii]*x0) #(1.0/ks[ii])*
+
+ kbasis = np.flipud(kbasis)
+ #kbasis = normalizecols(kbasis)
+
+ return kbasis
+
+def nlin(x):
+ eps = 1e-20
+ #x.clip(0.)
+
+ return np.log(x+eps)
+
+def invnl(x):
+ eps = 1e-20
+ return np.exp(x)-eps
+
+def ff(x,c,dc):
+ rowsize = np.size(x,0)
+ m = []
+ for i in range(rowsize):
+ xi = x[i]
+ ci = c[i]
+ val=(np.cos(np.max([-pi,np.min([pi,(xi-ci)*pi/dc/2])]))+1)/2
+ m.append(val)
+
+ return np.array(m)
+
+def normalizecols(A):
+    # Normalize each column of A to unit L2 norm.
+    B = A/np.tile(np.sqrt(np.sum(A**2, axis=0)), (np.size(A, 0), 1))
+
+    return B
+
+def sameconv(A,B):
+
+ am = np.size(A)
+ bm = np.size(B)
+ nn = am+bm-1
+
+ q = npft.fft(A,nn)*npft.fft(np.flipud(B),nn)
+ p = q
+ G = npft.ifft(p)
+ G = G[range(am)]
+
+ return G
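+
+# Note: sameconv is a causal FFT-based convolution of A with the reversed B,
+# truncated to len(A) samples. Up to floating-point error (and a negligible
+# imaginary residue from the inverse FFT) it matches:
+#
+#   np.convolve(A, np.flipud(B), mode='full')[:np.size(A)]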
+
+# kbasprs = {}
+# kbasprs['neye'] = 0
+# kbasprs['ncos'] = 2
+# kbasprs['kpeaks'] = 40,80
+# kbasprs['b'] = .3
+#
+# nkt = 400
+#
+# filter_data = makeBasis_StimKernel(kbasprs, nkt)
+#
+# print filter_data
+#
+# print [x for x in filter_data.T]
+#
+# import matplotlib.pyplot as plt
+# plt.plot(filter_data[:,0]+filter_data[:,1])
+# plt.show()
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/kernel.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/kernel.py
new file mode 100644
index 0000000..820b1a3
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/kernel.py
@@ -0,0 +1,475 @@
+#from matplotlib import _cntr as cntr
+import matplotlib as mpl
+from mpl_toolkits.mplot3d import Axes3D
+from matplotlib import cm
+import scipy.interpolate as spinterp
+import h5py
+import numpy as np
+import bisect
+import matplotlib.pyplot as plt
+
+def find_l_r_in_t_range(t_range, t):
+
+ for tl in range(len(t_range)-1):
+ tr = tl+1
+ test_val = (t_range[tl]-t)*(t_range[tr]-t)
+ if np.abs(test_val) < 1e-16:
+
+ if np.abs(t_range[tl]-t) < 1e-16:
+ return (tl,)
+ else:
+ return (tr,)
+
+        elif test_val < 0:
+            return tl, tr
+
+def get_contour(X, Y, Z, c):
+    # The private matplotlib._cntr API was removed; use the public contour
+    # machinery and pull the line segments for the requested level instead.
+    contour_obj = plt.contour(X, Y, Z, levels=[c])
+    segs = contour_obj.allsegs[0]
+    if len(segs) > 0:
+        seg = segs[0]
+        return seg[:, 0], seg[:, 1]
+    else:
+        return [], []
+
+def plot_single_contour(ax, x_contour, y_contour, t, color):
+ t_contour = t+np.zeros_like(x_contour)
+ ax.plot(x_contour, t_contour, y_contour, zdir='z', color=color)
+
+
+class Kernel1D(object):
+
+ def rescale(self):
+ #self.kernel /= np.abs(self.kernel).sum()
+ if np.abs(self.kernel.sum())!=0:
+ self.kernel /= np.abs(self.kernel.sum())
+
+ def normalize(self):
+# self.kernel /= np.abs(self.kernel).sum()
+ self.kernel /= np.abs(self.kernel.sum())
+# self.kernel /= self.kernel.sum()
+
+
+ def __init__(self, t_range, kernel_array, threshold=0., reverse=False):
+ assert len(t_range) == len(kernel_array)
+
+ kernel_array = np.array(kernel_array)
+ inds_to_keep = np.where(np.abs(kernel_array) > threshold)
+
+ if reverse == True:
+ self.t_range = -np.array(t_range)[::-1]
+
+ t_inds_tmp = inds_to_keep[0]
+ max_t_ind = t_inds_tmp.max()
+ reversed_t_inds = max_t_ind - t_inds_tmp
+            self.t_inds = reversed_t_inds - max_t_ind - 1  # Had an off-by-one error here; should be "- 1" (nhc 14 Apr '17, matching change made in cursor evaluate)
+
+ else:
+ self.t_range = np.array(t_range)
+ self.t_inds = inds_to_keep[0]
+
+ self.kernel = kernel_array[inds_to_keep]
+ assert len(self.t_inds) == len(self.kernel)
+
+ def __len__(self):
+ return len(self.kernel)
+
+ def imshow(self, ax=None, show=True, save_file_name=None, ylim=None, xlim=None,color='b'):
+
+ if ax is None:
+ _, ax = plt.subplots(1,1)
+
+ t_vals = self.t_range[self.t_inds]
+
+ ax.plot(t_vals, self.kernel, color)
+ ax.set_xlabel('Time (Seconds)')
+
+ if not ylim is None:
+ ax.set_ylim(ylim)
+
+ if not xlim is None:
+ ax.set_xlim(xlim)
+ else:
+ a,b=(t_vals[0], t_vals[-1])
+ ax.set_xlim(min(a,b), max(a,b))
+
+ if not save_file_name is None:
+ ax.savefig(save_file_name, transparent=True)
+
+ if show == True:
+ plt.show()
+
+ return ax, (t_vals, self.kernel)
+
+    def full(self, truncate_t=True):
+        data = np.zeros(len(self.t_range))
+        data[self.t_inds] = self.kernel
+
+        if truncate_t:
+            ind_min = np.where(np.abs(data) > 0)[0].min()
+            return data[ind_min:]
+        else:
+            return data
+
+class Kernel2D(object):
+
+ def rescale(self):
+ #self.kernel /= np.abs(self.kernel).sum()
+ if np.abs(self.kernel.sum())!=0:
+ self.kernel /= np.abs(self.kernel.sum())
+
+ def normalize(self):
+# self.kernel /= np.abs(self.kernel).sum()
+ self.kernel /= np.abs(self.kernel.sum())
+
+ @classmethod
+ def from_dense(cls, row_range, col_range, kernel_array, threshold=0.):
+ col_range = np.array(col_range).copy()
+ row_range = np.array(row_range).copy()
+ kernel_array = np.array(kernel_array).copy()
+ inds_to_keep = np.where(np.abs(kernel_array) > threshold)
+ kernel = kernel_array[inds_to_keep]
+ if len(inds_to_keep) == 1:
+ col_inds, row_inds = np.array([]), np.array([])
+ else:
+ col_inds, row_inds = inds_to_keep
+
+ return cls(row_range, col_range, row_inds, col_inds, kernel)
+
+ @classmethod
+ def copy(cls, instance):
+ return cls(instance.row_range.copy(),
+ instance.col_range.copy(),
+ instance.row_inds.copy(),
+ instance.col_inds.copy(),
+ instance.kernel.copy())
+
+
+ def __init__(self, row_range, col_range, row_inds, col_inds, kernel):
+
+
+ self.col_range = np.array(col_range)
+ self.row_range = np.array(row_range)
+ self.row_inds = np.array(row_inds)
+ self.col_inds = np.array(col_inds)
+
+ self.kernel = np.array(kernel)
+
+ assert len(self.row_inds) == len(self.col_inds)
+ assert len(self.row_inds) == len(self.kernel)
+
+ def __mul__(self, constant):
+
+ new_copy = Kernel2D.copy(self)
+ new_copy.kernel *= constant
+ return new_copy
+
+ def __add__(self, other):
+
+
+ if len(other) == 0:
+ return self
+
+ try:
+ np.testing.assert_almost_equal(self.row_range, other.row_range)
+ np.testing.assert_almost_equal(self.col_range, other.col_range)
+ except:
+ raise Exception('Kernels must exist on same grid to be added')
+
+ row_range = self.row_range.copy()
+ col_range = self.col_range.copy()
+
+ kernel_dict = {}
+ for key, ker in zip(zip(self.row_inds, self.col_inds), self.kernel):
+ kernel_dict[key] = kernel_dict.setdefault(key, 0) + ker
+ for key, ker in zip(zip(other.row_inds, other.col_inds), other.kernel):
+ kernel_dict[key] = kernel_dict.setdefault(key, 0) + ker
+
+ key_list, kernel_list = zip(*kernel_dict.items())
+ row_inds_list, col_inds_list = zip(*key_list)
+ row_inds = np.array(row_inds_list)
+ col_inds = np.array(col_inds_list)
+ kernel = np.array(kernel_list)
+
+ return Kernel2D(row_range, col_range, row_inds, col_inds, kernel)
+
+ def apply_threshold(self, threshold):
+
+ inds_to_keep = np.where(np.abs(self.kernel) > threshold)
+ self.row_inds = self.row_inds[inds_to_keep]
+ self.col_inds = self.col_inds[inds_to_keep]
+ self.kernel = self.kernel[inds_to_keep]
+
+ def full(self):
+ data = np.zeros((len(self.row_range), len(self.col_range)))
+ data[self.row_inds, self.col_inds] = self.kernel
+ return data
+
+ def imshow(self, ax=None, show=True, save_file_name=None, clim=None, colorbar=True):
+
+ from mpl_toolkits.axes_grid1 import make_axes_locatable
+
+ if ax is None:
+ _, ax = plt.subplots(1,1)
+
+ if colorbar == True:
+ divider = make_axes_locatable(ax)
+ cax = divider.append_axes("right", size = "5%", pad = 0.05)
+
+ data = self.full()
+
+ if not clim is None:
+ im = ax.imshow(data, extent=(self.col_range[0], self.col_range[-1], self.row_range[0], self.row_range[-1]), origin='lower', clim=clim, interpolation='none')
+ else:
+ im = ax.imshow(data, extent=(self.col_range[0], self.col_range[-1], self.row_range[0], self.row_range[-1]), origin='lower', interpolation='none')
+
+ if colorbar == True:
+ plt.colorbar(im,cax=cax)
+
+ if not save_file_name is None:
+ plt.savefig(save_file_name, transparent=True)
+
+ if show == True:
+ plt.show()
+
+ return ax, data
+
+ def __len__(self):
+ return len(self.kernel)
+
+class Kernel3D(object):
+
+ def rescale(self):
+ #self.kernel /= np.abs(self.kernel).sum()
+ if np.abs(self.kernel.sum())!=0:
+ self.kernel /= np.abs(self.kernel.sum())
+
+ def normalize(self):
+ #self.kernel /= np.abs(self.kernel).sum()
+# print self.kernel.sum()
+ self.kernel /= (self.kernel.sum())*np.sign(self.kernel.sum())
+# print self.kernel.sum()
+# sys.exit()
+
+ @classmethod
+ def copy(cls, instance):
+ return cls(instance.row_range.copy(),
+ instance.col_range.copy(),
+ instance.t_range.copy(),
+ instance.row_inds.copy(),
+ instance.col_inds.copy(),
+ instance.t_inds.copy(),
+ instance.kernel.copy())
+
+ def __len__(self):
+ return len(self.kernel)
+
+ def __init__(self, row_range, col_range, t_range, row_inds, col_inds, t_inds, kernel):
+
+ self.col_range = np.array(col_range)
+ self.row_range = np.array(row_range)
+ self.t_range = np.array(t_range)
+ self.col_inds = np.array(col_inds)
+ self.row_inds = np.array(row_inds)
+ self.t_inds = np.array(t_inds)
+ self.kernel = np.array(kernel)
+
+ assert len(self.row_inds) == len(self.col_inds)
+ assert len(self.row_inds) == len(self.t_inds)
+ assert len(self.row_inds) == len(self.kernel)
+
+ def apply_threshold(self, threshold):
+
+ inds_to_keep = np.where(np.abs(self.kernel) > threshold)
+ self.row_inds = self.row_inds[inds_to_keep]
+ self.col_inds = self.col_inds[inds_to_keep]
+ self.t_inds = self.t_inds[inds_to_keep]
+ self.kernel = self.kernel[inds_to_keep]
+
+ def __add__(self, other):
+
+
+ if len(other) == 0:
+ return self
+
+ try:
+ if not (len(self.row_range) == 0 or len(other.row_range) == 0):
+ np.testing.assert_almost_equal(self.row_range, other.row_range)
+ if not (len(self.col_range) == 0 or len(other.col_range) == 0):
+ np.testing.assert_almost_equal(self.col_range, other.col_range)
+ if not (len(self.t_range) == 0 or len(other.t_range) == 0):
+ np.testing.assert_almost_equal(self.t_range, other.t_range)
+ except:
+ raise Exception('Kernels must exist on same grid to be added')
+
+ if len(self.row_range) == 0:
+ row_range = other.row_range.copy()
+ else:
+ row_range = self.row_range.copy()
+ if len(self.col_range) == 0:
+ col_range = other.col_range.copy()
+ else:
+ col_range = self.col_range.copy()
+ if len(self.t_range) == 0:
+ t_range = other.t_range.copy()
+ else:
+ t_range = self.t_range.copy()
+
+ kernel_dict = {}
+ for key, ker in zip(zip(self.row_inds, self.col_inds, self.t_inds), self.kernel):
+ kernel_dict[key] = kernel_dict.setdefault(key, 0) + ker
+ for key, ker in zip(zip(other.row_inds, other.col_inds, other.t_inds), other.kernel):
+ kernel_dict[key] = kernel_dict.setdefault(key, 0) + ker
+
+ key_list, kernel_list = zip(*kernel_dict.items())
+ row_inds_list, col_inds_list, t_inds_list = zip(*key_list)
+ row_inds = np.array(row_inds_list)
+ col_inds = np.array(col_inds_list)
+ t_inds = np.array(t_inds_list)
+ kernel = np.array(kernel_list)
+
+ return Kernel3D(row_range, col_range, t_range, row_inds, col_inds, t_inds, kernel)
+
+ def __mul__(self, constant):
+
+ new_copy = Kernel3D.copy(self)
+ new_copy.kernel *= constant
+ return new_copy
+
+ def t_slice(self, t):
+
+ ind_list = find_l_r_in_t_range(self.t_range, t)
+
+ if ind_list is None:
+ return None
+
+ elif len(ind_list) == 1:
+
+ t_ind_i = ind_list[0]
+ inds_i = np.where(self.t_range[self.t_inds] == self.t_range[t_ind_i])
+ row_inds = self.row_inds[inds_i]
+ col_inds = self.col_inds[inds_i]
+ kernel = self.kernel[inds_i]
+ return Kernel2D(self.row_range, self.col_range, row_inds, col_inds, kernel)
+
+ else:
+ t_ind_l, t_ind_r = ind_list
+ t_l, t_r = self.t_range[t_ind_l], self.t_range[t_ind_r]
+
+ inds_l = np.where(self.t_range[self.t_inds] == self.t_range[t_ind_l])
+ inds_r = np.where(self.t_range[self.t_inds] == self.t_range[t_ind_r])
+ row_inds_l = self.row_inds[inds_l]
+ col_inds_l = self.col_inds[inds_l]
+ kernel_l = self.kernel[inds_l]
+ kl = Kernel2D(self.row_range, self.col_range, row_inds_l, col_inds_l, kernel_l)
+ row_inds_r = self.row_inds[inds_r]
+ col_inds_r = self.col_inds[inds_r]
+ kernel_r = self.kernel[inds_r]
+ kr = Kernel2D(self.row_range, self.col_range, row_inds_r, col_inds_r, kernel_r)
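+ # linear interpolation weights between the bracketing time slices:
+ # wa -> 1 as t -> t_l, wb -> 1 as t -> t_r, and wa + wb = 1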
+ wa, wb = (1-(t-t_l)/(t_r-t_l)), (1-(t_r-t)/(t_r-t_l))
+
+ return kl*wa + kr*wb
+
+ def full(self, truncate_t=True):
+
+ data = np.zeros((len(self.t_range), len(self.row_range), len(self.col_range)))
+ data[self.t_inds, self.row_inds, self.col_inds] = self.kernel
+
+ if truncate_t:
+ ind_min = np.where(np.abs(data) > 0)[0].min()
+ return data[ind_min:, :, :]
+ else:
+ return data
+
+
+ def imshow(self, ax=None, t_range=None, cmap=cm.bwr, N=10, show=True, save_file_name=None, kvals=None):
+
+ if ax is None:
+ fig = plt.figure()
+ ax = fig.add_subplot(projection='3d') # fig.gca(projection=...) was removed in matplotlib 3.6
+
+ if t_range is None:
+ t_range = self.t_range
+
+ slice_list_sparse = [self.t_slice(t) for t in t_range]
+ slice_list = []
+ slice_t_list = []
+ for curr_slice, curr_t in zip(slice_list_sparse, t_range):
+ if not curr_slice is None:
+ slice_list.append(curr_slice.full())
+ slice_t_list.append(curr_t)
+ all_slice_max = max(map(np.max, slice_list))
+ all_slice_min = min(map(np.min, slice_list))
+ upper_bound = max(np.abs(all_slice_max), np.abs(all_slice_min))
+ lower_bound = -upper_bound
+ norm = mpl.colors.Normalize(vmin=lower_bound, vmax=upper_bound)
+ color_mapper = cm.ScalarMappable(norm=norm, cmap=cmap).to_rgba
+
+ if kvals is None:
+ kvals = np.linspace(lower_bound, upper_bound, N)
+
+ X, Y = np.meshgrid(self.row_range, self.col_range)
+
+ contour_dict = {}
+ for kval in kvals:
+ for t_val, curr_slice in zip(slice_t_list, slice_list):
+ x_contour, y_contour = get_contour(Y, X, curr_slice.T, kval)
+ contour_dict[kval, t_val] = x_contour, y_contour
+ color = color_mapper(kval)
+ color = color[0], color[1], color[2], np.abs(kval)/upper_bound
+ plot_single_contour(ax, x_contour, y_contour, t_val, color)
+
+ ax.set_zlim(self.row_range[0], self.row_range[-1])
+ ax.set_ylim(self.t_range[0], self.t_range[-1])
+ ax.set_xlim(self.col_range[0], self.col_range[-1])
+
+ if save_file_name is not None:
+ plt.savefig(save_file_name, transparent=True)
+
+ if show:
+ plt.show()
+
+ return ax, contour_dict
+
+def merge_spatial_temporal(spatial_kernel, temporal_kernel, threshold=0):
+
+ t_range = temporal_kernel.t_range
+
+ spatiotemporal_kernel = np.ones(( len(temporal_kernel), len(spatial_kernel)))
+ spatiotemporal_kernel *= spatial_kernel.kernel[None, :]
+ spatiotemporal_kernel *= temporal_kernel.kernel[:,None]
+ spatiotemporal_kernel = spatiotemporal_kernel.reshape((np.prod(spatiotemporal_kernel.shape)))
+
+ spatial_coord_array = np.empty((len(spatial_kernel),2))
+ spatial_coord_array[:,0] = spatial_kernel.col_inds
+ spatial_coord_array[:,1] = spatial_kernel.row_inds
+
+ spatiotemporal_coord_array = np.zeros((len(spatial_kernel)*len(temporal_kernel), 3))
+ spatiotemporal_coord_array[:, 0:2] = np.kron(np.ones((len(temporal_kernel), 1)), spatial_coord_array)
+ spatiotemporal_coord_array[:, 2] = np.kron(temporal_kernel.t_inds, np.ones(len(spatial_kernel)))
+
+ col_inds, row_inds, t_inds = map(lambda x: x.astype(int), spatiotemporal_coord_array.T)
+ kernel = Kernel3D(spatial_kernel.row_range, spatial_kernel.col_range, t_range, row_inds, col_inds, t_inds, spatiotemporal_kernel)
+ kernel.apply_threshold(threshold)
+
+ return kernel
+
+
+
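+# Illustrative sketch (editor's example): building a separable space-time
+# kernel from the factory classes used elsewhere in this package
+# (SpatioTemporalFilter in linearfilter.py inlines the same logic).
+#
+# from .spatialfilter import GaussianSpatialFilter
+# from .temporalfilter import TemporalFilterCosineBump
+# spatial = GaussianSpatialFilter(sigma=(2., 2.)).get_kernel(np.arange(60), np.arange(80))
+# temporal = TemporalFilterCosineBump((.4, -.3), (20, 60)).get_kernel(t_range=None, threshold=0)
+# kernel = merge_spatial_temporal(spatial, temporal, threshold=0)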
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/lattice_unit_constructor.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/lattice_unit_constructor.py
new file mode 100644
index 0000000..4de580d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/lattice_unit_constructor.py
@@ -0,0 +1,254 @@
+import numpy as np
+import scipy.stats as sps
+
+from .make_cell_list import multi_cell_random_generator, make_single_unit_cell_list, make_on_off_cell_list
+
+def make_lattice_unit(lattice_unit_center=None):
+ cell_list = []
+ tON_cell_list = make_tON_cell_list(lattice_unit_center)
+ tOFF_cell_list = make_tOFF_cell_list(lattice_unit_center)
+ sON_cell_list = make_sON_cell_list(lattice_unit_center)
+ sOFF_cell_list = make_sOFF_cell_list(lattice_unit_center)
+ overlap_onoff_cell_list = make_overlapping_onoff_cell_list(lattice_unit_center)
+ separate_onoff_cell_list = make_separate_onoff_cell_list(lattice_unit_center)
+
+ cell_list = tON_cell_list + tOFF_cell_list + sON_cell_list + sOFF_cell_list + overlap_onoff_cell_list + separate_onoff_cell_list
+
+ return cell_list
+
+
+def make_tON_cell_list(lattice_unit_center):
+ tON_cell_list = []
+
+ single_unit_cell_config = {}
+ single_unit_cell_config['lattice_unit_center']=lattice_unit_center
+ single_unit_cell_config['width'] = 5.
+ sz = [3,6,9]
+ ncells = [5,3,2]
+ amp_dist = sps.rv_discrete(values=([20,25], [.5,.5]))
+# kpeaks_dist = sps.multivariate_normal(mean=[40., 80.], cov=[[5.0, 0], [0, 5]])
+# wts = (.4,-.2)
+ kpeaks_dist = sps.multivariate_normal(mean=[15., 35.], cov=[[5.0, 0], [0, 5]])
+ wts = (4.,-2.5)
+ delays = (0.,0.)
+ single_unit_cell_config['amplitude'] = amp_dist
+ single_unit_cell_config['kpeaks'] = kpeaks_dist
+ single_unit_cell_config['weights'] = wts
+ single_unit_cell_config['delays'] = delays
+ for num_cells, sig in zip(ncells,sz):
+ single_unit_cell_config['number_of_cells'] = num_cells
+ single_unit_cell_config['sigma'] = (sig,sig)
+# print single_unit_cell_config
+ tON_cell_list += multi_cell_random_generator(make_single_unit_cell_list, **single_unit_cell_config)
+
+ #print len(tON_cell_list)
+ return tON_cell_list
+
+def make_tOFF_cell_list(lattice_unit_center):
+ tOFF_cell_list = []
+
+ single_unit_cell_config = {}
+ single_unit_cell_config['lattice_unit_center']=lattice_unit_center
+ single_unit_cell_config['width'] = 5.
+ sz = [3,6,9]
+ ncells = [10,5,5]
+ amp_dist = sps.rv_discrete(values=([-20,-25], [.5,.5]))
+# kpeaks_dist = sps.multivariate_normal(mean=[40., 80.], cov=[[5.0, 0], [0, 5]])
+# wts = (.4,-.2)
+ kpeaks_dist = sps.multivariate_normal(mean=[15., 35.], cov=[[5.0, 0], [0, 5]])
+ wts = (4.,-2.5)
+ delays = (0.,0.)
+ single_unit_cell_config['amplitude'] = amp_dist
+ single_unit_cell_config['kpeaks'] = kpeaks_dist
+ single_unit_cell_config['weights'] = wts
+ single_unit_cell_config['delays'] = delays
+ for num_cells, sig in zip(ncells,sz):
+ single_unit_cell_config['number_of_cells'] = num_cells
+ single_unit_cell_config['sigma'] = (sig,sig)
+ tOFF_cell_list += multi_cell_random_generator(make_single_unit_cell_list, **single_unit_cell_config)
+
+ #print len(tOFF_cell_list)
+ return tOFF_cell_list
+
+def make_sON_cell_list(lattice_unit_center):
+ sON_cell_list = []
+
+ single_unit_cell_config = {}
+ single_unit_cell_config['lattice_unit_center']=lattice_unit_center
+ single_unit_cell_config['width'] = 5.
+ sz = [3,6,9]
+ ncells = [5,3,2]
+ amp_dist = sps.rv_discrete(values=([20,25], [.5,.5]))
+# kpeaks_dist = sps.multivariate_normal(mean=[100., 160.], cov=[[5.0, 0], [0, 5]])
+# wts = (.4,-.1)
+ kpeaks_dist = sps.multivariate_normal(mean=[80., 120.], cov=[[5.0, 0], [0, 5]])
+ wts = (4.,-.85)
+ delays = (0.,0.)
+ single_unit_cell_config['amplitude'] = amp_dist
+ single_unit_cell_config['kpeaks'] = kpeaks_dist
+ single_unit_cell_config['weights'] = wts
+ single_unit_cell_config['delays'] = delays
+ for num_cells, sig in zip(ncells,sz):
+ single_unit_cell_config['number_of_cells'] = num_cells
+ single_unit_cell_config['sigma'] = (sig,sig)
+ sON_cell_list += multi_cell_random_generator(make_single_unit_cell_list, **single_unit_cell_config)
+
+ #print len(sON_cell_list)
+ return sON_cell_list
+
+def make_sOFF_cell_list(lattice_unit_center):
+ sOFF_cell_list = []
+
+ single_unit_cell_config = {}
+ single_unit_cell_config['lattice_unit_center']=lattice_unit_center
+ single_unit_cell_config['width'] = 5.
+ sz = [3,6,9]
+ ncells = [10,5,5]
+ amp_dist = sps.rv_discrete(values=([-20,-25], [.5,.5]))
+# kpeaks_dist = sps.multivariate_normal(mean=[100., 160.], cov=[[5.0, 0], [0, 5]])
+ kpeaks_dist = sps.multivariate_normal(mean=[80., 120.], cov=[[5.0, 0], [0, 5]])
+# wts = (.4,-.1)
+ wts = (4.,-.85)
+ delays = (0.,0.)
+ single_unit_cell_config['amplitude'] = amp_dist
+ single_unit_cell_config['kpeaks'] = kpeaks_dist
+ single_unit_cell_config['weights'] = wts
+ single_unit_cell_config['delays'] = delays
+ for num_cells, sig in zip(ncells,sz):
+ single_unit_cell_config['number_of_cells'] = num_cells
+ single_unit_cell_config['sigma'] = (sig,sig)
+ sOFF_cell_list += multi_cell_random_generator(make_single_unit_cell_list, **single_unit_cell_config)
+
+ #print len(sOFF_cell_list)
+ return sOFF_cell_list
+
+def make_overlapping_onoff_cell_list(lattice_unit_center):
+ overlap_onoff_cell_list = []
+
+ two_unit_cell_config = {}
+ two_unit_cell_config['lattice_unit_center']=lattice_unit_center
+ two_unit_cell_config['width']=5.
+
+ ncells = 4
+ sz = 9
+ ang_dist = sps.rv_discrete(values=(np.arange(0,180,45), 1./ncells*np.ones(ncells)))
+ amp_on_dist = sps.rv_discrete(values=([20,25], [.5,.5]))
+ amp_off_dist = sps.rv_discrete(values=([-20,-25], [.5,.5]))
+# kpeak_on_dist = sps.multivariate_normal(mean=[40., 80.], cov=[[5.0, 0], [0, 5]])
+# kpeak_off_dist = sps.multivariate_normal(mean=[50., 90.], cov=[[5.0, 0], [0, 5]])
+# wts_on = wts_off = (.4,-.2)
+ kpeak_on_dist = sps.multivariate_normal(mean=[15., 35.], cov=[[5.0, 0], [0, 5]])
+ kpeak_off_dist = sps.multivariate_normal(mean=[20., 40.], cov=[[5.0, 0], [0, 5]])
+ wts_on = wts_off = (4.,-2.5)
+ delays_on = delays_off = (0.,0.)
+ subfield_sep = 2.
+
+ two_unit_cell_config['number_of_cells'] = ncells
+ two_unit_cell_config['ang'] = ang_dist
+ two_unit_cell_config['amplitude_on'] = amp_on_dist
+ two_unit_cell_config['amplitude_off'] = amp_off_dist
+ two_unit_cell_config['kpeaks_on'] = kpeak_on_dist
+ two_unit_cell_config['kpeaks_off'] = kpeak_off_dist
+ two_unit_cell_config['weights_on'] = wts_on
+ two_unit_cell_config['weights_off'] = wts_off
+ two_unit_cell_config['sigma_on'] = (sz,sz)
+ two_unit_cell_config['sigma_off'] = (sz,sz)
+ two_unit_cell_config['subfield_separation'] = subfield_sep
+ two_unit_cell_config['dominant_subunit']='on'
+ two_unit_cell_config['delays_on']=delays_on
+ two_unit_cell_config['delays_off']=delays_off
+
+ overlap_onoff_cell_list += multi_cell_random_generator(make_on_off_cell_list, **two_unit_cell_config)
+
+ #print len(overlap_onoff_cell_list)
+ return overlap_onoff_cell_list
+
+def make_separate_onoff_cell_list(lattice_unit_center):
+ separate_onoff_cell_list = []
+
+ two_unit_cell_config = {}
+ two_unit_cell_config['lattice_unit_center']=lattice_unit_center
+ two_unit_cell_config['width']=5.
+
+ ncells = 8
+ sz = 6
+ ang_dist = np.arange(0,360,45)
+ subfield_sep = 4.
+
+# kpeak_dom_dist = sps.multivariate_normal(mean=[40., 80.], cov=[[5.0, 0], [0, 5]])
+# kpeak_nondom_dist = sps.multivariate_normal(mean=[100., 160.], cov=[[5.0, 0], [0, 5]])
+# wts_dom = (.4,-.2)
+# wts_nondom = (.4,-.1)
+
+ kpeak_dom_dist = sps.multivariate_normal(mean=[15., 35.], cov=[[5.0, 0], [0, 5]])
+ kpeak_nondom_dist = sps.multivariate_normal(mean=[80., 120.], cov=[[5.0, 0], [0, 5]])
+ wts_dom = (4.,-2.5)
+ wts_nondom = (4,-.85)
+ delays_dom = delays_nondom = (0.,0.)
+
+ two_unit_cell_config['number_of_cells'] = ncells
+ two_unit_cell_config['ang'] = ang_dist
+ two_unit_cell_config['sigma_on'] = (sz,sz)
+ two_unit_cell_config['sigma_off'] = (sz,sz)
+ two_unit_cell_config['subfield_separation'] = subfield_sep
+
+ #On-dominant
+ dom_subunit = 'on'
+ if dom_subunit=='on':
+ two_unit_cell_config['dominant_subunit'] = dom_subunit
+ amp_dom_dist = sps.rv_discrete(values=([20,25], [.5,.5]))
+ amp_nondom_dist = sps.rv_discrete(values=([-10,-15], [.5,.5]))
+ two_unit_cell_config['amplitude_on'] = amp_dom_dist
+ two_unit_cell_config['amplitude_off'] = amp_nondom_dist
+ two_unit_cell_config['kpeaks_on'] = kpeak_dom_dist
+ two_unit_cell_config['kpeaks_off'] = kpeak_nondom_dist
+ two_unit_cell_config['weights_on'] = wts_dom
+ two_unit_cell_config['weights_off'] = wts_nondom
+ two_unit_cell_config['delays_on'] = delays_dom
+ two_unit_cell_config['delays_off'] = delays_nondom
+ separate_onoff_cell_list += multi_cell_random_generator(make_on_off_cell_list, **two_unit_cell_config)
+
+ #Off-dominant
+ dom_subunit = 'off'
+ if dom_subunit=='off':
+ two_unit_cell_config['dominant_subunit'] = dom_subunit
+ amp_dom_dist = sps.rv_discrete(values=([-20,-25], [.5,.5]))
+ amp_nondom_dist = sps.rv_discrete(values=([10,15], [.5,.5]))
+ two_unit_cell_config['amplitude_off'] = amp_dom_dist
+ two_unit_cell_config['amplitude_on'] = amp_nondom_dist
+ two_unit_cell_config['kpeaks_off'] = kpeak_dom_dist
+ two_unit_cell_config['kpeaks_on'] = kpeak_nondom_dist
+ two_unit_cell_config['weights_off'] = wts_dom
+ two_unit_cell_config['weights_on'] = wts_nondom
+ two_unit_cell_config['delays_off'] = delays_dom
+ two_unit_cell_config['delays_on'] = delays_nondom
+ separate_onoff_cell_list += multi_cell_random_generator(make_on_off_cell_list, **two_unit_cell_config)
+
+ #print len(separate_onoff_cell_list)
+ return separate_onoff_cell_list
+
+if __name__ == "__main__":
+ lattice_unit_center = (40,30)
+ lattice_cell_list = make_lattice_unit(lattice_unit_center)
+ print(len(lattice_cell_list))
+
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/lgnmodel1.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/lgnmodel1.py
new file mode 100644
index 0000000..1b04710
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/lgnmodel1.py
@@ -0,0 +1,87 @@
+import numpy as np
+import matplotlib.pyplot as plt
+
+def line_plot(evaluate_result, ax=None, show=True, save_file_name=None, xlabel=None, ylabel=None, plotstyle=None):
+
+ if ax is None:
+ _, ax = plt.subplots(1,1)
+
+ if not plotstyle is None:
+ for ((t_range, y_vals), curr_plotstyle) in zip(evaluate_result, plotstyle):
+ ax.plot(t_range, y_vals, curr_plotstyle)
+ else:
+ for t_range, y_vals in evaluate_result:
+ ax.plot(t_range, y_vals)
+
+ if xlabel is None:
+ ax.set_xlabel('Time (Seconds)')
+ else:
+ ax.set_xlabel(xlabel)
+
+ if ylabel is None:
+ ax.set_ylabel('Firing Rate (Hz)')
+ else:
+ ax.set_ylabel(ylabel)
+
+ if save_file_name is not None:
+ plt.savefig(save_file_name, transparent=True)
+
+ if show:
+ plt.show()
+
+def heat_plot(evaluate_result, ax=None, show=True, save_file_name=None, colorbar=True, **kwargs):
+
+ if ax is None:
+ _, ax = plt.subplots(1,1)
+
+ data = np.empty((len(evaluate_result), len(evaluate_result[0][0])))
+ for ii, (t_vals, y_vals) in enumerate(evaluate_result):
+ data[ii,:] = y_vals
+
+ cax = ax.pcolor(t_vals, np.arange(len(evaluate_result)), data, **kwargs)
+ ax.set_ylim([0,len(evaluate_result)-1])
+ ax.set_xlim([t_vals[0], t_vals[-1]])
+ ax.set_ylabel('Neuron id')
+ ax.set_xlabel('Time (Seconds)')
+
+ if colorbar == True:
+ plt.colorbar(cax)
+
+ if not save_file_name is None:
+ plt.savefig(save_file_name, transparent=True)
+
+ if show == True:
+ plt.show()
+
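+# Illustrative sketch (editor's example): heat_plot expects a list of
+# (t_vals, y_vals) pairs, one per cell, sharing a common time base.
+#
+# t = np.arange(0., 1., .01)
+# heat_plot([(t, np.sin(2*np.pi*f*t)) for f in (1., 2., 3.)])
+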
+
+
+
+class LGNModel(object):
+
+ def __init__(self, cell_list):
+ self.cell_list = cell_list
+
+ def evaluate(self, movie, **kwargs):
+ return [cell.evaluate(movie, **kwargs) for cell in self.cell_list]
+
+# def plot(self):
+# if show == True:
+# plt.show()
+
+
+# show = kwargs.pop('show', False)
+# data = [cell.evaluate_movie(movie, **kwargs) for cell in self.cell_list]
+# t_list, y_list, kernel_list = zip(*data)
+
+# if show == True:
+# for y in y_list:
+# plt.plot(t_list[0], y)
+# plt.show()
+#
+# return t_list[0], y_list, kernel_list
+
+ def __len__(self):
+ return len(self.cell_list)
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/linearfilter.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/linearfilter.py
new file mode 100644
index 0000000..af7fef2
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/linearfilter.py
@@ -0,0 +1,128 @@
+import numpy as np
+from .kernel import Kernel3D
+import matplotlib.pyplot as plt
+
+class SpatioTemporalFilter(object):
+
+ def __init__(self, spatial_filter, temporal_filter, amplitude=1.):
+
+ self.spatial_filter = spatial_filter
+ self.temporal_filter = temporal_filter
+ self.amplitude = amplitude
+
+ def get_spatiotemporal_kernel(self, row_range, col_range, t_range=None, threshold=0, reverse=False):
+
+ spatial_kernel = self.spatial_filter.get_kernel(row_range, col_range, threshold=0)
+ temporal_kernel = self.temporal_filter.get_kernel(t_range=t_range, threshold=0, reverse=reverse)
+
+ t_range = temporal_kernel.t_range
+
+ spatiotemporal_kernel = np.ones(( len(temporal_kernel), len(spatial_kernel)))
+ spatiotemporal_kernel *= spatial_kernel.kernel[None, :]
+
+ spatiotemporal_kernel *= temporal_kernel.kernel[:,None]
+ spatiotemporal_kernel = spatiotemporal_kernel.reshape((np.prod(spatiotemporal_kernel.shape)))
+
+ spatial_coord_array = np.empty((len(spatial_kernel),2))
+ spatial_coord_array[:,0] = spatial_kernel.col_inds
+ spatial_coord_array[:,1] = spatial_kernel.row_inds
+
+ spatiiotemporal_coord_array = np.zeros((len(spatial_kernel)*len(temporal_kernel),3))
+ spatiiotemporal_coord_array[:,0:2] = np.kron(np.ones((len(temporal_kernel),1)),spatial_coord_array)
+ spatiiotemporal_coord_array[:,2] = np.kron(temporal_kernel.t_inds, np.ones(len(spatial_kernel)))
+
+ col_inds, row_inds, t_inds = map(lambda x:x.astype(np.int),spatiiotemporal_coord_array.T)
+ kernel = Kernel3D(spatial_kernel.row_range, spatial_kernel.col_range, t_range, row_inds, col_inds, t_inds, spatiotemporal_kernel)
+ kernel.apply_threshold(threshold)
+
+
+ kernel.kernel *= self.amplitude
+
+
+ return kernel
+
+ def t_slice(self, t, *args, **kwargs):
+
+ k = self.get_spatiotemporal_kernel(*args, **kwargs)
+ return k.t_slice(t)
+
+ def show_temporal_filter(self, *args, **kwargs):
+
+ self.temporal_filter.imshow(*args, **kwargs)
+
+ def show_spatial_filter(self, *args, **kwargs):
+
+ self.spatial_filter.imshow(*args, **kwargs)
+
+ def to_dict(self):
+
+ return {'class':(__name__, self.__class__.__name__),
+ 'spatial_filter':self.spatial_filter.to_dict(),
+ 'temporal_filter':self.temporal_filter.to_dict(),
+ 'amplitude':self.amplitude}
+
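+# Illustrative sketch (editor's example; parameter values are arbitrary,
+# class signatures as used in lnunit.py's __main__ block):
+#
+# from .spatialfilter import GaussianSpatialFilter
+# from .temporalfilter import TemporalFilterCosineBump
+# stf = SpatioTemporalFilter(GaussianSpatialFilter(sigma=(2, 2), translate=(30, 40)),
+#                            TemporalFilterCosineBump((.4, -.3), (20, 60)),
+#                            amplitude=20)
+# kernel = stf.get_spatiotemporal_kernel(np.arange(60), np.arange(80))
+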
+# class OnOffSpatioTemporalFilter(SpatioTemporalFilter):
+#
+# def __init__(self, on_spatiotemporal_filter, off_spatiotemporal_filter):
+#
+# self.on_spatiotemporal_filter = on_spatiotemporal_filter
+# self.off_spatiotemporal_filter = off_spatiotemporal_filter
+#
+# def get_spatiotemporal_kernel(self, col_range, row_range, t_range=None, threshold=0, reverse=False):
+#
+# on_kernel = self.on_spatiotemporal_filter.get_spatiotemporal_kernel(col_range, row_range, t_range, threshold, reverse)
+# off_kernel = self.off_spatiotemporal_filter.get_spatiotemporal_kernel(col_range, row_range, t_range, threshold, reverse)
+#
+# return on_kernel + off_kernel*(-1)
+#
+# def to_dict(self):
+#
+# return {'class':(__name__, self.__class__.__name__),
+# 'on_filter':self.on_spatiotemporal_filter.to_dict(),
+# 'off_filter':self.off_spatiotemporal_filter.to_dict()}
+#
+# class TwoSubfieldLinearFilter(OnOffSpatioTemporalFilter):
+#
+# def __init__(self, dominant_spatiotemporal_filter, nondominant_spatiotemporal_filter, subfield_separation=10, onoff_axis_angle=45, dominant_subfield_location=(30,40)):
+#
+# self.subfield_separation = subfield_separation
+# self.onoff_axis_angle = onoff_axis_angle
+# self.dominant_subfield_location = dominant_subfield_location
+# self.dominant_spatiotemporal_filter = dominant_spatiotemporal_filter
+# self.nondominant_spatiotemporal_filter = nondominant_spatiotemporal_filter
+#
+# dom_amp = dominant_spatiotemporal_filter.spatial_filter.amplitude
+# nondom_amp = nondominant_spatiotemporal_filter.spatial_filter.amplitude
+# if dom_amp < 0 and nondom_amp > 0:
+# super(TwoSubfieldLinearFilter, self).__init__(self.nondominant_spatiotemporal_filter, self.dominant_spatiotemporal_filter)
+# elif dom_amp > 0 and nondom_amp < 0:
+# super(TwoSubfieldLinearFilter, self).__init__(self.dominant_spatiotemporal_filter, self.nondominant_spatiotemporal_filter)
+# else:
+# raise ValueError('Subfields are not of opposite polarity')
+#
+# self.dominant_spatiotemporal_filter.spatial_filter.translate = self.dominant_subfield_location
+# hor_offset = np.cos(self.onoff_axis_angle*np.pi/180.)*self.subfield_separation + self.dominant_subfield_location[0]
+# vert_offset = np.sin(self.onoff_axis_angle*np.pi/180.)*self.subfield_separation+ self.dominant_subfield_location[1]
+# rel_translation = (hor_offset,vert_offset)
+# self.nondominant_spatiotemporal_filter.spatial_filter.translate = rel_translation
+# self.nondominant_spatiotemporal_filter.spatial_filter.origin=self.dominant_spatiotemporal_filter.spatial_filter.origin
+#
+#
+# def to_dict(self):
+#
+# raise NotImplementedError
+#
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/lnunit.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/lnunit.py
new file mode 100644
index 0000000..ebc9952
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/lnunit.py
@@ -0,0 +1,380 @@
+import os
+import itertools
+import matplotlib.pyplot as plt
+import numpy as np
+from . import utilities as util
+import importlib
+from .kernel import Kernel2D, Kernel3D
+from .linearfilter import SpatioTemporalFilter
+import json
+from .spatialfilter import GaussianSpatialFilter
+from .transferfunction import ScalarTransferFunction
+from .temporalfilter import TemporalFilterCosineBump
+from .cursor import LNUnitCursor, MultiLNUnitCursor, MultiLNUnitMultiMovieCursor, SeparableLNUnitCursor, SeparableMultiLNUnitCursor
+from .movie import Movie
+from .lgnmodel1 import LGNModel, heat_plot
+from .transferfunction import MultiTransferFunction, ScalarTransferFunction
+
+
+class LNUnit(object):
+
+ def __init__(self, linear_filter, transfer_function, amplitude=1.):
+
+ self.linear_filter = linear_filter
+ self.transfer_function = transfer_function
+ self.amplitude = amplitude
+
+ def evaluate(self, movie, **kwargs):
+ return self.get_cursor(movie, separable=kwargs.pop('separable', False)).evaluate(**kwargs)
+
+ def get_spatiotemporal_kernel(self, *args, **kwargs):
+ return self.linear_filter.get_spatiotemporal_kernel(*args, **kwargs)
+
+ def get_cursor(self, movie, threshold=0, separable = False):
+ if separable:
+ return SeparableLNUnitCursor(self, movie)
+ else:
+ return LNUnitCursor(self, movie, threshold=threshold)
+
+ def show_temporal_filter(self, *args, **kwargs):
+ self.linear_filter.show_temporal_filter(*args, **kwargs)
+
+ def show_spatial_filter(self, *args, **kwargs):
+ self.linear_filter.show_spatial_filter(*args, **kwargs)
+
+ def to_dict(self):
+ return {'class':(__name__, self.__class__.__name__),
+ 'linear_filter':self.linear_filter.to_dict(),
+ 'transfer_function':self.transfer_function.to_dict()}
+
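+# Illustrative sketch (editor's example): a single LN unit evaluated on a
+# full-field flash; FullFieldFlashMovie lives in .movie, and the signatures
+# follow the __main__ block at the bottom of this module.
+#
+# movie = FullFieldFlashMovie(np.arange(60), np.arange(80), 1., 2., frame_rate=30).full(t_max=3)
+# stf = SpatioTemporalFilter(GaussianSpatialFilter(sigma=(2, 2), translate=(30, 40)),
+#                            TemporalFilterCosineBump((.4, -.3), (20, 60)), amplitude=20)
+# unit = LNUnit(stf, ScalarTransferFunction('Heaviside(s)*s'))
+# t, y = unit.evaluate(movie, downsample=10)
+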
+class MultiLNUnit(object):
+
+ def __init__(self, lnunit_list, transfer_function):
+
+ self.lnunit_list = lnunit_list
+ self.transfer_function = transfer_function
+
+ def get_spatiotemporal_kernel(self, *args, **kwargs):
+
+ k = Kernel3D([],[],[],[],[],[],[])
+ for unit in self.lnunit_list:
+ k = k+unit.get_spatiotemporal_kernel(*args, **kwargs)
+
+ return k
+
+ def show_temporal_filter(self, *args, **kwargs):
+
+ ax = kwargs.pop('ax', None)
+ show = kwargs.pop('show', None)
+ save_file_name = kwargs.pop('save_file_name', None)
+
+
+ if ax is None:
+ _, ax = plt.subplots(1,1)
+
+ kwargs.update({'ax':ax, 'show':False, 'save_file_name':None})
+ for unit in self.lnunit_list:
+ if unit.linear_filter.amplitude < 0:
+ color='b'
+ else:
+ color='r'
+ unit.linear_filter.show_temporal_filter(color=color, **kwargs)
+
+ if not save_file_name is None:
+ plt.savefig(save_file_name, transparent=True)
+
+ if show == True:
+ plt.show()
+
+ return ax
+
+ def show_spatial_filter(self, *args, **kwargs):
+
+ ax = kwargs.pop('ax', None)
+ show = kwargs.pop('show', True)
+ save_file_name = kwargs.pop('save_file_name', None)
+ colorbar = kwargs.pop('colorbar', True)
+
+ k = Kernel2D(args[0],args[1],[],[],[])
+ for lnunit in self.lnunit_list:
+ k = k + lnunit.linear_filter.spatial_filter.get_kernel(*args, **kwargs)
+ k.imshow(ax=ax, show=show, save_file_name=save_file_name, colorbar=colorbar)
+
+ def get_cursor(self, *args, **kwargs):
+
+ threshold = kwargs.get('threshold', 0.)
+ separable = kwargs.get('separable', False)
+
+ if len(args) == 1:
+ movie = args[0]
+ if separable:
+ return SeparableMultiLNUnitCursor(self, movie)
+ else:
+ return MultiLNUnitCursor(self, movie, threshold=threshold)
+ elif len(args) > 1:
+ movie_list = args
+ if separable:
+ raise NotImplementedError
+ else:
+ return MultiLNUnitMultiMovieCursor(self, movie_list, threshold=threshold)
+ else:
+ raise ValueError('get_cursor requires at least one movie argument')
+
+
+ def evaluate(self, movie, **kwargs):
+ separable = kwargs.pop('separable', False)
+ return self.get_cursor(movie, separable=separable).evaluate(**kwargs)
+
+from sympy.abc import x, y
+
+if __name__ == "__main__":
+
+ movie_file = '/data/mat/iSee_temp_shared/movies/TouchOfEvil.npy'
+ m_data = np.load(movie_file, 'r')
+ m = Movie(m_data[1000:], frame_rate=30.)
+
+ # Create second cell:
+ transfer_function = ScalarTransferFunction('s')
+ temporal_filter = TemporalFilterCosineBump((.4,-.3), (20,60))
+ cell_list = []
+ for xi in np.linspace(0,m.data.shape[2], 5):
+ for yi in np.linspace(0,m.data.shape[1], 5):
+ spatial_filter_on = GaussianSpatialFilter(sigma=(2,2), origin=(0,0), translate=(xi, yi))
+ on_linear_filter = SpatioTemporalFilter(spatial_filter_on, temporal_filter, amplitude=20)
+ on_lnunit = LNUnit(on_linear_filter, transfer_function)
+ spatial_filter_off = GaussianSpatialFilter(sigma=(4,4), origin=(0,0), translate=(xi, yi))
+ off_linear_filter = SpatioTemporalFilter(spatial_filter_off, temporal_filter, amplitude=-20)
+ off_lnunit = LNUnit(off_linear_filter, transfer_function)
+
+ multi_transfer_function = MultiTransferFunction((x, y), 'x+y')
+
+ multi_unit = MultiLNUnit([on_lnunit, off_lnunit], multi_transfer_function)
+ cell_list.append(multi_unit)
+
+ lgn = LGNModel(cell_list) #Here include a list of all cells
+ y = lgn.evaluate(m, downsample=10) #Does the filtering + non-linearity on movie object m
+ heat_plot(y, interpolation='none', colorbar=False)
+
+
+
+
+
+#
+# def imshow(self, ii, image_shape, fps, ax=None, show=True, relative_spatial_location=(0,0)):
+#
+# if ax is None:
+# _, ax = plt.subplots(1,1)
+#
+# curr_kernel = self.get_spatio_temporal_kernel(image_shape, fps, relative_spatial_location=relative_spatial_location)
+#
+# cax = curr_kernel.imshow(ii, ax=ax, show=False)
+#
+# if show == True:
+# plt.show()
+#
+# return ax
+#
+#
+# class OnOffCellModel(CellModel):
+#
+# def __init__(self, dc_offset=0, on_subfield=None, off_subfield=None, on_weight = 1, off_weight = -1, t_max=None):
+#
+# super(self.__class__, self).__init__(dc_offset, t_max)
+#
+# if isinstance(on_subfield, dict):
+# curr_module, curr_class = on_subfield.pop('class')
+# self.on_subfield = getattr(importlib.import_module(curr_module), curr_class)(**on_subfield)
+# else:
+# self.on_subfield = on_subfield
+#
+# super(self.__class__, self).add_subfield(on_subfield, on_weight)
+#
+# if isinstance(off_subfield, dict):
+# curr_module, curr_class = off_subfield.pop('class')
+# self.off_subfield = getattr(importlib.import_module(curr_module), curr_class)(**off_subfield)
+# else:
+# self.off_subfield = off_subfield
+#
+# super(self.__class__, self).add_subfield(off_subfield, off_weight)
+#
+#
+# def to_dict(self):
+#
+# return {'dc_offset':self.dc_offset,
+# 'on_subfield':self.on_subfield.to_dict(),
+# 'off_subfield':self.off_subfield.to_dict(),
+# 't_max':self.t_max,
+# 'class':(__name__, self.__class__.__name__)}
+#
+# class SingleSubfieldCellModel(CellModel):
+#
+# def __init__(self, subfield, weight = 1, dc_offset=0, t_max=None):
+#
+# super(SingleSubfieldCellModel, self).__init__(dc_offset, t_max)
+#
+# if isinstance(subfield, dict):
+# curr_module, curr_class = subfield.pop('class')
+# subfield = getattr(importlib.import_module(curr_module), curr_class)(**subfield)
+#
+# super(self.__class__, self).add_subfield(subfield, weight)
+#
+# def to_dict(self):
+#
+# assert len(self.subfield_list) == 1
+# subfield = self.subfield_list[0]
+# weight = self.subfield_weight_dict[subfield]
+#
+# return {'dc_offset':self.dc_offset,
+# 'subfield':subfield.to_dict(),
+# 'weight':weight,
+# 't_max':self.t_max,
+# 'class':(__name__, self.__class__.__name__)}
+#
+# class OnCellModel(SingleSubfieldCellModel):
+#
+# def __init__(self, on_subfield, weight = 1, dc_offset=0 , t_max=None):
+# assert weight > 0
+# super(OnCellModel, self).__init__(on_subfield, weight, dc_offset, t_max)
+#
+# def to_dict(self):
+# data_dict = super(OnCellModel, self).to_dict()
+# data_dict['on_subfield'] = data_dict.pop('subfield')
+# return data_dict
+#
+# class OffCellModel(SingleSubfieldCellModel):
+#
+# def __init__(self, on_subfield, weight = -1, dc_offset=0 , t_max=None):
+# assert weight < 0
+# super(OffCellModel, self).__init__(on_subfield, weight, dc_offset, t_max)
+#
+# def to_dict(self):
+# data_dict = super(OffCellModel, self).to_dict()
+# data_dict['off_subfield'] = data_dict.pop('subfield')
+# return data_dict
+
+
+# class OffCellModel(CellModel):
+#
+# def __init__(self, off_subfield, dc_offset=0, off_weight = 1, t_max=None):
+#
+# assert off_weight < 0.
+# self.weight = off_weight
+#
+#
+#
+#
+# super(self.__class__, self).__init__(dc_offset, t_max)
+#
+# if isinstance(on_subfield, dict):
+# curr_module, curr_class = on_subfield.pop('class')
+# self.subfield = getattr(importlib.import_module(curr_module), curr_class)(**on_subfield)
+# else:
+# self.subfield = on_subfield
+#
+# super(self.__class__, self).add_subfield(self.subfield, self.weight)
+#
+# def to_dict(self):
+#
+# return {'dc_offset':self.dc_offset,
+# 'on_subfield':self.subfield.to_dict(),
+# 'on_weight':self.weight,
+# 't_max':self.t_max,
+# 'class':(__name__, self.__class__.__name__)}
+
+
+
+
+
+
+# if __name__ == "__main__":
+#
+# t = np.arange(0,.5,.001)
+# example_movie = movie.Movie(file_name=os.path.join(isee_engine.movie_directory, 'TouchOfEvil.npy'), frame_rate=30.1, memmap=True)
+#
+# temporal_filter_on = TemporalFilterExponential(weight=1, tau=.05)
+# on_subfield = Subfield(scale=(5,15), weight=.5, rotation=30, temporal_filter=temporal_filter_on, translation=(0,0))
+#
+# temporal_filter_off = TemporalFilterExponential(weight=2, tau=.01)
+# off_subfield = Subfield(scale=(5,15), weight=.5, rotation=-30, temporal_filter=temporal_filter_off)
+#
+# cell = OnOffCellModel(on_subfield=on_subfield, off_subfield=off_subfield, dc_offset=0., t_max=.5)
+# curr_kernel = cell.get_spatio_temporal_kernel((100,150), 30.1)
+# curr_kernel.imshow(0)
+#
+# print cell.to_dict()
+
+
+
+# f = cell.get_spatio_temporal_filter(example_movie.movie_data.shape[1:], t,threshold=.5)
+# print len(f.t_ind_list)
+#
+#
+
+# for ii in range(example_movie.number_of_frames-curr_filter.t_max):
+# print ii, example_movie.number_of_frames, curr_filter.map(example_movie, ii)
+
+
+# off_subfield = Subfield(scale=(15,15), weight=.2, translation=(30,30))
+
+
+#
+# curr_filter = cell.get_spatio_temporal_filter((100,150))
+#
+
+#
+# # print touch_of_evil(40.41, mask=m)
+# print curr_filter.t_max
+# for ii in range(example_movie.number_of_frames-curr_filter.t_max):
+# print ii, example_movie.number_of_frames, curr_filter.map(example_movie, ii)
+
+# cell.visualize_spatial_filter((100,150))
+# show_volume(spatio_temporal_filter, vmin=spatio_temporal_filter.min(), vmax=spatio_temporal_filter.max())
+
+
+
+# def get_spatial_filter(self, image_shape, relative_spatial_location=(0,0), relative_threshold=default_relative_threshold):
+#
+# # Initialize:
+# translation_matrix = util.get_translation_matrix(relative_spatial_location)
+#
+# # On-subunit:
+# on_filter_pre_spatial = self.on_subfield.get_spatial_filter(image_shape)
+# on_filter_spatial = util.apply_transformation_matrix(on_filter_pre_spatial, translation_matrix)
+#
+# # Off-subunit:
+# off_filter_pre_spatial = self.off_subfield.get_spatial_filter(image_shape)
+# off_filter_spatial = util.apply_transformation_matrix(off_filter_pre_spatial, translation_matrix)
+#
+# spatial_filter = on_filter_spatial - off_filter_spatial
+#
+# tmp = np.abs(spatial_filter)
+# spatial_filter[np.where(tmp/tmp.max() < relative_threshold )] = 0
+#
+# return spatial_filter
+
+# kernel = float(self.dc_offset)/len(nonzero_ind_tuple[0])+spatio_temporal_filter[nonzero_ind_tuple]
+
+# def rectifying_filter_factory(kernel, movie, dc_offset=0):
+#
+# def rectifying_filter(t):
+#
+# fi = movie.frame_rate*float(t)
+# fim, fiM = np.floor(fi), np.ceil(fi)
+#
+# print t, fim, fiM
+#
+# try:
+# s1 = (movie.movie_data[int(fim)+kernel.t_ind_list, kernel.row_ind_list, kernel.col_ind_list]*kernel.kernel).sum()
+# s2 = (movie.movie_data[int(fiM)+kernel.t_ind_list, kernel.row_ind_list, kernel.col_ind_list]*kernel.kernel).sum()
+# except IndexError:
+# return None
+#
+# # Linear interpolation:
+# s_pre = dc_offset + s1*((1-(fi-fim))*.5) + s2*((fi-fim)*.5)
+#
+# if s_pre < 0:
+# return 0
+# else:
+# return float(s_pre)
+#
+# return rectifying_filter
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/make_cell_list.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/make_cell_list.py
new file mode 100644
index 0000000..aa05481
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/make_cell_list.py
@@ -0,0 +1,294 @@
+import numpy as np
+import matplotlib.pyplot as plt
+import scipy.stats as sps
+
+from .linearfilter import SpatioTemporalFilter
+from .spatialfilter import GaussianSpatialFilter
+from .transferfunction import MultiTransferFunction, ScalarTransferFunction
+from .temporalfilter import TemporalFilterCosineBump
+from .cellmodel import LGNOnCell, LGNOffCell, LGNOnOffCell, TwoSubfieldLinearCell, OnUnit, OffUnit
+from .movie import Movie, FullFieldFlashMovie
+from sympy.abc import x as symbolic_x
+from sympy.abc import y as symbolic_y
+
+# def multi_cell_tensor_generator(cell_creation_function, **kwargs):
+#
+# sew_param_dict = {}
+# static_param_dict = {}
+# for key, val in kwargs.items():
+# if isinstance(val, (list, np.ndarray)):
+# sew_param_dict[key]=val
+# else:
+# static_param_dict[key]=val
+#
+# cell_list = []
+# loop_keys, loop_lists = zip(*sew_param_dict.items())
+# for param_tuple in itertools.product(*loop_lists):
+# param_dict = dict(zip(loop_keys, param_tuple))
+# print param_dict
+# param_dict.update(static_param_dict)
+# cell_list += cell_creation_function(**param_dict)
+#
+# return cell_list
+
+def multi_cell_random_generator(cell_creation_function=None, **kwargs):
+
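+ # kwargs are split three ways: frozen scipy distributions are sampled once
+ # per cell, ndarray values supply one entry per cell, and everything else
+ # is passed through unchanged.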
+ sew_param_dict = {}
+ static_param_dict = {}
+ range_key_dict = {}
+ for key, val in kwargs.items():
+ if isinstance(val, (sps.rv_continuous, sps.rv_discrete)) or type(val) == type(sps.multivariate_normal()):
+ sew_param_dict[key]=val
+ elif isinstance(val, np.ndarray):
+ range_key_dict[key] = val
+ else:
+ static_param_dict[key]=val
+
+ number_of_cells = static_param_dict.pop('number_of_cells', 1)
+
+ for key, val in range_key_dict.items():
+ assert len(val) == number_of_cells
+
+ cell_list = []
+ loop_keys, loop_lists = zip(*sew_param_dict.items())
+ value_instance_list = zip(*map(lambda x: x.rvs(size=number_of_cells), loop_lists))
+ for ii, curr_value_instance in enumerate(value_instance_list):
+ param_dict = dict(zip(loop_keys, curr_value_instance))
+ param_dict.update(static_param_dict)
+ param_dict['number_of_cells'] = 1
+ for range_key in range_key_dict:
+ param_dict[range_key] = range_key_dict[range_key][ii]
+
+ if cell_creation_function is None:
+ cell_list.append(param_dict)
+ else:
+ cell_list += cell_creation_function(**param_dict)
+
+ return cell_list
+
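+# Illustrative sketch (editor's example): sampling five tON-like cells with
+# randomized amplitudes and temporal peaks, using the same distribution
+# values as lattice_unit_constructor.py.
+#
+# config = {'number_of_cells': 5,
+#           'lattice_unit_center': (40, 30),
+#           'weights': (4., -2.5),
+#           'kpeaks': sps.multivariate_normal(mean=[15., 35.], cov=[[5., 0], [0, 5.]]),
+#           'delays': (0., 0.),
+#           'amplitude': sps.rv_discrete(values=([20, 25], [.5, .5])),
+#           'sigma': (3, 3),
+#           'width': 5}
+# cells = multi_cell_random_generator(make_single_unit_cell_list, **config)
+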
+
+def make_single_unit_cell_list(number_of_cells=None,
+ lattice_unit_center=None,
+ weights=None,
+ kpeaks=None,
+ delays=None,
+ amplitude=None,
+ sigma=None,
+ width=5,
+ transfer_function_str = 'Heaviside(s)*s'):
+
+ cell_list = []
+ for _ in range(number_of_cells):
+ dxi = np.random.uniform(-width*1./2,width*1./2)
+ dyi = np.random.uniform(-width*1./2,width*1./2)
+ temporal_filter = TemporalFilterCosineBump(weights, kpeaks,delays)
+ spatial_filter = GaussianSpatialFilter(translate=(dxi,dyi), sigma=sigma, origin=lattice_unit_center) # all distances measured from BOTTOM LEFT
+ spatiotemporal_filter = SpatioTemporalFilter(spatial_filter, temporal_filter, amplitude=amplitude)
+ transfer_function = ScalarTransferFunction(transfer_function_str)
+ if amplitude > 0.:
+ cell = OnUnit(spatiotemporal_filter, transfer_function)
+ elif amplitude < 0.:
+ cell = OffUnit(spatiotemporal_filter, transfer_function)
+ else:
+ raise ValueError('amplitude must be nonzero to assign an On or Off unit')
+
+
+ cell_list.append(cell)
+
+ return cell_list
+
+def make_on_off_cell_list(number_of_cells=None,
+ lattice_unit_center=None,
+ weights_on=None,
+ weights_off=None,
+ kpeaks_on=None,
+ kpeaks_off=None,
+ delays_on = None,
+ delays_off = None,
+ amplitude_on=None,
+ amplitude_off=None,
+ sigma_on=None,
+ sigma_off=None,
+ subfield_separation=None,
+ ang=None,
+ dominant_subunit=None,
+ width=5,
+ transfer_function_str = 'Heaviside(x)*x + Heaviside(y)*y'):
+
+ cell_list = []
+ for _ in range(number_of_cells):
+
+ dxi = np.random.uniform(-width*1./2,width*1./2)
+ dyi = np.random.uniform(-width*1./2,width*1./2)
+
+ dominant_subfield_location = (lattice_unit_center[0]+dxi, lattice_unit_center[1]+dyi)
+# hor_offset = np.cos(ang*np.pi/180.)*subfield_separation
+# vert_offset = np.sin(ang*np.pi/180.)*subfield_separation
+# nondominant_subfield_translation = (hor_offset,vert_offset)
+
+ if dominant_subunit == 'on':
+ on_translate = dominant_subfield_location#(0,0)
+ off_translate = dominant_subfield_location#nondominant_subfield_translation
+
+ elif dominant_subunit == 'off':
+
+ off_translate = dominant_subfield_location#(0,0)
+ on_translate = dominant_subfield_location#nondominant_subfield_translation
+
+ else:
+ raise ValueError("dominant_subunit must be 'on' or 'off'")
+
+ on_origin = off_origin = (0,0)#dominant_subfield_location
+
+ temporal_filter_on = TemporalFilterCosineBump(weights_on, kpeaks_on,delays_on)
+ spatial_filter_on = GaussianSpatialFilter(translate=on_translate,sigma=sigma_on, origin=on_origin) # all distances measured from BOTTOM LEFT
+ on_filter = SpatioTemporalFilter(spatial_filter_on, temporal_filter_on, amplitude=amplitude_on)
+
+ temporal_filter_off = TemporalFilterCosineBump(weights_off, kpeaks_off,delays_off)
+ spatial_filter_off = GaussianSpatialFilter(translate=off_translate,sigma=sigma_off, origin=off_origin) # all distances measured from BOTTOM LEFT
+ off_filter = SpatioTemporalFilter(spatial_filter_off, temporal_filter_off, amplitude=amplitude_off)
+
+# cell = LGNOnOffCell(on_filter, off_filter, transfer_function=MultiTransferFunction((symbolic_x, symbolic_y), transfer_function_str))
+ cell = TwoSubfieldLinearCell(on_filter,off_filter,subfield_separation=subfield_separation, onoff_axis_angle=ang, dominant_subfield_location=dominant_subfield_location)
+ cell_list.append(cell)
+
+ return cell_list
+
+# amplitude_list = amplitude_dist.rvs(size=5)
+# kpeak_list = kpeak_dist.rvs(size=5)
+# cell_config = {'number_of_cells':5,
+# 'lattice_unit_center':(40,30),
+# 'weights':(.4,-.2),
+# 'kpeaks':kpeak_list,
+# 'amplitude':amplitude_list,
+# 'sigma':(4,4),
+# 'width':5}
+# multi_cell_tensor_generator(make_single_unit_cell_list, **cell_config)
+
+
+# amplitude_dist = sps.rv_discrete(values=([20,25], [.5,.5]))
+# kpeak_dist = sps.multivariate_normal(mean=[40., 80.], cov=[[5.0, 0], [0, 5]])
+#
+# single_unit_cell_config = {'number_of_cells':10,
+# 'lattice_unit_center':(40,30),
+# 'weights':(.4,-.2),
+# 'kpeaks':kpeak_dist,
+# 'amplitude':amplitude_dist,
+# 'sigma':(4,4),
+# 'width':5}
+#
+#
+# amplitude_on_dist = sps.rv_discrete(values=([20,25], [.5,.5]))
+# amplitude_off_dist = sps.rv_discrete(values=([-10,-15], [.5,.5]))
+# kpeak_on_dist = sps.multivariate_normal(mean=[40., 80.], cov=[[5.0, 0], [0, 5]])
+# kpeak_off_dist = sps.multivariate_normal(mean=[100., 160.], cov=[[5.0, 0], [0, 5]])
+# #ang_dist = sps.rv_discrete(values=(np.arange(0,360,45), 1./8*np.ones((1,8))))
+# ang_dist = np.arange(0,360,45)
+#
+# two_unit_cell_config={'number_of_cells':8,
+# 'lattice_unit_center':(40,30),
+# 'weights_on':(.4,-.2),
+# 'weights_off':(.4,-.1),
+# 'kpeaks_on':kpeak_on_dist,
+# 'kpeaks_off':kpeak_off_dist,
+# 'amplitude_on':20.,
+# 'amplitude_off':-10.,
+# 'sigma_on':(4,4),
+# 'sigma_off':(4,4),
+# 'subfield_separation':2.,
+# 'ang':ang_dist,
+# 'dominant_subunit':'on',
+# 'width':5}
+
+
+def evaluate_cell_and_plot(input_cell, input_movie, ax, show=False):
+ t, y = input_cell.evaluate(input_movie,downsample = 10)
+ ax.plot(t, y)
+
+ if show == True:
+ plt.show()
+
+
+# if __name__ == "__main__":
+#
+# # Create stimulus 0:
+# frame_rate = 60
+# m1 = FullFieldFlashMovie(np.arange(60), np.arange(80), 1., 3., frame_rate=frame_rate).full(t_max=3)
+# m2 = FullFieldFlashMovie(np.arange(60), np.arange(80), 0, 2, frame_rate=frame_rate, max_intensity=-1).full(t_max=2)
+# m3 = FullFieldFlashMovie(np.arange(60), np.arange(80), 0, 2., frame_rate=frame_rate).full(t_max=2)
+# m4 = FullFieldFlashMovie(np.arange(60), np.arange(80), 0, 2, frame_rate=frame_rate, max_intensity=0).full(t_max=2)
+# m0 = m1+m2+m3+m4
+#
+# # Create stimulus 1:
+# movie_file = '/data/mat/RamIyer/for_Anton/grating_ori0_res2.mat'
+# m_file = sio.loadmat(movie_file)
+# m_data_raw = m_file['mov_fine'].T
+# m_data = np.reshape(m_data_raw,(3000,64,128))
+# m1 = Movie(m_data, frame_rate=1000.)
+#
+# #Create stimulus 2:
+# movie_file = '/data/mat/iSee_temp_shared/TouchOfEvil_norm.npy'
+# m_data = np.load(movie_file, 'r')
+# m = Movie(m_data[1000:], frame_rate=30.)
+#
+# movie_list = [m0, m1, m2]
+#
+# #====================================================
+#
+# #Create cell list
+#
+# cell_list = []
+#
+# #On cells
+# params_tON = (5, (40,30), (.4,-.2),(40,80),20.,(4,4))
+# tON_list = make_single_unit_cell_list(*params_tON)
+# cell_list.append(tON_list)
+#
+# params_sON = (5, (40,30), (.4,-.1),(100,160),20.,(4,4))
+# sON_list = make_single_unit_cell_list(*params_sON)
+# cell_list.append(sON_list)
+#
+# #Off cells
+# params_tOFF = (5, (40,30), (.4,-.2),(40,80),-20.,(4,4))
+# tOFF_list = make_single_unit_cell_list(*params_tOFF)
+# cell_list.append(tOFF_list)
+#
+# params_sOFF = (5, (40,30), (.4,-.1),(100,160),-20.,(4,4))
+# sOFF_list = make_single_unit_cell_list(*params_sOFF)
+# cell_list.append(sOFF_list)
+#
+# #ONOFF cells
+# params_onoff = (5, (40,30),(.4, -.2),(.4,-.2),(40, 80),(50,100),20.,-20.,(4,4),(4,4),2.,0,'on')
+# onoff_list = make_on_off_cell_list(*params_onoff)
+# cell_list.append(onoff_list)
+#
+# #Two subunit cells
+# params_twosub = (5, (40,30),(.4, -.2),(.4,-.1),(40, 80),(100,160),20.,-10.,(4,2),(3,4),10.,90,'on')
+# twosub_list = make_on_off_cell_list(*params_twosub)
+# cell_list.append(twosub_list)
+#
+# #=====================================================
+# #Evaluate and plot responses
+# nc = len(movie_list)
+# nr = len(cell_list)
+# fig, axes = plt.subplots(nr,nc+2)
+#
+# for curr_row, curr_cell in zip(axes, cell_list):
+# curr_cell.show_spatial_filter(np.arange(60),np.arange(80), ax=curr_row[0], show=False, colorbar=False)
+# curr_cell.show_temporal_filter(ax=curr_row[1], show=False)
+#
+# for curr_row, curr_cell in zip(axes, cell_list):
+# for curr_ax, curr_movie in zip(curr_row[2:], movie_list):
+# evaluate_cell_and_plot(curr_cell, curr_movie, curr_ax, show=False)
+#
+# plt.tight_layout()
+# plt.show()
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/movie.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/movie.py
new file mode 100644
index 0000000..a9d4e67
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/movie.py
@@ -0,0 +1,196 @@
+import matplotlib.pyplot as plt
+import numpy as np
+from .utilities import convert_tmin_tmax_framerate_to_trange
+
+
+class Movie(object):
+ def __init__(self, data, row_range=None, col_range=None, labels=('time', 'y', 'x'),
+ units=('second', 'pixel', 'pixel'), frame_rate=None, t_range=None):
+ self.data = data
+ self.labels = labels
+ self.units = units
+ assert units[0] == 'second'
+
+ if t_range is None:
+ self.frame_rate = float(frame_rate)
+ self.t_range = np.arange(data.shape[0])*(1./self.frame_rate)
+ else:
+ self.t_range = np.array(t_range)
+ self.frame_rate = 1./np.mean(np.diff(t_range))
+
+ if row_range is None:
+ self.row_range = np.arange(data.shape[1])
+ else:
+ self.row_range = np.array(row_range)
+ if col_range is None:
+ self.col_range = np.arange(data.shape[2])
+ else:
+ self.col_range = np.array(col_range)
+
+ def imshow_summary(self, ax=None, show=True, xlabel=None):
+ if ax is None:
+ _, ax = plt.subplots(1,1)
+
+ t_vals = self.t_range.copy()
+ y_vals = self.data.mean(axis=2).mean(axis=1)
+ ax.plot(t_vals, y_vals)
+ ax.set_ylim(y_vals.min()-np.abs(y_vals.min())*.05, y_vals.max()+np.abs(y_vals.max())*.05)
+
+ if not xlabel is None:
+ ax.set_xlabel(xlabel)
+
+ ax.set_ylabel('Average frame intensity')
+
+ if show == True:
+ plt.show()
+
+ return ax, (t_vals, y_vals)
+
+ def imshow(self, t, show=True, vmin=-1, vmax=1, cmap=plt.cm.gray):
+ ti = int(t*self.frame_rate)
+ data = self.data[ti,:,:]
+ plt.imshow(data, vmin=vmin, vmax=vmax, cmap=cmap)
+ plt.colorbar()
+ if show:
+ plt.show()
+
+ def __add__(self, other):
+
+ assert self.labels == other.labels
+ assert self.units == other.units
+ assert self.frame_rate == other.frame_rate
+ np.testing.assert_almost_equal(self.col_range, other.col_range)
+ np.testing.assert_almost_equal(self.row_range, other.row_range)
+
+
+ new_data = np.empty((len(self.t_range)+len(other.t_range)-1, len(self.row_range), len(self.col_range)))
+ new_data[:len(self.t_range), :,:] = self.data[:,:,:]
+ new_data[len(self.t_range):, :,:] = other.data[1:,:,:]
+
+ return Movie(new_data, row_range=self.row_range.copy(), col_range=self.col_range.copy(), labels=self.labels, units=self.units, frame_rate=self.frame_rate)
+
+ @property
+ def ranges(self):
+ return self.t_range, self.row_range, self.col_range
+
+ def get_nwb_GrayScaleMovie(self):
+ # nwb is not imported at module scope; this relies on the isee_engine
+ # NWB bindings imported elsewhere in this package, loaded lazily here.
+ import isee_engine.nwb as nwb
+
+ t_scale = nwb.Scale(self.t_range, 'time', self.units[0])
+ row_scale = nwb.Scale(self.row_range, 'distance', self.units[1])
+ col_scale = nwb.Scale(self.col_range, 'distance', self.units[2])
+
+ return nwb.GrayScaleMovie(self.data, scale=(t_scale, row_scale, col_scale))
+
+ def __getitem__(self, *args):
+ return self.data.__getitem__(*args)
+
+
+class FullFieldMovie(Movie):
+ def __init__(self, f, row_range, col_range, frame_rate=24):
+ self.row_range = row_range
+ self.col_range = col_range
+ self.frame_size = (len(self.row_range), len(self.col_range))
+ self._frame_rate = frame_rate
+ self.f = f
+
+ @property
+ def frame_rate(self):
+ return self._frame_rate
+
+ @property
+ def data(self):
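+ # functional movie: returning self makes indexing (self.data[t, r, c])
+ # route through __getitem__ below, which evaluates f(t) on demand
+ # instead of storing frames.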
+ return self
+
+ def __getitem__(self, *args):
+
+ t_inds, x_inds, y_inds = args[0]
+
+ assert (len(x_inds) == len(y_inds)) and (len(y_inds) == len(t_inds))
+
+ # Convert frame indices to times:
+ t_vals = (1./self.frame_rate)*t_inds
+
+ # Evaluate and return:
+ return self.f(t_vals)
+
+ def full(self, t_min=0, t_max=None):
+ # Compute t_range
+ t_range = convert_tmin_tmax_framerate_to_trange(t_min, t_max, self.frame_rate)
+
+ nt = len(t_range)
+ nr = len(self.row_range)
+ nc = len(self.col_range)
+ a,b,c = np.meshgrid(range(nt),range(nr),range(nc))
+ af, bf, cf = map(lambda x: x.flatten(), [a,b,c])
+ data = np.empty((nt, nr, nc))
+ data[af, bf, cf] = self.f(t_range[af])
+
+ return Movie(data, row_range=self.row_range, col_range=self.col_range, labels=('time', 'y', 'x'), units=('second', 'pixel', 'pixel'), frame_rate=self.frame_rate)
+
+
+class FullFieldFlashMovie(FullFieldMovie):
+ def __init__(self, row_range, col_range, t_on, t_off, max_intensity=1, frame_rate=24):
+ assert t_on < t_off
+
+ def f(t):
+ return np.piecewise(t, *zip(*[(t < t_on, 0), (np.logical_and(t_on <= t, t < t_off), max_intensity),
+ (t_off <= t, 0)]))
+
+ super(FullFieldFlashMovie, self).__init__(f, row_range, col_range, frame_rate=frame_rate)
+
+
+class GratingMovie(Movie):
+ def __init__(self, row_size, col_size, frame_rate=1000.):
+ self.row_size = row_size #in degrees
+ self.col_size = col_size #in degrees
+ self.frame_rate = float(frame_rate) #in Hz
+
+ def create_movie(self, t_min = 0, t_max = 1, gray_screen_dur = 0, cpd = 0.05, temporal_f = 4, theta = 45, phase = 0., contrast = 1.0, row_size_new = None, col_size_new = None):
+ """Create the grating movie with the desired parameters
+ :param t_min: start time in seconds
+ :param t_max: end time in seconds
+ :param gray_screen_dur: Duration of gray screen before grating stimulus starts
+ :param cpd: cycles per degree
+ :param temporal_f: in Hz
+ :param theta: orientation angle
+ :return: Movie object of grating with desired parameters
+ """
+ assert contrast <= 1, "Contrast must be <= 1"
+ assert contrast > 0, "Contrast must be > 0"
+
+ physical_spacing = 1. / (float(cpd) * 10) # To make sure no aliasing occurs
+ self.row_range = np.linspace(0, self.row_size, int(self.row_size / physical_spacing), endpoint=True)
+ self.col_range = np.linspace(0, self.col_size, int(self.col_size / physical_spacing), endpoint=True)
+ numberFramesNeeded = int(round(self.frame_rate * (t_max - gray_screen_dur))) + 1
+ time_range = np.linspace(gray_screen_dur, t_max - gray_screen_dur, numberFramesNeeded, endpoint=True)
+
+ tt, yy, xx = np.meshgrid(time_range, self.row_range, self.col_range, indexing='ij')
+
+ thetaRad = -np.pi*(180-theta)/180.
+ phaseRad = np.pi*(180-phase)/180.
+ xy = xx * np.cos(thetaRad) + yy * np.sin(thetaRad)
+ data = contrast*np.sin(2*np.pi*(cpd * xy + temporal_f *tt) + phaseRad)
+
+ if row_size_new is not None:
+ self.row_range = np.linspace(0, row_size_new, data.shape[1], endpoint=True)
+ if col_size_new is not None:
+ self.col_range = np.linspace(0, col_size_new, data.shape[2], endpoint=True)
+
+ if gray_screen_dur > 0:
+ # just adding one or two seconds to gray screen so flash never "happens"
+ m_gray = FullFieldFlashMovie(self.row_range, self.col_range, gray_screen_dur + 1, gray_screen_dur + 2,
+ frame_rate=self.frame_rate).full(t_max=gray_screen_dur)
+ mov = m_gray + Movie(data, row_range=self.row_range, col_range=self.col_range, labels=('time', 'y', 'x'),
+ units=('second', 'pixel', 'pixel'), frame_rate=self.frame_rate)
+ else:
+ mov = Movie(data, row_range=self.row_range, col_range=self.col_range, labels=('time', 'y', 'x'),
+ units=('second', 'pixel', 'pixel'), frame_rate=self.frame_rate)
+
+ return mov
+
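+# Illustrative sketch (editor's example; parameter values are arbitrary):
+# a 1 s drifting grating at 4 Hz and 0.05 cpd, oriented at 45 degrees.
+#
+# grating = GratingMovie(120, 240, frame_rate=1000.)
+# movie = grating.create_movie(t_max=1., cpd=0.05, temporal_f=4, theta=45)
+# movie.imshow_summary()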
+
+if __name__ == "__main__":
+ m1 = FullFieldFlashMovie(range(60), range(80), 1, 2).full(t_max=2)
+ m2 = FullFieldFlashMovie(range(60), range(80), 1, 2).full(t_max=2)
+ m3 = m1+m2
+ m3.imshow_summary()
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/poissongeneration.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/poissongeneration.py
new file mode 100644
index 0000000..b2125b1
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/poissongeneration.py
@@ -0,0 +1,104 @@
+import numpy as np
+import scipy.interpolate as sinterp
+import scipy.integrate as spi
+import warnings
+import scipy.optimize as sopt
+import scipy.stats as sps
+
+def generate_renewal_process(t0, t1, renewal_distribution):
+ last_event_time = t0
+ curr_interevent_time = float(renewal_distribution())
+ event_time_list = []
+ while last_event_time+curr_interevent_time <= t1:
+ event_time_list.append(last_event_time+curr_interevent_time)
+ curr_interevent_time = float(renewal_distribution())
+ last_event_time = event_time_list[-1]
+
+ return event_time_list
+
+def generate_poisson_process(t0, t1, rate):
+
+ if rate is None:
+ raise ValueError('Rate cannot be None')
+ if rate < 0:
+ raise ValueError('Negative rate (%s) not allowed' % rate)
+ if not rate < np.inf:
+ raise ValueError('Rate (%s) must be finite' % rate)
+ if rate > 10000:
+ warnings.warn('Very high rate encountered: %s' % rate)
+
+ if rate == 0:
+ return []
+ else:
+ return generate_renewal_process(t0, t1, sps.expon(0, 1./rate).rvs)
+
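+# Illustrative sketch (editor's example): a homogeneous Poisson train is a
+# renewal process with exponential inter-event intervals, so a 100 Hz train
+# on [0, 1] s yields on the order of 100 events.
+#
+# spikes = generate_poisson_process(0., 1., 100.)
+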
+def generate_inhomogenous_poisson(t_range, y_range, seed=None):
+ if seed is not None: np.random.seed(seed)
+ spike_list = []
+ for tl, tr, y in zip(t_range[:-1], t_range[1:], y_range[:-1]):
+ spike_list += generate_poisson_process(tl, tr, y)
+ return spike_list
+
+
+
+
+def generate_poisson_rescaling(t, y, seed=None):
+ y = np.array(y)
+ t = np.array(t)
+ assert not np.any(y<0)
+ f = sinterp.interp1d(t, y, fill_value=0, bounds_error=False)
+ return generate_poisson_rescaling_function(lambda y, t: f(t), t[0], t[-1], seed=seed)
+
+
+
+def generate_poisson_rescaling_function(f, t_min, t_max, seed=None):
+
+ def integrator(t0, t1):
+ return spi.odeint(f, 0, [t0, t1])[1][0]
+
+ if seed is not None:
+ np.random.seed(seed)
+
+ spike_train = []
+ while t_min < t_max:
+ e0 = np.random.exponential()
+ def root_function(t):
+ return e0 - integrator(t_min, t)
+
+ try:
+ with warnings.catch_warnings(record=True) as w:
+ result = sopt.root(root_function, .1)
+ assert result.success
+ except AssertionError:
+ if e0 < integrator(t_min, t_max):
+ # a root exists within [t_min, t_max], so the solver genuinely failed
+ raise RuntimeError('root finding failed for the next time-rescaled event')
+ else:
+ # the exponential draw exceeds the remaining intensity mass:
+ # no further events before t_max
+ break
+
+ t_min = result.x[0]
+ spike_train.append(t_min)
+
+ return np.array(spike_train)
+
+
+def test_generate_poisson_rescaling_function():
+
+ f = lambda y, t: 10
+
+ assert len(generate_poisson_rescaling_function(f, 0, 1, seed=5)) == 12
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/singleunitcell.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/singleunitcell.py
new file mode 100644
index 0000000..d3e0b24
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/singleunitcell.py
@@ -0,0 +1,8 @@
+from .temporalfilter import TemporalFilterCosineBump
+from .transferfunction import ScalarTransferFunction
+from .linearfilter import SpatioTemporalFilter
+import numpy as np
+from .spatialfilter import GaussianSpatialFilter
+from .cellmodel import OnUnit, OffUnit
+
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/spatialfilter.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/spatialfilter.py
new file mode 100644
index 0000000..466db94
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/spatialfilter.py
@@ -0,0 +1,215 @@
+from scipy import ndimage
+import numpy as np
+import itertools
+import importlib
+import scipy.interpolate as spinterp
+from . import utilities as util
+import matplotlib.pyplot as plt
+import scipy.misc as spmisc
+import scipy.ndimage as spndimage
+from .kernel import Kernel2D, Kernel3D
+
+class ArrayFilter(object):
+
+ default_threshold = .01
+
+ def __init__(self, mask):
+
+ self.mask = mask
+
+ def imshow(self, row_range, col_range, threshold=0, **kwargs):
+
+ return self.get_kernel(row_range, col_range,threshold).imshow(**kwargs)
+
+ def get_kernel(self, row_range, col_range, threshold=0, amplitude=1.):
+
+# print np.where(self.mask>threshold)
+ row_vals, col_vals = np.where(self.mask>threshold)
+
+ kernel_vals = self.mask[row_vals, col_vals]
+ kernel_vals = amplitude*kernel_vals/kernel_vals.sum()
+
+ return Kernel2D(row_range, col_range, row_vals, col_vals, kernel_vals) # row_range, col_range, row_inds, col_inds, kernel):
+
+
+class GaussianSpatialFilter(object):
+
+ default_threshold = .01
+
+ def __init__(self, translate=(0, 0), sigma=(1.,1.), rotation=0, origin='center'):
+ '''When w=1 and rotation=0, half-height will be at y=1'''
+
+ self.translate = translate
+ self.rotation = rotation
+ self.sigma = sigma
+ self.origin = origin
+
+ def imshow(self, row_range, col_range, threshold=0, **kwargs):
+ return self.get_kernel(row_range, col_range,threshold).imshow(**kwargs)
+
+    def to_dict(self):
+
+        return {'class': (__name__, self.__class__.__name__),
+                'translate': self.translate,
+                'rotation': self.rotation,
+                'sigma': self.sigma}
+
+ def get_kernel(self, row_range, col_range, threshold=0, amplitude=1.):
+
+ # Create symmetric initial point at center:
+ image_shape = len(col_range), len(row_range)
+ h, w = image_shape
+ on_filter_spatial = np.zeros(image_shape)
+ if h%2 == 0 and w%2 == 0:
+ for ii, jj in itertools.product(range(2), range(2)):
+ on_filter_spatial[int(h/2)+ii-1,int(w/2)+jj-1] = .25
+ elif h%2 == 0 and w%2 != 0:
+ for ii in range(2):
+ on_filter_spatial[int(h/2)+ii-1,int(w/2)] = .25
+ elif h%2 != 0 and w%2 == 0:
+ for jj in range(2):
+ on_filter_spatial[int(h/2),int(w/2)+jj-1] = .25
+ else:
+ on_filter_spatial[int(h/2),int(w/2)] = .25
+
+ # Apply gaussian filter to create correct sigma:
+        scaled_sigma_x = float(self.sigma[0])/(col_range[1]-col_range[0])
+        scaled_sigma_y = float(self.sigma[1])/(row_range[1]-row_range[0])
+        on_filter_spatial = ndimage.gaussian_filter(on_filter_spatial, (scaled_sigma_x, scaled_sigma_y), mode='nearest', cval=0)
+
+ # Rotate and translate at center:
+ rotation_matrix = util.get_rotation_matrix(self.rotation, on_filter_spatial.shape)
+ translation_x = float(self.translate[1])/(row_range[1]-row_range[0])
+ translation_y = -float(self.translate[0])/(col_range[1]-col_range[0])
+ translation_matrix = util.get_translation_matrix((translation_x, translation_y))
+ if self.origin != 'center':
+ center_y = -(self.origin[0]-(col_range[-1]+col_range[0])/2)/(col_range[1]-col_range[0])
+ center_x = (self.origin[1]-(row_range[-1]+row_range[0])/2)/(row_range[1]-row_range[0])
+ translation_matrix += util.get_translation_matrix((center_x, center_y))
+ kernel_data = util.apply_transformation_matrix(on_filter_spatial, translation_matrix+rotation_matrix)
+
+ kernel = Kernel2D.from_dense(row_range, col_range, kernel_data, threshold=0)
+ kernel.apply_threshold(threshold)
+ kernel.normalize()
+
+        kernel.kernel *= amplitude
+
+        return kernel
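+
+# Usage sketch (commented; parameter values are illustrative):
+#
+#   row_range, col_range = np.arange(60.), np.arange(80.)
+#   spatial_filter = GaussianSpatialFilter(translate=(10., 5.), sigma=(2., 2.))
+#   kernel = spatial_filter.get_kernel(row_range, col_range, threshold=.01)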
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/temporalfilter.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/temporalfilter.py
new file mode 100644
index 0000000..1e604bf
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/temporalfilter.py
@@ -0,0 +1,114 @@
+import numpy as np
+from . import fitfuns
+import scipy.interpolate as spinterp
+import matplotlib.pyplot as plt
+from .kernel import Kernel1D
+
+class TemporalFilter(object):
+
+ def __init__(self, *args, **kwargs): pass
+
+ def imshow(self, t_range=None, threshold=0, reverse=False, rescale=False, **kwargs):
+ return self.get_kernel(t_range, threshold, reverse, rescale).imshow(**kwargs)
+
+
+ def to_dict(self):
+ return {'class':(__name__, self.__class__.__name__)}
+
+    def get_kernel(self, t_range=None, threshold=0, reverse=False, rescale=False):
+
+        if t_range is None:
+            t_range = self.get_default_t_grid()
+
+        if len(self.t_support) == 1:
+            k = Kernel1D(self.t_support, self.kernel_data, threshold=threshold, reverse=reverse)
+        else:
+            interpolation_function = spinterp.interp1d(self.t_support, self.kernel_data, fill_value=0, bounds_error=False, assume_sorted=True)
+            k = Kernel1D(t_range, interpolation_function(t_range), threshold=threshold, reverse=reverse)
+        if rescale:
+            k.rescale()
+
+        # The kernel is expected to sum to +/-1 at this point:
+        assert np.abs(np.abs(k.kernel.sum()) - 1) < 1e-14
+
+        return k
+
+class ArrayTemporalFilter(TemporalFilter):
+
+    def __init__(self, mask, t_support):
+
+        self.mask = mask
+        self.t_support = t_support
+
+        assert len(self.mask) == len(self.t_support)
+
+        self.nkt = 600
+
+        super(ArrayTemporalFilter, self).__init__()
+
+        self.kernel_data = self.mask
+
+ def get_default_t_grid(self):
+
+ return np.arange(self.nkt)*.001
+
+class TemporalFilterCosineBump(TemporalFilter):
+
+    def __init__(self, weights, kpeaks, delays):
+
+        assert len(kpeaks) == 2
+        assert kpeaks[0] < kpeaks[1]
+        assert weights[0] > 0
+        assert delays[0] <= delays[1]
+
+ self.ncos = len(weights)
+
+ # Not likely to change defaults:
+ self.neye = 0
+ self.b = .3
+ self.nkt = 600
+
+        super(TemporalFilterCosineBump, self).__init__()
+
+ # Parameters
+ self.weights = np.array([weights]).T
+ self.kpeaks = kpeaks
+ self.delays = np.array([delays]).astype(int)
+
+ # Adapter code to get filters from Ram's code:
+ kbasprs = {}
+ kbasprs['neye'] = self.neye
+ kbasprs['ncos'] = self.ncos
+ kbasprs['kpeaks'] = self.kpeaks
+ kbasprs['b'] = self.b
+ kbasprs['delays'] = self.delays
+ nkt = self.nkt
+        self.kernel_data = np.dot(fitfuns.makeBasis_StimKernel(kbasprs, nkt), self.weights)[::-1].T[0]
+        self.t_support = np.arange(0, len(self.kernel_data)*.001, .001)
+        self.kbasprs = kbasprs
+        assert len(self.t_support) == len(self.kernel_data)
+
+    def __call__(self, t):
+        # Note: relies on self.interpolation_function, which is not set anywhere
+        # in this class; calling this directly will raise AttributeError.
+        return self.interpolation_function(t)
+
+ def get_default_t_grid(self):
+ return np.arange(self.nkt)*.001
+
+ def to_dict(self):
+
+        param_dict = super(TemporalFilterCosineBump, self).to_dict()
+
+ param_dict.update({'weights':self.weights.tolist(),
+ 'kpeaks':self.kpeaks})
+
+ return param_dict
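+
+# Usage sketch (commented; parameter values are illustrative):
+#
+#   tf = TemporalFilterCosineBump(weights=[30., -20.], kpeaks=[9., 20.], delays=[0, 1])
+#   k = tf.get_kernel(rescale=True)    # Kernel1D on the default 600-ms grid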
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/transferfunction.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/transferfunction.py
new file mode 100644
index 0000000..03ff617
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/transferfunction.py
@@ -0,0 +1,58 @@
+from sympy.utilities.lambdify import lambdify
+import sympy.parsing.sympy_parser as symp
+import sympy.abc
+import numpy as np
+
+
+class ScalarTransferFunction(object):
+ def __init__(self, transfer_function_string, symbol=sympy.abc.s):
+ self.symbol = symbol
+ self.transfer_function_string = transfer_function_string
+ self.closure = lambdify(self.symbol, symp.parse_expr(self.transfer_function_string), modules=['sympy'])
+
+ def __call__(self, s):
+ return self.closure(s)
+
+ def to_dict(self):
+ return {'class': (__name__, self.__class__.__name__),
+ 'function': self.transfer_function_string}
+
+ def imshow(self, xlim, ax=None, show=True, save_file_name=None, ylim=None):
+        # TODO: This function should be removed (ask Ram to see if/where it's used) since it will fail (no t_vals)
+ import matplotlib.pyplot as plt
+ if ax is None:
+ _, ax = plt.subplots(1, 1)
+
+ plt.plot(self.t_vals, self.kernel)
+ ax.set_xlabel('Time (Seconds)')
+
+ if ylim is not None:
+ ax.set_ylim(ylim)
+
+ if xlim is not None:
+ ax.set_xlim((self.t_range[0], self.t_range[-1]))
+
+ if save_file_name is not None:
+ plt.savefig(save_file_name, transparent=True)
+
+ if show:
+ plt.show()
+
+ return ax
+
+
+class MultiTransferFunction(object):
+ def __init__(self, symbol_tuple, transfer_function_string):
+ self.symbol_tuple = symbol_tuple
+ self.transfer_function_string = transfer_function_string
+ self.closure = lambdify(self.symbol_tuple, symp.parse_expr(self.transfer_function_string), modules=['sympy'])
+
+ def __call__(self, *s):
+ if isinstance(s[0], (float,)):
+ return self.closure(*s)
+ else:
+ return np.array(list(map(lambda x: self.closure(*x), zip(*s))))
+
+ def to_dict(self):
+ return {'class': (__name__, self.__class__.__name__),
+ 'function': self.transfer_function_string}
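+
+# Usage sketch (commented; the expression strings are illustrative assumptions):
+#
+#   tf = ScalarTransferFunction('Heaviside(s)*s')    # rectified-linear output
+#   rate = tf(5.0)                                   # -> 5.0
+#
+#   mtf = MultiTransferFunction((sympy.abc.x, sympy.abc.y), 'x + y')
+#   mtf(1.0, 2.0)                                    # -> 3.0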
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/util_fns.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/util_fns.py
new file mode 100644
index 0000000..af297a0
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/util_fns.py
@@ -0,0 +1,190 @@
+import os
+import re
+import matplotlib.mlab as mlab
+import numpy as np
+import scipy.io as sio
+from scipy.fftpack import fft
+import pandas as pd
+from .movie import Movie, FullFieldFlashMovie
+
+
+pd.set_option('display.width', 1000)
+pd.set_option('display.max_columns', 100)
+
+
+#################################################
+def chunks(l, n):
+ """Yield successive n-sized chunks from l."""
+ for i in range(0, len(l), n):
+ yield l[i:i + n]
+
+
+##################################################
+def compute_FFT_OneCycle(FR, TF, downsample):
+    one_cyc = int((1000. / downsample) / TF)
+    FR_cyc = list(chunks(FR, one_cyc))
+    if TF == 15. or TF == 8.:
+        FR_cyc = FR_cyc[:-1]
+
+    FR_cyc_avg = np.mean(FR_cyc, axis=0)
+    y = FR_cyc_avg
+    AMP = 2 * np.abs(fft(y) / len(y))
+    F0 = 0.5 * AMP[0]  # DC component (mean rate)
+    assert (F0 - np.mean(y) < 1.e-4)
+    F1 = AMP[1]  # amplitude at the stimulus temporal frequency
+
+    return F0, F1
+
+
+##################################################
+def create_ff_mov(frame_rate, tst, tend, xrng, yrng):
+ ff_mov_on = FullFieldFlashMovie(np.arange(xrng), np.arange(yrng), tst, tend, frame_rate=frame_rate,
+ max_intensity=1).full(t_max=tend) # +0.5)
+ ff_mov_off = FullFieldFlashMovie(np.arange(xrng), np.arange(yrng), tst, tend, frame_rate=frame_rate,
+ max_intensity=-1).full(t_max=tend) # +0.5)
+
+ return ff_mov_on, ff_mov_off
+
+
+##################################################
+def create_grating_movie_list(gr_dir_name):
+ gr_fnames = os.listdir(gr_dir_name)
+    gr_fnames_ord = sorted(gr_fnames, key=lambda x: (int(re.sub(r'\D', '', x)), x))
+
+ gr_mov_list = []
+ for fname in gr_fnames_ord[:5]:
+ movie_file = os.path.join(gr_dir_name, fname)
+ m_file = sio.loadmat(movie_file)
+ m_data_raw = m_file['mov'].T
+ swid = np.shape(m_data_raw)[1]
+ res = int(np.sqrt(swid / (8 * 16)))
+ m_data = np.reshape(m_data_raw, (3000, 8 * res, 16 * res))
+ m1 = Movie(m_data[:500, :, :], row_range=np.linspace(0, 120, m_data.shape[1], endpoint=True), col_range=np.linspace(0, 120, m_data.shape[2], endpoint=True), frame_rate=1000.)
+ gr_mov_list.append(m1)
+
+ return gr_mov_list
+
+
+##################################################
+metrics_dir = os.path.join(os.path.dirname(__file__), 'cell_metrics')
+def get_data_metrics_for_each_subclass(ctype):
+ # Load csv file into dataframe
+ if ctype.find('_sus') >= 0:
+ prs_fn = os.path.join(metrics_dir, '{}_cells_v3.csv'.format(ctype))
+ else:
+ prs_fn = os.path.join(metrics_dir, '{}_cell_data.csv'.format(ctype))
+
+ prs_df = pd.read_csv(prs_fn)
+ N_class, nmet = np.shape(prs_df)
+
+ # Group data by subclasses based on max F0 vals
+ exp_df = prs_df.iloc[:, [13, 14, 17, 18, 28, 45, 46, 47, 48, 49, 50, 51, 52, 53,
+ 54]].copy() # Bl_lat,Wh_lat,Bl_si, wh_si, spont, 5 F0s, 5 F1s
+ sub_df = exp_df.iloc[:, [5, 6, 7, 8, 9]]
+ exp_df['max_tf'] = sub_df.idxmax(axis=1).values # sub_df.idxmax(axis=1)
+
+ exp_means = exp_df.groupby(['max_tf']).mean()
+ exp_std = exp_df.groupby(['max_tf']).std()
+ exp_nsub = exp_df.groupby(['max_tf']).size()
+
+ max_ind_arr = np.where(exp_nsub == np.max(exp_nsub))
+ max_nsub_ind = max_ind_arr[0][0]
+
+ # Get means and std dev for subclasses
+ exp_prs_dict = {}
+ for scn in np.arange(len(exp_nsub)):
+ f0_exp = exp_means.iloc[scn, 5:10].values
+ f1_exp = exp_means.iloc[scn, 10:].values
+ spont_exp = exp_means.iloc[scn, 4:5].values
+ if ctype.find('OFF') >= 0:
+ si_exp = exp_means.iloc[scn, 2:3].values
+ ttp_exp = exp_means.iloc[scn, 0:1].values
+ elif ctype.find('ON') >= 0:
+ si_exp = exp_means.iloc[scn, 3:4].values
+ ttp_exp = exp_means.iloc[scn, 1:2].values
+ else:
+ si_exp = np.NaN * np.ones((1, 5))
+ ttp_exp = np.NaN * np.ones((1, 2))
+
+ nsub = exp_nsub.iloc[scn]
+ if nsub == 1:
+ f0_std = np.mean(exp_std.iloc[max_nsub_ind, 5:10].values) * np.ones((1, 5))
+ f1_std = np.mean(exp_std.iloc[max_nsub_ind, 10:].values) * np.ones((1, 5))
+ spont_std = np.mean(exp_std.iloc[max_nsub_ind, 4:5].values) * np.ones((1, 5))
+ if ctype.find('OFF') >= 0:
+ si_std = np.mean(exp_std.iloc[max_nsub_ind, 2:3].values) * np.ones((1, 5))
+ elif ctype.find('ON') >= 0:
+ si_std = np.mean(exp_std.iloc[max_nsub_ind, 3:4].values) * np.ones((1, 5))
+ else:
+ si_std = np.NaN * np.ones((1, 5))
+
+ else:
+ f0_std = exp_std.iloc[scn, 5:10].values
+ f1_std = exp_std.iloc[scn, 10:].values
+ spont_std = exp_std.iloc[scn, 4:5].values
+ if ctype.find('OFF') >= 0:
+ si_std = exp_std.iloc[scn, 2:3].values
+ elif ctype.find('ON') >= 0:
+ si_std = exp_std.iloc[scn, 3:4].values
+ else:
+ si_std = np.NaN * np.ones((1, 5))
+
+ if ctype.find('t') >= 0:
+ tcross = 40.
+ si_inf_exp = (si_exp - tcross / 200.) * (200. / (200. - tcross - 40.))
+ elif ctype.find('s') >= 0:
+ tcross = 60.
+ si_inf_exp = (si_exp - tcross / 200.) * (200. / (200. - tcross - 40.))
+
+ dict_key = exp_means.iloc[scn].name[3:]
+ exp_prs_dict[dict_key] = {}
+ exp_prs_dict[dict_key]['f0_exp'] = f0_exp
+ exp_prs_dict[dict_key]['f1_exp'] = f1_exp
+ exp_prs_dict[dict_key]['spont_exp'] = spont_exp
+ exp_prs_dict[dict_key]['si_exp'] = si_exp
+ exp_prs_dict[dict_key]['si_inf_exp'] = si_inf_exp
+ exp_prs_dict[dict_key]['ttp_exp'] = ttp_exp
+ exp_prs_dict[dict_key]['f0_std'] = f0_std
+ exp_prs_dict[dict_key]['f1_std'] = f1_std
+ exp_prs_dict[dict_key]['spont_std'] = spont_std
+ exp_prs_dict[dict_key]['si_std'] = si_std
+ exp_prs_dict[dict_key]['nsub'] = nsub
+ exp_prs_dict[dict_key]['N_class'] = N_class
+
+ return exp_prs_dict
+
+
+##################################################
+def check_optim_results_against_bounds(bounds, opt_wts, opt_kpeaks):
+ bds_wts0 = bounds[0]
+ bds_wts1 = bounds[1]
+ bds_kp0 = bounds[2]
+ bds_kp1 = bounds[3]
+
+ opt_wts0 = opt_wts[0]
+ opt_wts1 = opt_wts[1]
+ opt_kp0 = opt_kpeaks[0]
+ opt_kp1 = opt_kpeaks[1]
+
+ if (opt_wts0 == bds_wts0[0] or opt_wts0 == bds_wts0[1]):
+ prm_on_bds = 'w0'
+ elif (opt_wts1 == bds_wts1[0] or opt_wts1 == bds_wts1[1]):
+ prm_on_bds = 'w1'
+ elif (opt_kp0 == bds_kp0[0] or opt_kp0 == bds_kp0[1]):
+ prm_on_bds = 'kp0'
+ elif (opt_kp1 == bds_kp1[0] or opt_kp1 == bds_kp1[1]):
+ prm_on_bds = 'kp1'
+ else:
+ prm_on_bds = 'None'
+
+ return prm_on_bds
+
+
+#######################################################
+def get_tcross_from_temporal_kernel(temporal_kernel):
+ max_ind = np.argmax(temporal_kernel)
+ min_ind = np.argmin(temporal_kernel)
+
+ temp_tcross_ind = mlab.cross_from_above(temporal_kernel[max_ind:min_ind], 0.0)
+ tcross_ind = max_ind + temp_tcross_ind[0]
+ return tcross_ind
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/utilities.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/utilities.py
new file mode 100644
index 0000000..69e61d9
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/lgnmodel/utilities.py
@@ -0,0 +1,123 @@
+import array
+import matplotlib.pyplot as plt
+import skimage.transform as transform
+import numpy as np
+import scipy.integrate as spi
+import scipy.optimize as sopt
+import warnings
+import scipy.interpolate as sinterp
+
+def get_vanhateren(filename, src_dir):
+    # Note: src_dir is currently unused.
+    with open(filename, 'rb') as handle:
+        s = handle.read()
+    arr = array.array('H', s)
+    arr.byteswap()
+    return np.array(arr, dtype='uint16').reshape(1024, 1536)
+
+def convert_tmin_tmax_framerate_to_trange(t_min, t_max, frame_rate):
+    duration = t_max - t_min
+    number_of_frames = duration*frame_rate  # assumes t_min/t_max in same time units as frame_rate
+    dt = 1./frame_rate
+    return t_min + np.arange(number_of_frames+1)*dt
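+
+# Example (commented): convert_tmin_tmax_framerate_to_trange(0., 1., 4.)
+# returns array([0., 0.25, 0.5, 0.75, 1.]).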
+
+def get_rotation_matrix(rotation, shape):
+ '''Angle in degrees'''
+
+ shift_y, shift_x = np.array(shape) / 2.
+ tf_rotate = transform.SimilarityTransform(rotation=np.deg2rad(rotation))
+ tf_shift = transform.SimilarityTransform(translation=[-shift_x, -shift_y])
+ tf_shift_inv = transform.SimilarityTransform(translation=[shift_x, shift_y])
+ return (tf_shift + (tf_rotate + tf_shift_inv))
+
+def get_translation_matrix(translation):
+ shift_x, shift_y = translation
+ tf_shift = transform.SimilarityTransform(translation=[-shift_x, shift_y])
+ return tf_shift
+
+
+def get_scale_matrix(scale, shape):
+ shift_y, shift_x = np.array(shape) / 2.
+ tf_rotate = transform.SimilarityTransform(scale=(1./scale[0], 1./scale[1]))
+ tf_shift = transform.SimilarityTransform(translation=[-shift_x, -shift_y])
+ tf_shift_inv = transform.SimilarityTransform(translation=[shift_x, shift_y])
+ return tf_shift + (tf_rotate + tf_shift_inv)
+
+def apply_transformation_matrix(image, matrix):
+ return transform.warp(image, matrix)
+
+
+def get_convolution_ind(curr_fi, flipped_t_inds, kernel, data):
+
+ flipped_and_offset_t_inds = flipped_t_inds + curr_fi
+
+ if np.all( flipped_and_offset_t_inds >= 0):
+
+ # No negative entries; still might be over the end though:
+ try:
+ return np.dot(data[flipped_and_offset_t_inds], kernel)
+
+ except IndexError:
+
+ # Requested some indices out of range of data:
+ indices_within_range = np.where(flipped_and_offset_t_inds < len(data))
+ valid_t_inds = flipped_and_offset_t_inds[indices_within_range]
+ valid_kernel = kernel[indices_within_range]
+ return np.dot(data[valid_t_inds], valid_kernel)
+
+    else:
+        # Some indices are negative, so restrict to the valid (non-negative) ones:
+        indices_within_range = np.where(flipped_and_offset_t_inds >= 0)
+        valid_t_inds = flipped_and_offset_t_inds[indices_within_range]
+        valid_kernel = kernel[indices_within_range]
+
+        return np.dot(data[valid_t_inds], valid_kernel)
+
+def get_convolution(t, frame_rate, flipped_t_inds, kernel, data):
+
+ # Get frame indices:
+ fi = frame_rate*float(t)
+ fim = int(np.floor(fi))
+ fiM = int(np.ceil(fi))
+
+ if fim != fiM:
+
+ # Linear interpolation:
+ sm = get_convolution_ind(fim, flipped_t_inds, kernel, data)
+ sM = get_convolution_ind(fiM, flipped_t_inds, kernel, data)
+ return sm*(1-(fi-fim)) + sM*(fi-fim)
+
+ else:
+
+ # Requested time is exactly one piece of data:
+ return get_convolution_ind(fim, flipped_t_inds, kernel, data)
+
+if __name__ == "__main__":
+ pass
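+# Demo sketch (commented; image values are illustrative):
+#
+#   image = np.zeros((101, 151))
+#   image[48:53] = 1
+#   m = get_rotation_matrix(30, image.shape) + get_translation_matrix((20, 0))
+#   rotated = apply_transformation_matrix(image, m)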
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/modules/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/modules/__init__.py
new file mode 100644
index 0000000..13185dd
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/modules/__init__.py
@@ -0,0 +1,2 @@
+from .record_rates import RecordRates
+from .create_spikes import SpikesGenerator
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/modules/base.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/modules/base.py
new file mode 100644
index 0000000..1bf7865
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/modules/base.py
@@ -0,0 +1,9 @@
+class SimModule(object):
+ def initialize(self, sim):
+ pass
+
+ def save(self, sim, **kwargs):
+ pass
+
+ def finalize(self, sim):
+ pass
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/modules/create_spikes.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/modules/create_spikes.py
new file mode 100644
index 0000000..d2acf96
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/modules/create_spikes.py
@@ -0,0 +1,99 @@
+import os
+import numpy as np
+import random
+import six
+
+from .base import SimModule
+from bmtk.utils.io.spike_trains import SpikeTrainWriter
+from bmtk.simulator.filternet.lgnmodel import poissongeneration as pg
+
+
+class SpikesGenerator(SimModule):
+ def __init__(self, spikes_file_csv=None, spikes_file=None, spikes_file_nwb=None, tmp_dir='output'):
+ def _get_file_path(file_name):
+ if file_name is None or os.path.isabs(file_name):
+ return file_name
+
+ return os.path.join(tmp_dir, file_name)
+
+ self._csv_fname = _get_file_path(spikes_file_csv)
+ self._save_csv = spikes_file_csv is not None
+
+ self._h5_fname = _get_file_path(spikes_file)
+ self._save_h5 = spikes_file is not None
+
+ self._nwb_fname = _get_file_path(spikes_file_nwb)
+ self._save_nwb = spikes_file_nwb is not None
+
+ self._tmpdir = tmp_dir
+
+ self._spike_writer = SpikeTrainWriter(tmp_dir=tmp_dir)
+
+    def save(self, sim, gid, times, rates):
+        try:
+            spike_trains = np.array(f_rate_to_spike_train(times*1000.0, rates, np.random.randint(10000),
+                                                          1000.*min(times), 1000.*max(times), 0.1))
+        except Exception:
+            # Fall back to the slower generator; times are converted to
+            # milliseconds, hence the multiplication by 1000.
+            spike_trains = 1000.0*np.array(pg.generate_inhomogenous_poisson(times, rates,
+                                                                            seed=np.random.randint(10000)))
+
+ self._spike_writer.add_spikes(times=spike_trains, gid=gid)
+
+ def finalize(self, sim):
+ self._spike_writer.flush()
+
+ if self._save_csv:
+ self._spike_writer.to_csv(self._csv_fname)
+
+ if self._save_h5:
+ self._spike_writer.to_hdf5(self._h5_fname)
+
+ if self._save_nwb:
+ self._spike_writer.to_nwb(self._nwb_fname)
+
+ self._spike_writer.close()
+
+
+def f_rate_to_spike_train(t, f_rate, random_seed, t_window_start, t_window_end, p_spike_max):
+    # t and f_rate are lists of time stamps and corresponding firing-rate values;
+    # they are assumed to have the same length, with time strictly increasing.
+    # p_spike_max is the maximal spiking probability allowed within a time bin; it
+    # determines the bin size and should be less than 1.
+
+    if np.max(f_rate) * np.max(np.diff(t))/1000. > 0.1:  # divide by 1000 to convert ms to seconds
+        print('Firing rate too high for this time interval; spikes would not be estimated '
+              'correctly. They will instead be calculated with the slower inhomogeneous '
+              'Poisson generating function.')
+        raise Exception()
+
+ spike_times = []
+
+ # Use seed(...) to instantiate the random number generator. Otherwise, current system time is used.
+ random.seed(random_seed)
+
+ # Assume here for each pair (t[k], f_rate[k]) that the f_rate[k] value applies to the time interval [t[k], t[k+1]).
+ for k in six.moves.range(0, len(f_rate)-1):
+ t_k = t[k]
+ t_k_1 = t[k+1]
+ if ((t_k >= t_window_start) and (t_k_1 <= t_window_end)):
+ delta_t = t_k_1 - t_k
+ # Average number of spikes expected in this interval (note that firing rate is in Hz and time is in ms).
+ av_N_spikes = f_rate[k] / 1000.0 * delta_t
+
+ if (av_N_spikes > 0):
+ if (av_N_spikes <= p_spike_max):
+ N_bins = 1
+ else:
+ N_bins = int(np.ceil(av_N_spikes / p_spike_max))
+
+ t_base = t[k]
+ t_bin = 1.0 * delta_t / N_bins
+ p_spike_bin = 1.0 * av_N_spikes / N_bins
+                for i_bin in six.moves.range(0, N_bins):
+                    rand_tmp = random.random()  # 'random' is the module, so call random.random()
+                    if rand_tmp < p_spike_bin:
+                        spike_t = t_base + random.random() * t_bin
+                        spike_times.append(spike_t)
+
+ t_base += t_bin
+
+ return spike_times
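+
+# Usage sketch (commented; values are illustrative):
+#
+#   t = np.arange(0., 1000., 1.)    # ms
+#   fr = 20.*np.ones(len(t))        # Hz
+#   spikes = f_rate_to_spike_train(t, fr, random_seed=42,
+#                                  t_window_start=0., t_window_end=1000.,
+#                                  p_spike_max=0.1)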
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/modules/record_rates.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/modules/record_rates.py
new file mode 100644
index 0000000..b2978a3
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/modules/record_rates.py
@@ -0,0 +1,29 @@
+import os
+import csv
+
+from .base import SimModule
+
+
+class RecordRates(SimModule):
+ def __init__(self, csv_file=None, h5_file=None, tmp_dir='output'):
+ csv_file = csv_file if csv_file is None or os.path.isabs(csv_file) else os.path.join(tmp_dir, csv_file)
+ self._save_to_csv = csv_file is not None
+ self._tmp_csv_file = csv_file if self._save_to_csv else os.path.join(tmp_dir, '__tmp_rates.csv')
+
+ self._tmp_csv_fhandle = open(self._tmp_csv_file, 'w')
+ self._tmp_csv_writer = csv.writer(self._tmp_csv_fhandle, delimiter=' ')
+
+ self._save_to_h5 = h5_file is not None
+
+ def save(self, sim, gid, times, rates):
+ for t, r in zip(times, rates):
+ self._tmp_csv_writer.writerow([gid, t, r])
+ self._tmp_csv_fhandle.flush()
+
+ def finalize(self, sim):
+ if self._save_to_h5:
+ raise NotImplementedError
+
+ self._tmp_csv_fhandle.close()
+ if not self._save_to_csv:
+ os.remove(self._tmp_csv_file)
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/pyfunction_cache.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/pyfunction_cache.py
new file mode 100644
index 0000000..9ac949a
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/pyfunction_cache.py
@@ -0,0 +1,98 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import types
+from functools import wraps
+
+
+class _PyFunctions(object):
+    """Structure for holding custom user-defined python functions.
+
+    Stores a set of functions created by the user. Should not be accessed directly;
+    rather, use the decorators or setter functions, and use the py_modules class
+    variable to access individual functions. Is divided up into
+        synaptic_weight: functions for calculating synaptic weight.
+        cell_model: should return NEURON cell hobj.
+        synapse model: should return a NEURON synapse object.
+    """
+ def __init__(self):
+ self.__cell_processors = {}
+
+ def clear(self):
+ self.__cell_processors.clear()
+
+ @property
+ def cell_processors(self):
+ return self.__cell_processors.keys()
+
+ def cell_processor(self, name):
+ return self.__cell_processors[name]
+
+ def add_cell_processor(self, name, func, overwrite=True):
+ if overwrite or name not in self.__cell_processors:
+ self.__cell_processors[name] = func
+
+    def __repr__(self):
+        return repr(self.__cell_processors)
+
+
+py_modules = _PyFunctions()
+
+
+def cell_processor(*wargs, **wkwargs):
+ """A decorator for registering NEURON cell loader functions."""
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+ py_modules.add_cell_processor(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_cell_processor(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
+
+def add_cell_processor(func, name=None, overwrite=True):
+ assert(callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_cell_processor(func_name, func, overwrite)
+
+
+def load_py_modules(cell_processors):
+ # py_modules.clear()
+ assert (isinstance(cell_processors, types.ModuleType))
+ for f in [cell_processors.__dict__.get(f) for f in dir(cell_processors)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_cell_processor(f.__name__, f)
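+
+# Usage sketch (commented; the processor below is an illustrative assumption):
+#
+#   @cell_processor
+#   def my_processor(cell, **params):
+#       return cell
+#
+#   py_modules.cell_processor('my_processor')   # -> my_processor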
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/transfer_functions.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/transfer_functions.py
new file mode 100644
index 0000000..6517719
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/transfer_functions.py
@@ -0,0 +1 @@
+from bmtk.simulator.filternet.lgnmodel.transferfunction import *
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/filternet/utils.py b/bmtk-vb/build/lib/bmtk/simulator/filternet/utils.py
new file mode 100644
index 0000000..c01045c
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/filternet/utils.py
@@ -0,0 +1 @@
+from bmtk.simulator.filternet.lgnmodel.util_fns import *
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/Image_Library.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/Image_Library.py
new file mode 100644
index 0000000..506a040
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/Image_Library.py
@@ -0,0 +1,105 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import os
+from PIL import Image
+
+# Image_Batch
+# .data (image_data)
+# .image_dir, .new_size
+
+
+# add seed for random
+# call should return indices into im_list
+class Image_Experiment(object):
+
+    def __init__(self, stuff):
+        # Stub: attributes are expected to be populated from 'stuff'; as written,
+        # this class is a placeholder rather than working code.
+        self.image_dir = None
+        self.new_size = None
+        self.sample_indices = None
+        self.im_list = None
+        # creation of pandas table, template
+
+
+class Image_Library (object):
+ def __init__(self, image_dir,new_size=(128,192)): # NOTE: change this so that sequential is a class variable, not an argument to the call
+ self.image_dir = image_dir
+ self.new_size = new_size
+
+ im_list = os.listdir(image_dir)
+
+ remove_list = []
+        for im in im_list:
+            if not im.endswith(('.tiff', '.JPEG', '.jpg')):
+                remove_list.append(im)
+
+ for im in remove_list:
+ im_list.remove(im)
+
+ self.im_list = im_list
+
+ self.current_location = 0 # used for sequential samples
+ self.lib_size = len(self.im_list)
+
+ def __call__(self,num_samples, sequential=False):
+
+ image_data = np.zeros([num_samples,self.new_size[0],self.new_size[1],1],dtype=np.float32)
+
+ if sequential:
+ if self.lib_size-self.current_location > num_samples:
+ sample_indices = np.arange(self.current_location,self.current_location + num_samples)
+ self.current_location += num_samples
+ else:
+ sample_indices = np.arange(self.current_location,self.lib_size)
+ self.current_location = 0
+ else:
+ sample_indices = np.random.randint(0,len(self.im_list),num_samples)
+
+ for i,s in enumerate(sample_indices):
+ im = Image.open(os.path.join(self.image_dir,self.im_list[s]))
+ im = im.convert('L')
+ im = im.resize((self.new_size[1],self.new_size[0]))
+ image_data[i,:,:,0] = np.array(im,dtype=np.float32)
+
+ return image_data
+
+    def create_experiment(self):
+        # Stub: not fully implemented; Image_Experiment is a placeholder.
+        data = self(1)
+        return Image_Experiment(data)
+
+ def experiment_from_table(self,table):
+ pass
+
+ def to_h5(self,sample_indices=None):
+ pass
+
+ def template(self):
+ pass
+
+ def table(self,*params):
+ pass
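+
+# Usage sketch (commented; the directory path is an illustrative assumption):
+#
+#   im_lib = Image_Library('/path/to/images', new_size=(128, 192))
+#   batch = im_lib(10)                 # 10 random images, shape (10, 128, 192, 1)
+#   seq = im_lib(10, sequential=True)  # next 10 images in order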
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/Image_Library_Supervised.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/Image_Library_Supervised.py
new file mode 100644
index 0000000..756b62b
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/Image_Library_Supervised.py
@@ -0,0 +1,93 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from PIL import Image
+import numpy as np
+import os
+
+class Image_Library_Supervised (object):
+
+ def __init__(self,image_dir,new_size=(256,256)):
+
+ self.categories = os.listdir(image_dir)
+
+ self.num_categories = len(self.categories) #len(image_dir_list)
+ self.image_dir_list = [os.path.join(image_dir,x) for x in self.categories]
+ self.new_size = new_size
+
+
+ # self.categories = []
+ # for d in self.image_dir_list:
+ # self.categories += [os.path.basename(d)]
+
+        self.im_lists = {}
+        for i, cat in enumerate(self.categories):
+            d = self.image_dir_list[i]
+            if os.path.basename(d).startswith('.'):
+                continue  # skip hidden directories
+            self.im_lists[cat] = os.listdir(d)
+
+ for cat in self.im_lists:
+ remove_list = []
+ for im in self.im_lists[cat]:
+ if im[-4:]!='.jpg':
+ remove_list.append(im)
+
+ for im in remove_list:
+ self.im_lists[cat].remove(im)
+
+
+ self.current_location = np.zeros(len(self.categories)) # used for sequential samples
+ self.lib_size = [len(self.im_lists[x]) for x in self.categories]
+ #self.lib_size = len(self.im_list)
+
+ def __call__(self,num_samples,sequential=False):
+
+ image_data = np.zeros([self.num_categories*num_samples,self.new_size[0],self.new_size[1],1],dtype=np.float32)
+
+ # y_vals = np.tile(np.arange(self.num_categories),(num_samples,1)).T.flatten()
+ # y_vals = y_vals.astype(np.float32)
+
+ y_vals = np.zeros([num_samples*self.num_categories,self.num_categories],np.float32)
+
+ for i,cat in enumerate(self.categories):
+
+ y_vals[num_samples*i:num_samples*i+num_samples].T[i] = 1
+
+ if sequential:
+ if self.lib_size[i]-self.current_location[i] > num_samples:
+ sample_indices = np.arange(self.current_location[i],self.current_location[i] + num_samples,dtype=np.int64)
+ self.current_location[i] += num_samples
+ else:
+ sample_indices = np.arange(self.current_location[i],self.lib_size[i],dtype=np.int64)
+ self.current_location[i] = 0
+ else:
+ sample_indices = np.random.randint(0,len(self.im_lists[cat]),num_samples)
+
+ for j,s in enumerate(sample_indices):
+ im = Image.open(os.path.join(self.image_dir_list[i],self.im_lists[cat][s]))
+ im = im.convert('L')
+ im = im.resize((self.new_size[1],self.new_size[0]))
+ index = j + num_samples*i
+ image_data[index,:,:,0] = np.array(im,dtype=np.float32)
+
+ return y_vals, image_data
+
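+# Usage sketch (commented; assumes one subdirectory of .jpg images per category):
+#
+#   lib = Image_Library_Supervised('/path/to/categories', new_size=(256, 256))
+#   y_vals, image_data = lib(32)   # one-hot labels and a (32*n_categories, 256, 256, 1) batch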
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/__init__.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/analysis/LocallySparseNoise.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/analysis/LocallySparseNoise.py
new file mode 100644
index 0000000..60b9228
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/analysis/LocallySparseNoise.py
@@ -0,0 +1,105 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import pandas as pd
+import h5py
+
+
+class LocallySparseNoise (object):
+
+ def __init__(self,data_file_name):
+
+ self.stim_table = pd.read_hdf(data_file_name,'stim_table')
+ self.node_table = pd.read_hdf(data_file_name,'node_table')
+
+
+ self.data_file_name = data_file_name
+
+ data = h5py.File(self.data_file_name,'r')
+
+        self.data_sets = list(data.keys())
+        self.data_sets.remove('stim_table')
+        self.data_sets.remove('node_table')
+        self.data_sets.remove('stim_template')
+
+        self.stim_template = data['stim_template'][()]
+
+ data.close()
+
+ @staticmethod
+ def rf(response, stim_template, stim_shape):
+ T = stim_template.shape[0]
+ rf_shape = tuple(stim_template.shape[1:])
+
+ unit_shape = tuple(response.shape[1:])
+
+ response.resize([T,np.prod(unit_shape)])
+
+ rf = np.dot(response.T,stim_template)
+
+ rf_new_shape = tuple([rf.shape[0]] + list(rf_shape))
+ rf.resize(rf_new_shape)
+ rf_final_shape = tuple(list(unit_shape) + list(stim_shape))
+ rf.resize(rf_final_shape)
+
+ return rf
+
+ def compute_receptive_fields(self, dtype=np.float32):
+
+ output = h5py.File(self.data_file_name[:-3]+'_analysis.ic','a')
+ data = h5py.File(self.data_file_name,'r')
+
+        # convert to +/-1 or 0
+        stim_template = data['stim_template'][()].astype(dtype)
+        stim_template = stim_template - 127
+        stim_template = np.sign(stim_template)
+
+ stim_shape = tuple(stim_template.shape[1:])
+ T = stim_template.shape[0]
+
+ stim_template.resize([T,np.prod(stim_shape)])
+
+ stim_template_on = stim_template.copy()
+ stim_template_off = stim_template.copy()
+
+ stim_template_on[stim_template_on<0] = 0.0
+ stim_template_off[stim_template_off>0] = 0.0
+
+        for data_set in self.data_sets:
+
+            response = data[data_set][()]
+            response = response - np.mean(response, axis=0)
+
+ key_onoff = data_set+'/lsn/on_off'
+ key_on = data_set+'/lsn/on'
+ key_off = data_set+'/lsn/off'
+ for key in [key_onoff, key_on, key_off]:
+ if key in output:
+ del output[key]
+
+ output[key_onoff] = self.rf(response, stim_template, stim_shape)
+ output[key_on] = self.rf(response, stim_template_on, stim_shape)
+ output[key_off] = self.rf(response, stim_template_off, stim_shape)
+
+ data.close()
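+
+# Usage sketch (commented; the file name is an illustrative assumption):
+#
+#   lsn = LocallySparseNoise('lsn_responses.h5')
+#   lsn.compute_receptive_fields()   # writes on/off RFs to lsn_responses_analysis.ic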
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/analysis/StaticGratings.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/analysis/StaticGratings.py
new file mode 100644
index 0000000..10a019b
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/analysis/StaticGratings.py
@@ -0,0 +1,101 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import pandas as pd
+import h5py
+import sys
+import os
+
+class StaticGratings (object):
+
+ def __init__(self,data_file_name):
+
+ self.stim_table = pd.read_hdf(data_file_name,'stim_table')
+ self.node_table = pd.read_hdf(data_file_name,'node_table')
+ self.tunings_file = None
+
+ f = lambda label: self.stim_table.dropna().drop_duplicates([label])[label].sort_values(inplace=False).values
+
+ self.orientations = f('orientation')
+ self.spatial_frequencies = f('spatial_frequency')
+ self.phases = f('phase')
+
+ self.data_file_name = data_file_name
+
+ data = h5py.File(self.data_file_name,'r')
+
+        self.data_sets = list(data.keys())
+        self.data_sets.remove('stim_table')
+        self.data_sets.remove('node_table')
+        self.data_sets.remove('stim_template')
+
+ data.close()
+
+ def tuning_matrix(self, response, dtype=np.float32):
+
+ tuning_shape = tuple([len(self.orientations), len(self.spatial_frequencies), len(self.phases)] + list(response.shape[1:]))
+
+ tuning_matrix = np.empty(tuning_shape, dtype=dtype)
+
+ for i,ori in enumerate(self.orientations):
+ for j,sf in enumerate(self.spatial_frequencies):
+ for k,ph in enumerate(self.phases):
+
+ index = self.stim_table[(self.stim_table.spatial_frequency==sf) & (self.stim_table.orientation==ori) & (self.stim_table.phase==ph)].index
+
+ tuning_matrix[i,j,k] = np.mean(response[index],axis=0)
+
+ return tuning_matrix
+
+ def compute_all_tuning(self, dtype=np.float32, force=False):
+ self.tunings_file = self.data_file_name[:-3]+'_analysis.ic'
+ if os.path.exists(self.tunings_file) and not force:
+ print('Using existing tunings file {}.'.format(self.tunings_file))
+ return
+
+ output = h5py.File(self.tunings_file,'a')
+ data = h5py.File(self.data_file_name,'r')
+
+ for i, data_set in enumerate(self.data_sets):
+ sys.stdout.write( '\r{0:.02f}'.format(float(i)*100/len(self.data_sets))+'% done')
+ sys.stdout.flush()
+
+            response = data[data_set][()]
+
+ tuning = self.tuning_matrix(response, dtype=dtype)
+
+ key = data_set+'/sg/tuning'
+ if key in output:
+ del output[key]
+ output[key] = tuning
+
+ sys.stdout.write( '\r{0:.02f}'.format(float(100))+'% done')
+ sys.stdout.flush()
+
+ data.close()
+
+ def get_tunings_file(self):
+ if self.tunings_file is None:
+ self.compute_all_tuning()
+
+ return h5py.File(self.tunings_file, 'r')
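+
+# Usage sketch (commented; the file name is an illustrative assumption):
+#
+#   sg = StaticGratings('sg_responses.h5')
+#   sg.compute_all_tuning()
+#   tunings = sg.get_tunings_file()   # h5py file with '<node>/sg/tuning' datasets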
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/analysis/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/analysis/__init__.py
new file mode 100644
index 0000000..2d56a26
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/analysis/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/C_Layer.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/C_Layer.py
new file mode 100644
index 0000000..1489c89
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/C_Layer.py
@@ -0,0 +1,260 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import tensorflow as tf
+import os
+import pandas as pd
+
+class C_Layer (object):
+ def __init__(self,node_name,S_Layer_input,bands):
+ '''
+ :type S_Layer: S_Layer object
+ :param S_Layer: instance of S_Layer object that serves as input for this C_Layer
+
+ :type bands: list
+ :param bands: bands[i] = [[list of frequency indices for S_layer over which to pool], grid_size, sample_step]
+ '''
+ self.node_name = node_name
+ self.input = S_Layer_input.input
+
+ self.tf_sess = S_Layer_input.tf_sess
+
+ s_output = S_Layer_input.output
+
+ self.K = S_Layer_input.K
+
+ band_output = {}
+
+ num_bands = len(bands)
+
+ self.band_output = {}
+
+ self.band_shape = {}
+
+ with tf.name_scope(self.node_name):
+ for b in range(num_bands):
+ bands_to_pool, grid_size, sample_step = bands[b]
+
+ sub_band_shape = []
+ for sub_band in bands_to_pool:
+ sub_band_shape += [S_Layer_input.band_shape[sub_band]]
+
+ max_band_shape = sub_band_shape[0]
+ for shape in sub_band_shape[1:]:
+ if shape[0] > max_band_shape[0]: max_band_shape[0] = shape[0]
+ if shape[1] > max_band_shape[1]: max_band_shape[1] = shape[1]
+
+ # print "max_band_shape = ", max_band_shape
+ # for sub_band in bands_to_pool:
+ # print "\tsub_band_shape = ", S_Layer_input.band_shape[sub_band]
+ # print "\tinput band shape = ", s_output[sub_band].get_shape()
+
+ #resize all inputs to highest resolution so that we can maxpool over equivalent scales
+ resize_ops = []
+ for sub_band in bands_to_pool:
+ op = s_output[sub_band]
+ # resize_ops += [tf.image.resize_images(op,max_band_shape[0],max_band_shape[1],method=ResizeMethod.NEAREST_NEIGHBOR)]
+ resize_ops += [tf.image.resize_nearest_neighbor(op,max_band_shape)]
+ #print "\tresize op shape = ", resize_ops[-1].get_shape()
+
+ #take the maximum for each input channel, element-wise
+ max_channel_op = resize_ops[0]
+ for op in resize_ops[1:]:
+ max_channel_op = tf.maximum(op,max_channel_op)
+
+ #print "\tmax channel op shape = ", max_channel_op.get_shape()
+
+ # new shape for mode 'SAME'
+ # new_band_shape = (max_band_shape[0]/sample_step, max_band_shape[1]/sample_step)
+ new_band_shape = np.ceil(np.array(max_band_shape)/float(sample_step)).astype(np.int64)
+
+ # make sure the grid_size and sample_step aren't bigger than the image
+ if max_band_shape[0] < grid_size:
+ y_size = max_band_shape[0]
+ else:
+ y_size = grid_size
+
+ if max_band_shape[1] < grid_size:
+ x_size = max_band_shape[1]
+ else:
+ x_size = grid_size
+
+ if sample_step > max_band_shape[0]:
+ y_step = max_band_shape[0]
+ new_band_shape = (1,new_band_shape[1])
+ else:
+ y_step = sample_step
+ if sample_step > max_band_shape[1]:
+ x_step = max_band_shape[1]
+ new_band_shape = (new_band_shape[0],1)
+ else:
+ x_step = sample_step
+
+ # max pool
+ max_pool_op = tf.nn.max_pool(max_channel_op,[1,y_size,x_size,1],strides=[1,y_step,x_step,1],padding='SAME')
+
+ self.band_shape[b] = new_band_shape
+ #print "max_band_shape: ", max_band_shape
+
+ self.band_output[b]=max_pool_op
+
+ self.num_units = 0
+ for b in self.band_shape:
+ self.num_units += np.prod(self.band_shape[b])*self.K
+
+ self.output = self.band_output
+
+ def __repr__(self):
+ return "C_Layer"
+
+ def compute_output(self,X,band):
+ return self.tf_sess.run(self.output[band],feed_dict={self.input:X})
+
+ # def get_compute_ops(self):
+ #
+ # node_table = pd.DataFrame(columns=['node','band'])
+ # compute_list = []
+ #
+ # for band in self.band_output:
+ # node_table = node_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+ #
+ # compute_list.append(self.output[band])
+ #
+ # return node_table, compute_list
+
+ def get_compute_ops(self,unit_table=None):
+
+ compute_list = []
+
+ if unit_table is not None:
+
+ for i, row in unit_table.iterrows():
+
+ if 'y' in unit_table:
+ node, band, y, x = row['node'], int(row['band']), int(row['y']), int(row['x'])
+ compute_list.append(self.output[band][:,y,x,:])
+
+ elif 'band' in unit_table:
+ node, band = row['node'], int(row['band'])
+ compute_list.append(self.output[band])
+
+ else:
+ return self.get_all_compute_ops()
+
+ else:
+ return self.get_all_compute_ops()
+
+ return unit_table, compute_list
+
+ def get_all_compute_ops(self):
+
+ compute_list = []
+ unit_table = pd.DataFrame(columns=['node','band'])
+ for band in self.band_output:
+ unit_table = unit_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+
+ compute_list.append(self.output[band])
+
+ return unit_table, compute_list
+
+
+def test_C1_Layer():
+
+ from S1_Layer import S1_Layer
+ import matplotlib.pyplot as plt
+
+ fig_dir = 'Figures'
+ # First we need an S1 Layer
+ # these parameters are taken from Serre, et al PNAS for HMAX
+ freq_channel_params = [ [7,2.8,3.5],
+ [9,3.6,4.6],
+ [11,4.5,5.6],
+ [13,5.4,6.8],
+ [15,6.3,7.9],
+ [17,7.3,9.1],
+ [19,8.2,10.3],
+ [21,9.2,11.5],
+ [23,10.2,12.7],
+ [25,11.3,14.1],
+ [27,12.3,15.4],
+ [29,13.4,16.8],
+ [31,14.6,18.2],
+ [33,15.8,19.7],
+ [35,17.0,21.2],
+ [37,18.2,22.8],
+ [39,19.5,24.4]]
+
+ orientations = np.arange(4)*np.pi/4
+
+    # S1_Layer expects [pixels, sigma, lambda, stride] per band; the legacy params above
+    # lack the stride, so assume stride 1 here
+    freq_channel_params = [params + [1] for params in freq_channel_params]
+
+    input_shape = (128,192)
+    s1 = S1_Layer('s1',input_shape,freq_channel_params,orientations)
+
+ # Now we need to define a C1 Layer
+ bands = [ [[0,1], 8, 3],
+ [[2,3], 10, 5],
+ [[4,5], 12, 7],
+ [[6,7], 14, 8],
+ [[8,9], 16, 10],
+ [[10,11], 18, 12],
+ [[12,13], 20, 13],
+ [[14,15,16], 22, 15]]
+
+    c1 = C_Layer('c1',s1,bands)
+
+ # Test c1 on an image
+ from isee_engine.mintnet.Image_Library import Image_Library
+
+ image_dir = '/Users/michaelbu/Code/HCOMP/SampleImages'
+
+ im_lib = Image_Library(image_dir)
+
+ image_data = im_lib(1)
+
+ fig, ax = plt.subplots(1)
+ ax.imshow(image_data[0,:,:,0],cmap='gray')
+
+ print(image_data.shape)
+
+ fig, ax = plt.subplots(len(bands),len(orientations)*2)
+ result = {}
+ for b in range(len(bands)):
+ result[b] = c1.compute_output(image_data,b)
+ print(result[b].shape)
+ n, y,x,K = result[b].shape
+
+ for k in range(K):
+ #print result[b][i].shape
+ # y = i/8
+ # x = i%8
+ # ax[y,x].imshow(result[b][0,i],interpolation='nearest',cmap='gray')
+ # ax[y,x].axis('off')
+
+ ax[b,k].imshow(result[b][0,:,:,k],interpolation='nearest',cmap='gray')
+ ax[b,k].axis('off')
+
+ fig.savefig(os.path.join(fig_dir,'c1_layer.tiff'))
+ plt.show()
+
+if __name__=='__main__':
+
+ test_C1_Layer()
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/Readout_Layer.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/Readout_Layer.py
new file mode 100644
index 0000000..9126ea1
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/Readout_Layer.py
@@ -0,0 +1,243 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import tensorflow as tf
+from bmtk.simulator.mintnet.Image_Library_Supervised import Image_Library_Supervised
+import h5py
+
+class Readout_Layer (object):
+
+ def __init__(self,node_name,input_layer,K,lam,alt_image_dir='',file_name=None):
+
+ self.node_name = node_name
+ self.K = K
+ self.input_layer = input_layer
+ self.weight_file = file_name
+ self.lam = lam
+
+ self.alt_image_dir = alt_image_dir
+
+        file_open = False
+        if file_name is None:
+            new_weights = True
+            self.train_state = False
+        else:
+            weight_h5 = h5py.File(self.weight_file,'a')
+            file_open = True
+
+ if self.node_name in weight_h5.keys():
+
+ new_weights = False
+ weight_data = weight_h5[self.node_name]['weights'].value
+ self.train_state = weight_h5[self.node_name]['train_state'].value
+
+ else:
+
+ new_weights = True
+ self.train_state =False
+ weight_h5.create_group(self.node_name)
+ weight_h5[self.node_name]['train_state']=self.train_state
+
+ self.input = self.input_layer.input
+ #self.tf_sess = self.input_layer.tf_sess
+ self.tf_sess = tf.Session()
+
+ self.w_shape = (self.input_layer.K,self.K)
+
+ if new_weights:
+ #weights=1.0*np.ones(self.w_shape).astype(np.float32)
+ weights=100000*np.random.normal(size=self.w_shape).astype(np.float32)
+            if file_name is not None:
+ weight_h5[self.node_name].create_dataset('weights',shape=weights.shape,dtype=np.float32,compression='gzip',compression_opts=9)
+ weight_h5[self.node_name]['weights'][...]=weights
+ else:
+ weights=weight_data
+
+ self.weights = tf.Variable(weights.astype(np.float32),trainable=True,name='weights')
+ self.weights.initializer.run(session=self.tf_sess)
+ self.bias = tf.Variable(np.zeros(self.K,dtype=np.float32),trainable=True,name='bias')
+ self.bias.initializer.run(session=self.tf_sess)
+
+ # sigmoid doesn't seem to work well, and is slow
+ #self.output = tf.sigmoid(tf.matmul(self.input_layer.output,W)+self.bias)
+
+ self.input_placeholder = tf.placeholder(tf.float32,shape=(None,self.input_layer.K))
+ #self.output = tf.nn.softmax(tf.matmul(self.input_placeholder,self.weights) + self.bias)
+ self.linear = tf.matmul(self.input_placeholder,self.weights) #+ self.bias
+
+ self.output = tf.sign(self.linear)
+ #self.output = tf.nn.softmax(self.linear)
+ #self.output = tf.nn.softmax(tf.matmul(self.input_layer.output,self.weights) + self.bias)
+
+ self.y = tf.placeholder(tf.float32,shape=(None,self.K))
+
+
+ #self.cost = -tf.reduce_mean(self.y*tf.log(self.output))
+ self.cost = tf.reduce_mean((self.y - self.output)**2) + self.lam*(tf.reduce_sum(self.weights))**2
+
+ # not gonna do much with current cost function :)
+ self.train_step = tf.train.GradientDescentOptimizer(0.1).minimize(self.cost)
+
+ self.num_units = self.K
+
+ if file_open:
+ weight_h5.close()
+
+ def compute_output(self,X):
+
+ #return self.tf_sess.run(self.output,feed_dict={self.input:X})
+
+ rep = self.input_layer.tf_sess.run(self.input_layer.output,feed_dict={self.input:X})
+
+ return self.tf_sess.run(self.output,feed_dict={self.input_placeholder:rep})
+
+ def predict(self,X):
+
+ y_vals = self.compute_output(X)
+
+ return np.argmax(y_vals,axis=1)
+
+ def train(self,image_dir,batch_size=10,image_shape=(256,256),max_iter=200):
+
+ print("Training")
+
+ im_lib = Image_Library_Supervised(image_dir,new_size=image_shape)
+
+ # let's use the linear regression version for now
+ training_lib_size = 225
+ y_vals, image_data = im_lib(training_lib_size,sequential=True)
+
+ y_vals = y_vals.T[0].T
+ y_vals = 2*y_vals - 1.0
+
+ print(y_vals)
+ # print y_vals
+ # print image_data.shape
+
+ # import matplotlib.pyplot as plt
+ # plt.imshow(image_data[0,:,:,0])
+ # plt.figure()
+ # plt.imshow(image_data[1,:,:,0])
+ # plt.figure()
+ # plt.imshow(image_data[9,:,:,0])
+
+ # plt.show()
+
+ num_batches = int(np.ceil(2*training_lib_size/float(batch_size)))
+ rep_list = []
+ for i in range(num_batches):
+ print(i)
+ # if i==num_batches-1:
+ # rep = self.input_layer.tf_sess.run(self.input_layer.output,feed_dict={self.input:image_data[i*batch_size:i*batch_size + training_lib_size%batch_size]})
+ # else:
+ rep = self.input_layer.tf_sess.run(self.input_layer.output,feed_dict={self.input:image_data[i*batch_size:(i+1)*batch_size]})
+ rep_list += [rep]
+
+ rep = np.vstack(rep_list)
+
+
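+        # Closed-form ridge regression on the frozen features:
+        # W = (R^T R + lam*I)^{-1} R^T y, where R is the (n_images x n_features) response matrix.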
+ C = np.dot(rep.T,rep) + self.lam*np.eye(self.input_layer.K)
+ W = np.dot(np.linalg.inv(C),np.dot(rep.T,y_vals)).astype(np.float32)
+
+ self.tf_sess.run(self.weights.assign(tf.expand_dims(W,1)))
+
+ train_result = self.tf_sess.run(self.output,feed_dict={self.input_placeholder:rep})
+
+ print(W)
+ print(train_result.flatten())
+ print(y_vals.flatten())
+ #print (train_result.flatten() - y_vals.flatten())
+ print("train error = ", np.mean((train_result.flatten() != y_vals.flatten())))
+
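+        # Signal-detection d-prime on the training set: z(hit rate) - z(false-alarm rate)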
+ from scipy.stats import norm
+ target_mask = y_vals==1
+ dist_mask = np.logical_not(target_mask)
+ hit_rate = np.mean(train_result.flatten()[target_mask] == y_vals.flatten()[target_mask])
+ false_alarm = np.mean(train_result.flatten()[dist_mask] != y_vals.flatten()[dist_mask])
+ dprime = norm.ppf(hit_rate) - norm.ppf(false_alarm)
+ print("dprime = ", dprime)
+
+ # Test error
+ im_lib = Image_Library_Supervised('/Users/michaelbu/Data/SerreOlivaPoggioPNAS07/Train_Test_Set/Test',new_size=image_shape)
+
+ testing_lib_size = 300
+ y_vals_test, image_data_test = im_lib(testing_lib_size,sequential=True)
+
+ y_vals_test = y_vals_test.T[0].T
+ y_vals_test = 2*y_vals_test - 1.0
+
+ num_batches = int(np.ceil(2*testing_lib_size/float(batch_size)))
+ rep_list = []
+ for i in range(num_batches):
+ print(i)
+ # if i==num_batches-1:
+ # rep = self.input_layer.tf_sess.run(self.input_layer.output,feed_dict={self.input:image_data[i*batch_size:i*batch_size + training_lib_size%batch_size]})
+ # else:
+ rep = self.input_layer.tf_sess.run(self.input_layer.output,feed_dict={self.input:image_data_test[i*batch_size:(i+1)*batch_size]})
+ rep_list += [rep]
+
+ rep_test = np.vstack(rep_list)
+
+ test_result = self.tf_sess.run(self.output,feed_dict={self.input_placeholder:rep_test})
+
+ #print test_result
+ print("test error = ", np.mean((test_result.flatten() != y_vals_test.flatten())))
+ target_mask = y_vals_test==1
+ dist_mask = np.logical_not(target_mask)
+ hit_rate = np.mean(test_result.flatten()[target_mask] == y_vals_test.flatten()[target_mask])
+ false_alarm = np.mean(test_result.flatten()[dist_mask] != y_vals_test.flatten()[dist_mask])
+ dprime = norm.ppf(hit_rate) - norm.ppf(false_alarm)
+ print("dprime = ", dprime)
+
+ print(rep_test.shape)
+
+
+ # logistic regression unit
+ # import time
+ # for n in range(max_iter):
+ # start = time.time()
+ # print "\tIteration ", n
+
+ # y_vals, image_data = im_lib(batch_size,sequential=True)
+
+ # print "\tComputing representation"
+ # rep = self.input_layer.tf_sess.run(self.input_layer.output,feed_dict={self.input:image_data})
+
+ # print "\tGradient descent step"
+ # #print "rep shape = ", rep.shape
+ # self.tf_sess.run(self.train_step,feed_dict={self.input_placeholder:rep,self.y:y_vals})
+
+
+ # #self.tf_sess.run(self.train_step,feed_dict={self.input:image_data,self.y:y_vals})
+
+ # #print "\t\ttraining batch cost = ", self.tf_sess.run(self.cost,feed_dict={self.input:image_data,self.y:y_vals})
+
+ # print "\t\tTraining error = ", np.mean(np.abs(np.argmax(y_vals,axis=1) - self.predict(image_data)))
+ # print y_vals
+ # print
+ # print self.predict(image_data)
+ # print "\t\ttraining batch cost = ", self.tf_sess.run(self.cost,feed_dict={self.input_placeholder:rep,self.y:y_vals})
+ # print "\t\ttraining linear model = ", self.tf_sess.run(self.linear,feed_dict={self.input_placeholder:rep,self.y:y_vals})
+
+ # print "\t\ttotal time = ", time.time() - start
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/S1_Layer.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/S1_Layer.py
new file mode 100644
index 0000000..44bed67
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/S1_Layer.py
@@ -0,0 +1,273 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import tensorflow as tf
+import os
+import pandas as pd
+
+def gabor(X,Y,lamb,sigma,theta,gamma,phase):
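+    '''Gabor patch: a sinusoid of wavelength lamb, orientation theta and the given phase,
+    windowed by a Gaussian envelope with scale sigma and aspect ratio gamma.'''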
+
+ X_hat = X*np.cos(theta) + Y*np.sin(theta)
+ Y_hat = -X*np.sin(theta) + Y*np.cos(theta)
+
+ arg1 = (0.5/sigma**2)*(X_hat**2 + (gamma**2)*Y_hat**2)
+ arg2 = (2.0*np.pi/lamb)*X_hat
+
+ return np.exp(-arg1)*np.cos(arg2 + phase)
+
+class S1_Layer (object):
+ def __init__(self,node_name,input_shape,freq_channel_params,orientations): #,num_cores=8):
+ '''
+        freq_channel_params is a list of filter parameters, one entry per S1 band:
+            len(freq_channel_params) == num_bands
+            freq_channel_params[i] = [pixels, sigma, lambda, stride]
+ orientations is a list of angles in radians for each filter
+ '''
+ #self.tf_sess = tf.Session()
+
+ self.node_name = node_name
+# NUM_CORES = num_cores # Choose how many cores to use.
+# NUM_CORES = 1
+# self.tf_sess = tf.Session(config=tf.ConfigProto(inter_op_parallelism_threads=NUM_CORES,
+# intra_op_parallelism_threads=NUM_CORES))
+ self.tf_sess = tf.Session()
+# print "Warning: Using hard-coded number of CPU Cores. This should be changed to auto-configure when TensorFlow has been updated."
+
+ self.input_shape = (None,input_shape[0],input_shape[1],1)
+ self.input = tf.placeholder(tf.float32,shape=self.input_shape,name="input")
+
+ #phases = np.array([0, np.pi/2])
+ phases = np.array([0.0]) # HMAX uses dense tiling in lieu of phases (make this explicit later)
+
+ num_bands = len(freq_channel_params)
+ num_orientations = len(orientations)
+ num_phases = len(phases)
+ self.K = num_orientations*num_phases #number of features per band
+
+ #n_output = num_frequency_channels*num_orientations*num_phases
+
+ n_input = 1
+
+ self.band_filters = {}
+ self.filter_params = {}
+ self.band_output = {}
+ self.output = self.band_output
+ self.band_shape = {}
+
+ with tf.name_scope(self.node_name):
+ for band in range(num_bands):
+ pixels, sigma, lamb, stride = freq_channel_params[band]
+ self.band_shape[band] = input_shape
+
+ w_shape = np.array([pixels,pixels,n_input,self.K])
+
+ W = np.zeros(w_shape,dtype=np.float32)
+
+ #compute w values from parameters
+ gamma = 0.3 # value taken from Serre et al giant HMAX manuscript from 2005
+ X,Y = np.meshgrid(np.arange(pixels),np.arange(pixels))
+                X = X - pixels//2
+                Y = Y - pixels//2
+
+ #self.filter_params[band] = freq_channel_params[band]
+ self.filter_params[band] = {'pixels':pixels,'sigma':sigma,'lambda':lamb, 'stride':stride} #should I add orientations and phases to this?
+
+ for i in range(self.K):
+
+                    ori_i = i%num_orientations
+                    phase_i = i//num_orientations
+
+ theta = orientations[ori_i]
+ phase = phases[phase_i]
+
+                    # zero out filter values outside a circular aperture
+                    zero_mask = (X*X + Y*Y > pixels*pixels/4)
+
+ W[:,:,0,i] = gabor(X,Y,lamb,sigma,theta,gamma,phase)
+ W[:,:,0,i][zero_mask] = 0.0
+ W[:,:,0,i] = W[:,:,0,i]/np.sqrt(np.sum(W[:,:,0,i]**2))
+
+ W = tf.Variable(W,trainable=False,name='W_'+str(band))
+ W.initializer.run(session=self.tf_sess)
+
+ self.band_filters[band] = W
+
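+                # L2-normalize each input image so that convolving with the unit-norm Gabor
+                # filters yields a normalized dot product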
+ input_norm = tf.reshape(tf.reduce_sum(self.input*self.input,[1,2,3]),[-1,1,1,1])
+ normalized_input = tf.div(self.input,tf.sqrt(input_norm))
+ self.band_output[band] = tf.nn.conv2d(normalized_input,W,strides=[1,stride,stride,1],padding='SAME')
+ self.band_shape[band] = tuple([int(x) for x in self.band_output[band].get_shape()[1:3]])
+
+
+ self.num_units = 0
+ for b in self.band_shape:
+            self.num_units += np.prod(self.band_shape[b])*self.K
+
+ def __del__(self):
+ self.tf_sess.close()
+
+ def __repr__(self):
+ return "S1_Layer"
+
+ def compute_output(self,X,band):
+
+ return self.tf_sess.run(self.output[band],feed_dict={self.input:X})
+
+ def get_compute_ops(self,unit_table=None):
+
+ compute_list = []
+
+ if unit_table is not None:
+
+ for i, row in unit_table.iterrows():
+
+ if 'y' in unit_table:
+ node, band, y, x = row['node'], int(row['band']), int(row['y']), int(row['x'])
+ compute_list.append(self.output[band][:,y,x,:])
+
+ elif 'band' in unit_table:
+ node, band = row['node'], int(row['band'])
+ compute_list.append(self.output[band])
+
+ else:
+ return self.get_all_compute_ops()
+
+ else:
+ return self.get_all_compute_ops()
+
+ return unit_table, compute_list
+
+ def get_all_compute_ops(self):
+
+ compute_list = []
+ unit_table = pd.DataFrame(columns=['node','band'])
+ for band in self.band_output:
+ unit_table = unit_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+
+ compute_list.append(self.output[band])
+
+ return unit_table, compute_list
+
+def S1_Layer_test():
+
+ import matplotlib.pyplot as plt
+
+ fig_dir = 'Figures'
+
+ # these parameters are taken from Serre, et al PNAS for HMAX
+ freq_channel_params = [ [7,2.8,3.5],
+ [9,3.6,4.6],
+ [11,4.5,5.6],
+ [13,5.4,6.8],
+ [15,6.3,7.9],
+ [17,7.3,9.1],
+ [19,8.2,10.3],
+ [21,9.2,11.5],
+ [23,10.2,12.7],
+ [25,11.3,14.1],
+ [27,12.3,15.4],
+ [29,13.4,16.8],
+ [31,14.6,18.2],
+ [33,15.8,19.7],
+ [35,17.0,21.2],
+ [37,18.2,22.8],
+ [39,19.5,24.4]]
+
+ orientations = np.arange(4)*np.pi/4
+
+    # S1_Layer expects [pixels, sigma, lambda, stride] per band; assume stride 1 for
+    # these legacy test params
+    freq_channel_params = [params + [1] for params in freq_channel_params]
+
+    input_shape = (128,192)
+    s1 = S1_Layer('s1',input_shape,freq_channel_params,orientations)
+
+ #plot filters, make sure they are correct
+ fig, ax = plt.subplots(len(orientations),len(freq_channel_params))
+ fig2,ax2 = plt.subplots(len(orientations),len(freq_channel_params))
+ for i,theta in enumerate(orientations):
+ for j,params in enumerate(freq_channel_params):
+
+ #index = j*len(orientations)*2 + i*2
+
+ fil = s1.tf_sess.run(s1.band_filters[j])[:,:,0,i]
+
+ ax[i,j].imshow(fil,interpolation='nearest',cmap='gray')
+ ax[i,j].axis('off')
+
+            # with a single phase, each band has only len(orientations) filters,
+            # so there is no second (phase-shifted) filter bank to plot
+
+
+ from Image_Library import Image_Library
+
+ image_dir = '/Users/michaelbu/Code/HCOMP/SampleImages'
+
+ im_lib = Image_Library(image_dir)
+
+ image_data = im_lib(1)
+
+ fig, ax = plt.subplots(1)
+ ax.imshow(image_data[0,:,:,0],cmap='gray')
+
+ import timeit
+ #print timeit.timeit('result = s1.compute_output(image_data)','from __main__ import s1',number=10)
+
+ def f():
+ for band in range(len(freq_channel_params)):
+ s1.compute_output(image_data,band)
+
+ number = 10
+ runs = timeit.Timer(f).repeat(repeat=10,number=number)
+ print("Average time (s) for output evaluation for ", number, " runs: ", np.mean(runs)/number, '+/-', np.std(runs)/np.sqrt(number))
+
+
+
+ print("Image shape = ", image_data.shape)
+
+
+ fig_r, ax_r = plt.subplots(len(orientations),len(freq_channel_params))
+ fig_r2,ax_r2 = plt.subplots(len(orientations),len(freq_channel_params))
+
+ for j,params in enumerate(freq_channel_params):
+
+ result = s1.compute_output(image_data,j)
+ print("result shape = ", result.shape)
+
+ for i,theta in enumerate(orientations):
+
+ #fil = np.zeros([39,39])
+ #index = j*len(orientations)*2 + i*2
+ #print s1.params[0]
+
+ ax_r[i,j].imshow(result[0,:,:,i],interpolation='nearest',cmap='gray')
+ ax_r[i,j].axis('off')
+
+            # result has only len(orientations) channels with a single phase, so
+            # there is no second channel set to plot here
+
+ fig_r.savefig(os.path.join(fig_dir,'s1_layer_0.tiff'))
+ fig_r2.savefig(os.path.join(fig_dir,'s1_layer_1.tiff'))
+ plt.show()
+
+ #sess.close()
+
+if __name__=='__main__':
+
+ S1_Layer_test()
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/S_Layer.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/S_Layer.py
new file mode 100644
index 0000000..df10f08
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/S_Layer.py
@@ -0,0 +1,404 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import tensorflow as tf
+from bmtk.simulator.mintnet.Image_Library import Image_Library
+import os
+import h5py
+import pandas as pd
+
+class S_Layer (object):
+ def __init__(self, node_name, C_Layer_input, grid_size, pool_size, K, file_name=None, randomize=False):
+ self.node_name = node_name
+
+ self.input = C_Layer_input.input
+
+ self.tf_sess = C_Layer_input.tf_sess
+ #self.input_layer = C_Layer_input
+ # c_output should be a dictionary indexed over bands
+
+ c_output = C_Layer_input.output
+ self.C_Layer_input = C_Layer_input
+
+ self.K = K
+ self.input_K = C_Layer_input.K
+ self.grid_size = grid_size
+ self.pool_size = pool_size
+
+ self.band_output = {}
+ #self.band_filters = {}
+ self.band_shape = C_Layer_input.band_shape
+ #print self.band_shape
+
+ file_open = False
+        if file_name is None:
+ self.train_state=False
+ new_weights = True
+ else:
+
+ self.weight_file = file_name
+
+ weight_h5 = h5py.File(self.weight_file, 'a')
+ file_open = True
+
+ if self.node_name in weight_h5.keys():
+
+ new_weights=False
+ weight_data = weight_h5[self.node_name]['weights']
+ self.train_state = weight_h5[self.node_name]['train_state'].value
+
+ else:
+
+ new_weights=True
+ self.train_state = False
+ weight_h5.create_group(self.node_name)
+ #weight_h5[self.node_name].create_group('weights')
+ weight_h5[self.node_name]['train_state']=self.train_state
+
+
+
+ # perform checks to make sure weight_file is consistent with the Layer parameters
+ # check input bands
+ # check grid_size, pool_size, K
+
+ with tf.name_scope(self.node_name):
+ #for band in c_output.keys():
+
+ if new_weights:
+
+ # if self.grid_size >= self.band_shape[band][0]:
+ # size_y = self.band_shape[band][0]
+ # else:
+ # size_y = grid_size
+ # if self.grid_size >= self.band_shape[band][1]:
+ # size_x = self.band_shape[band][1]
+ # else:
+ # size_x = grid_size
+
+ w_shape = np.array([self.grid_size,self.grid_size,self.input_K,self.K])
+
+ self.w_shape = w_shape
+
+ w_bound = np.sqrt(np.prod(w_shape[1:]))
+ if randomize:
+ W = np.random.uniform(low= -1.0/w_bound, high=1.0/w_bound, size=w_shape).astype(np.float32)
+ else:
+ W = np.zeros(w_shape).astype(np.float32)
+
+            if file_name is not None:
+ weight_h5[self.node_name].create_dataset('weights',shape=w_shape,dtype=np.float32)
+
+ else:
+ # Need to check that c_output.keys() has the same set of keys that weight_dict is expecting
+ W = weight_data.value
+ self.w_shape = W.shape
+
+
+
+
+ W = tf.Variable(W,trainable=False,name='W')
+ W.initializer.run(session=self.tf_sess)
+
+ #self.band_filters[band]= W
+ self.weights = W
+
+ for band in c_output.keys():
+ W_slice = W[:self.band_shape[band][0],:self.band_shape[band][1]]
+
+ input_norm = tf.expand_dims(tf.reduce_sum(c_output[band]*c_output[band],[1,2]),1) #,[-1,1,1,self.input_K])
+ input_norm = tf.expand_dims(input_norm,1)
+ normalized_input = tf.div(c_output[band],tf.maximum(tf.sqrt(input_norm),1e-12))
+ self.band_output[band] = tf.nn.conv2d(normalized_input,W_slice,strides=[1,1,1,1],padding='SAME')
+
+ self.output = self.band_output
+
+ self.num_units = 0
+ for b in self.band_shape:
+ self.num_units += np.prod(self.band_shape[b])*self.K
+
+ if file_open:
+ weight_h5.close()
+
+ def __repr__(self):
+ return "S_Layer"
+
+ def compute_output(self,X,band):
+
+ return self.tf_sess.run(self.output[band],feed_dict={self.input:X})
+
+ def find_band_and_coords_for_imprinting_unit(self, imprinting_unit_index):
+
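+        # Decode a flat unit index into (band, y, x): walk the input bands in iteration
+        # order, subtracting each band's pixel count until the index falls inside one.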
+ cumulative_units = 0
+ for band in self.C_Layer_input.output:
+
+ units_in_next_band = int(np.prod(self.C_Layer_input.output[band].get_shape()[1:3]))
+
+ if imprinting_unit_index < cumulative_units + units_in_next_band:
+ # found the right band!
+ yb, xb = self.C_Layer_input.band_shape[band]
+
+ band_index = imprinting_unit_index - cumulative_units
+
+                y = band_index//xb
+                x = band_index%xb
+ break
+ else:
+ cumulative_units += units_in_next_band
+
+ return band, y, x
+
+
+
+ def get_total_pixels_in_C_Layer_input(self):
+
+ total = 0
+
+ band_shape = self.C_Layer_input.band_shape
+        band_ids = sorted(band_shape.keys())
+
+ for band in band_ids:
+ total += np.prod(band_shape[band])
+
+ return total
+
+
+ def get_patch_bounding_box_and_shift(self,band,y,x):
+        y_lower = y - self.grid_size//2
+        y_upper = y_lower + self.grid_size
+
+        x_lower = x - self.grid_size//2
+ x_upper = x_lower + self.grid_size
+
+ yb, xb = self.C_Layer_input.band_shape[band]
+
+ # compute shifts in lower bound to deal with overlap with the edges
+ y_shift_lower = np.max([-y_lower,0])
+ x_shift_lower = np.max([-x_lower,0])
+
+
+ y_lower = np.max([y_lower,0])
+ y_upper = np.min([y_upper,yb])
+
+ x_lower = np.max([x_lower,0])
+ x_upper = np.min([x_upper,xb])
+
+ y_shift_upper = y_shift_lower + y_upper - y_lower
+ x_shift_upper = x_shift_lower + x_upper - x_lower
+
+ return y_lower, y_upper, x_lower, x_upper, y_shift_lower, y_shift_upper, x_shift_lower, x_shift_upper
+
+ def train(self,image_dir,batch_size=100,image_shape=(256,256)): #,save_file='weights.pkl'):
+
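+        # "Imprinting": each of the K features is set by copying the input C-layer's
+        # response patch around a randomly chosen unit, keeping only pool_size random
+        # entries of the patch and zeroing the rest.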
+ print("Training")
+
+ im_lib = Image_Library(image_dir,new_size=image_shape)
+
+ new_weights = np.zeros(self.w_shape).astype(np.float32)
+
+
+ for k in range(self.K):
+
+ if k%10==0:
+ print("Imprinting feature ", k)
+ # how to handle the randomly picked neuron; rejection sampling?
+ imprinting_unit_index = np.random.randint(self.get_total_pixels_in_C_Layer_input())
+
+ #print "Imprinting unit index ", imprinting_unit_index
+ band, y, x = self.find_band_and_coords_for_imprinting_unit(imprinting_unit_index)
+ #print "Imprinting unit in band ", band, " at ", (y, x)
+
+ im_data = im_lib(1)
+
+ output = self.C_Layer_input.compute_output(im_data,band)
+
+ # grab weights from chosen unit, save them to new_weights
+ y_lower, y_upper, x_lower, x_upper, y_shift_lower, y_shift_upper, x_shift_lower, x_shift_upper = self.get_patch_bounding_box_and_shift(band,y,x)
+
+ w_patch = output[0,y_lower:y_upper,x_lower:x_upper,:].copy()
+
+ #print "(y_lower, y_upper), (x_lower, x_upper) = ", (y_lower, y_upper), (x_lower, x_upper)
+ #print "Patch shape = ", w_patch.shape
+
+ patch_size = np.prod(w_patch.shape)
+ # print "self.w_shape = ", self.w_shape, " patch_size = ", patch_size, " pool_size = ", self.pool_size
+ # print "band, y, x = ", band,y,x
+
+ pool_size = np.min([self.pool_size,patch_size])
+ pool_mask_indices = np.random.choice(np.arange(patch_size), size=pool_size, replace=False)
+            pool_mask = np.zeros(patch_size,dtype=bool)
+ pool_mask[pool_mask_indices] = True
+ pool_mask.resize(w_patch.shape)
+ pool_mask = np.logical_not(pool_mask) # we want a mask for the indices to zero out
+
+ w_patch[pool_mask] = 0.0
+
+ # will need to enlarge w_patch if the edges got truncated
+
+ new_weights[y_shift_lower:y_shift_upper,x_shift_lower:x_shift_upper,:,k] = w_patch
+
+
+ # old code starts here
+ # num_batches = self.K/batch_size
+ # if self.K%batch_size!=0:
+ # num_batches = num_batches+1
+
+ self.tf_sess.run(self.weights.assign(new_weights))
+ print()
+ print("Saving weights to file in ", self.weight_file)
+
+ weight_h5 = h5py.File(self.weight_file,'a')
+ #for band in new_weights:
+ weight_h5[self.node_name]['weights'][...] = new_weights
+ weight_h5[self.node_name]['train_state'][...]=True
+
+ weight_h5.close()
+
+ # def get_compute_ops(self):
+ #
+ # node_table = pd.DataFrame(columns=['node','band'])
+ # compute_list = []
+ #
+ # for band in self.band_output:
+ # node_table = node_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+ #
+ # compute_list.append(self.output[band])
+ #
+ # return node_table, compute_list
+
+ def get_compute_ops(self,unit_table=None):
+
+ compute_list = []
+
+ if unit_table is not None:
+
+ for i, row in unit_table.iterrows():
+
+ if 'y' in unit_table:
+ node, band, y, x = row['node'], int(row['band']), int(row['y']), int(row['x'])
+ compute_list.append(self.output[band][:,y,x,:])
+
+ elif 'band' in unit_table:
+ node, band = row['node'], int(row['band'])
+ compute_list.append(self.output[band])
+
+ else:
+ return self.get_all_compute_ops()
+
+ else:
+ return self.get_all_compute_ops()
+
+ return unit_table, compute_list
+
+ def get_all_compute_ops(self):
+
+ compute_list = []
+ unit_table = pd.DataFrame(columns=['node','band'])
+ for band in self.band_output:
+ unit_table = unit_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+
+ compute_list.append(self.output[band])
+
+ return unit_table, compute_list
+
+
+def test_S_Layer_output():
+
+ from S1_Layer import S1_Layer
+ import matplotlib.pyplot as plt
+ from C_Layer import C_Layer
+
+ fig_dir = 'Figures'
+ # First we need an S1 Layer
+ # these parameters are taken from Serre, et al PNAS for HMAX
+ freq_channel_params = [ [7,2.8,3.5],
+ [9,3.6,4.6],
+ [11,4.5,5.6],
+ [13,5.4,6.8],
+ [15,6.3,7.9],
+ [17,7.3,9.1],
+ [19,8.2,10.3],
+ [21,9.2,11.5],
+ [23,10.2,12.7],
+ [25,11.3,14.1],
+ [27,12.3,15.4],
+ [29,13.4,16.8],
+ [31,14.6,18.2],
+ [33,15.8,19.7],
+ [35,17.0,21.2],
+ [37,18.2,22.8],
+ [39,19.5,24.4]]
+
+ orientations = np.arange(4)*np.pi/4
+
+    # S1_Layer expects [pixels, sigma, lambda, stride] per band; assume stride 1 for
+    # these legacy test params
+    freq_channel_params = [params + [1] for params in freq_channel_params]
+
+    input_shape = (128,192)
+    s1 = S1_Layer('s1',input_shape,freq_channel_params,orientations)
+
+ # Now we need to define a C1 Layer
+ bands = [ [[0,1], 8, 3],
+ [[2,3], 10, 5],
+ [[4,5], 12, 7],
+ [[6,7], 14, 8],
+ [[8,9], 16, 10],
+ [[10,11], 18, 12],
+ [[12,13], 20, 13],
+ [[14,15,16], 22, 15]]
+
+    c1 = C_Layer('c1',s1,bands)
+
+ grid_size = 3
+ pool_size = 10
+ K = 10
+
+ s2 = S_Layer('s2',c1,grid_size,pool_size,K,file_name='S_test_file.h5',randomize=False)
+
+ # Test s2 on an image
+ image_dir = '/Users/michaelbu/Code/HCOMP/SampleImages'
+
+ im_lib = Image_Library(image_dir,new_size=input_shape)
+
+ image_data = im_lib(1)
+
+ fig, ax = plt.subplots(1)
+ ax.imshow(image_data[0,:,:,0],cmap='gray')
+
+ fig,ax = plt.subplots(8,10)
+
+ result = {}
+ for b in range(len(bands)):
+ result[b] = s2.compute_output(image_data,b)
+
+ for k in range(K):
+ ax[b,k].imshow(result[b][0,:,:,k],interpolation='nearest',cmap='gray')
+ ax[b,k].axis('off')
+
+ fig.savefig(os.path.join(fig_dir,'s2_layer.tiff'))
+ plt.show()
+
+ s2.train(image_dir,batch_size=10,image_shape=input_shape) #,save_file='test_weights.pkl')
+
+
+
+
+if __name__=='__main__':
+    test_S_Layer_output()
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/Sb_Layer.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/Sb_Layer.py
new file mode 100644
index 0000000..4731323
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/Sb_Layer.py
@@ -0,0 +1,242 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import tensorflow as tf
+from S_Layer import S_Layer
+import os
+import pandas as pd
+
+class Sb_Layer (object):
+ def __init__(self,node_name,C_Layer_input,grid_size,pool_size,K_per_subband,file_name=None):
+ '''grid_size is a list, unlike the standard S_Layer, as is file_names'''
+
+ self.node_name = node_name
+ self.tf_sess = C_Layer_input.tf_sess
+
+ self.input = C_Layer_input.input
+
+ self.num_sublayers = len(grid_size)
+ self.K = K_per_subband*self.num_sublayers #number of features will be number of sub bands times the K per subband
+ self.pool_size = pool_size
+ self.grid_size = grid_size
+
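+        # One S_Layer per grid size; each band's outputs from the sublayers are
+        # concatenated along the feature axis, giving K = num_sublayers*K_per_subband.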
+ c_output = C_Layer_input.output
+
+ self.sublayers = {}
+ with tf.name_scope(self.node_name):
+ for i in range(self.num_sublayers):
+ subnode_name = node_name+'_'+str(i)
+ self.sublayers[i] = S_Layer(subnode_name,C_Layer_input,grid_size[i],pool_size,K_per_subband,file_name)
+
+ self.band_output = {}
+ self.band_shape = C_Layer_input.band_shape
+
+ for band in c_output.keys():
+
+ sub_band_list = []
+ for i in range(self.num_sublayers):
+ sub_band_list += [self.sublayers[i].band_output[band]]
+
+
+
+ #gather sub_layer outputs and stack them for each band
+ self.band_output[band] = tf.concat(sub_band_list, 3)
+
+ self.output = self.band_output
+
+ self.num_units = 0
+ for b in self.band_shape:
+ self.num_units += np.prod(self.band_shape[b])*self.K
+
+ def __repr__(self):
+ return "Sb_Layer"
+
+ def compute_output(self,X,band):
+
+ return self.tf_sess.run(self.output[band],feed_dict={self.input:X})
+
+ def train(self,image_dir,batch_size=100,image_shape=(256,256)): #,save_file_prefix='weights'):
+
+ for i in range(self.num_sublayers):
+ #save_file = save_file_prefix + '_'+str(i)+'.pkl'
+
+ #try:
+ self.sublayers[i].train(image_dir,batch_size,image_shape) #,save_file)
+ #except Exception as e:
+ # print i
+ # raise e
+
+ # def get_compute_ops(self):
+ #
+ # node_table = pd.DataFrame(columns=['node','band'])
+ # compute_list = []
+ #
+ # for band in self.band_output:
+ # node_table = node_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+ #
+ # compute_list.append(self.output[band])
+ #
+ # return node_table, compute_list
+
+ def get_compute_ops(self,unit_table=None):
+
+ compute_list = []
+
+ if unit_table is not None:
+
+ for i, row in unit_table.iterrows():
+
+ if 'y' in unit_table:
+ node, band, y, x = row['node'], int(row['band']), int(row['y']), int(row['x'])
+ compute_list.append(self.output[band][:,y,x,:])
+
+ elif 'band' in unit_table:
+ node, band = row['node'], int(row['band'])
+ compute_list.append(self.output[band])
+
+ else:
+ return self.get_all_compute_ops()
+
+ else:
+ return self.get_all_compute_ops()
+
+ return unit_table, compute_list
+
+ def get_all_compute_ops(self):
+
+ compute_list = []
+ unit_table = pd.DataFrame(columns=['node','band'])
+ for band in self.band_output:
+ unit_table = unit_table.append(pd.DataFrame([[self.node_name,band]],columns=['node','band']),ignore_index=True)
+
+ compute_list.append(self.output[band])
+
+ return unit_table, compute_list
+
+
+def test_S2b_Layer():
+
+ from S1_Layer import S1_Layer
+ import matplotlib.pyplot as plt
+ from C_Layer import C_Layer
+
+ fig_dir = 'Figures'
+ # First we need an S1 Layer
+ # these parameters are taken from Serre, et al PNAS for HMAX
+ freq_channel_params = [ [7,2.8,3.5],
+ [9,3.6,4.6],
+ [11,4.5,5.6],
+ [13,5.4,6.8],
+ [15,6.3,7.9],
+ [17,7.3,9.1],
+ [19,8.2,10.3],
+ [21,9.2,11.5],
+ [23,10.2,12.7],
+ [25,11.3,14.1],
+ [27,12.3,15.4],
+ [29,13.4,16.8],
+ [31,14.6,18.2],
+ [33,15.8,19.7],
+ [35,17.0,21.2],
+ [37,18.2,22.8],
+ [39,19.5,24.4]]
+
+ orientations = np.arange(4)*np.pi/4
+
+    # S1_Layer expects [pixels, sigma, lambda, stride] per band; assume stride 1 for
+    # these legacy test params
+    freq_channel_params = [params + [1] for params in freq_channel_params]
+
+    input_shape = (128,192)
+    s1 = S1_Layer('s1',input_shape,freq_channel_params,orientations)
+
+ # Now we need to define a C1 Layer
+ bands = [ [[0,1], 8, 3],
+ [[2,3], 10, 5],
+ [[4,5], 12, 7],
+ [[6,7], 14, 8],
+ [[8,9], 16, 10],
+ [[10,11], 18, 12],
+ [[12,13], 20, 13],
+ [[14,15,16], 22, 15]]
+
+    c1 = C_Layer('c1',s1,bands)
+
+ print("s1 shape: ", s1.band_shape)
+ print("c1 shape: ", c1.band_shape)
+
+ grid_size = [6,9,12,15]
+ pool_size = 10
+ K = 10
+
+    # give the layer a node name, and a weight file so that train() below can save
+    s2b = Sb_Layer('s2b',c1,grid_size,pool_size,K,file_name='S2b_test_file.h5')
+
+ print("s2b shape: ", s2b.band_shape)
+
+ c2b_bands = [ [[0,1,2,3,4,5,6,7],40,40]]
+
+    c2b = C_Layer('c2b',s2b,c2b_bands)
+
+
+ print("c2b shape: ", c2b.band_shape)
+ #print c2b.band_output.keys()
+ # Test s2 on an image
+ from Image_Library import Image_Library
+
+ image_dir = '/Users/michaelbu/Code/HCOMP/SampleImages'
+
+ im_lib = Image_Library(image_dir,new_size=input_shape)
+
+ image_data = im_lib(1)
+
+ fig, ax = plt.subplots(1)
+ ax.imshow(image_data[0,:,:,0],cmap='gray')
+
+ fig,ax = plt.subplots(8,10)
+
+ result = {}
+ for b in range(len(bands)):
+ result[b] = s2b.compute_output(image_data,b)
+
+ for k in range(K):
+ ax[b,k].imshow(result[b][0,:,:,k],interpolation='nearest',cmap='gray')
+ ax[b,k].axis('off')
+
+ fig.savefig(os.path.join(fig_dir,'s2b_layer.tiff'))
+
+ fig,ax = plt.subplots(8,10)
+
+ result = {}
+
+ #only one band for c2b
+ result[0] = c2b.compute_output(image_data,0)
+
+ for k in range(K):
+ ax[b,k].imshow(result[0][0,:,:,k],interpolation='nearest',cmap='gray')
+ ax[b,k].axis('off')
+
+ fig.savefig(os.path.join(fig_dir,'c2b_layer.tiff'))
+
+
+ #plt.show()
+
+    s2b.train(image_dir,batch_size=10,image_shape=input_shape)
+
+if __name__=='__main__':
+
+ test_S2b_Layer()
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/ViewTunedLayer.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/ViewTunedLayer.py
new file mode 100644
index 0000000..1ae95e1
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/ViewTunedLayer.py
@@ -0,0 +1,219 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import tensorflow as tf
+from bmtk.simulator.mintnet.Image_Library import Image_Library
+#from bmtk.mintnet.Stimulus.NaturalScenes import NaturalScenes
+import h5py
+import pandas as pd
+
+class ViewTunedLayer (object):
+ def __init__(self,node_name,K,alt_image_dir='',*inputs,**keyword_args):
+
+ self.node_name=node_name
+
+ file_name = keyword_args.get('file_name',None)
+
+ self.alt_image_dir = alt_image_dir
+
+        file_open = False
+        if file_name is None:
+            print("No filename given. Generating new (random) weights for layer ", node_name)
+            self.train_state = False
+            new_weights = True
+        else:
+            self.weight_file = file_name
+            weight_h5 = h5py.File(self.weight_file,'a')
+            file_open = True
+
+ if self.node_name in weight_h5.keys():
+
+ #print "Loading weights for layer ", node_name, " from ", self.weight_file
+ new_weights = False
+ weight_data = weight_h5[self.node_name]['weights'].value
+                self.train_state = weight_h5[self.node_name]['train_state'].value
+
+ else:
+
+ new_weights=True
+ self.train_state=False
+ weight_h5.create_group(self.node_name)
+ weight_h5[self.node_name]['train_state']=self.train_state
+
+ self.input = inputs[0].input
+ self.tf_sess = inputs[0].tf_sess
+ #should add a check that all inputs have the same value of inputs[i].input
+
+ self.K = K
+
+ concat_list = []
+ total_K = 0
+
+ with tf.name_scope(self.node_name):
+ for i, node in enumerate(inputs):
+
+ output_i = node.output
+
+ for b in output_i:
+ shape = node.band_shape[b]
+
+ num_K = np.prod(shape)*node.K
+ total_K = total_K + num_K
+ #print "shape = ", shape, " total_K = ", num_K
+ reshape_op = tf.reshape(output_i[b],[-1,num_K])
+ concat_list += [reshape_op]
+
+ self.input_unit_vector = tf.concat(concat_list, 1) #shape [batch_size, total_K]
+
+ self.w_shape = (total_K,K)
+ #weight = np.random.normal(size=self.w_shape).astype(np.float32)
+        if new_weights:
+            weight = np.zeros(self.w_shape).astype(np.float32)
+            if file_name is not None:
+                weight_h5[self.node_name].create_dataset('weights',shape=weight.shape,dtype=np.float32,compression='gzip',compression_opts=9)
+ else:
+            weight = weight_data
+ assert weight.shape[0]==total_K, "weights from file are not equal to total input size for layer "+self.node_name
+
+
+ self.weights = tf.Variable(weight,trainable=False,name='weights')
+ self.weights.initializer.run(session=self.tf_sess)
+
+ #print self.input_unit_vector.get_shape(), total_K
+ #should this be a dictionary for consistency?
+ #print "input unit vector shape = ", self.input_unit_vector.get_shape()
+ #print "total_K = ", total_K
+
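+            # Normalize the concatenated feature vector; the output is then the cosine
+            # similarity between the input and each stored (unit-norm) template.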
+ input_norm = tf.expand_dims(tf.reduce_sum(self.input_unit_vector*self.input_unit_vector,[1]),1) #,[-1,total_K])
+ normalized_input = tf.div(self.input_unit_vector,tf.sqrt(input_norm))
+ self.output = tf.matmul(normalized_input,self.weights) #/0.01
+
+ # try gaussian tuning curve centered on preferred feature
+ # self.output = tf.exp(-0.5*tf.reduce_sum(self.weights - self.input_unit_vector))
+
+ self.num_units = K
+
+ if file_open:
+ weight_h5.close()
+
+ def __repr__(self):
+ return "ViewTunedLayer"
+
+ def compute_output(self,X):
+
+ return self.tf_sess.run(self.output,feed_dict={self.input:X})
+
+ def train(self,image_dir,batch_size=10,image_shape=(256,256)): #,save_file=None):
+
+ print("Training")
+
+ im_lib = Image_Library(image_dir,new_size=image_shape)
+
+ #ns_lib = NaturalScenes.with_new_stimulus_from_folder(image_dir, new_size=image_shape, add_channels=True)
+
+ new_weights = np.zeros(self.w_shape,dtype=np.float32)
+
+        num_batches = self.K//batch_size
+
+ for n in range(num_batches):
+ #for k in range(self.K):
+ print("\t\tbatch: ", n, " Total features: ", n*batch_size)
+ print("\t\t\tImporting images for batch")
+ image_data = im_lib(batch_size,sequential=True)
+ print("\t\t\tDone")
+
+ print("\t\t\tComputing responses for batch")
+ batch_output = self.tf_sess.run(self.input_unit_vector,feed_dict={self.input:image_data})
+ new_weights[:,n*batch_size:(n+1)*batch_size] = batch_output.T
+
+ print("\t\t\tDone")
+
+ if self.K%batch_size!=0:
+ last_batch_size = self.K%batch_size
+ print("\t\tbatch: ", n+1, " Total features: ", (n+1)*batch_size)
+ print("\t\t\tImporting images for batch")
+ image_data = im_lib(last_batch_size,sequential=True)
+ print("\t\t\tDone")
+
+ print("\t\t\tComputing responses for batch")
+ batch_output = self.tf_sess.run(self.input_unit_vector,feed_dict={self.input:image_data})
+ new_weights[:,-last_batch_size:] = batch_output.T
+
+ new_weights = new_weights/np.sqrt(np.maximum(np.sum(new_weights**2,axis=0),1e-12))
+
+ self.tf_sess.run(self.weights.assign(new_weights))
+
+ print("")
+ print("Saving weights to file ", self.weight_file)
+ weight_h5 = h5py.File(self.weight_file,'a')
+ weight_h5[self.node_name]['weights'][...] = new_weights
+ weight_h5[self.node_name]['train_state'][...] = True
+ weight_h5.close()
+
+ def get_compute_ops(self,unit_table=None):
+
+ compute_list = []
+
+ if unit_table is not None:
+ for i, row in unit_table.iterrows():
+ compute_list = [self.output]
+
+ else:
+ unit_table = pd.DataFrame([[self.node_name]], columns=['node'])
+ compute_list = [self.output]
+
+ return unit_table, compute_list
+
+
+
+def test_ViewTunedLayer():
+
+ from hmouse_test import hmouse
+
+ image_dir = '/Users/michaelbu/Code/H-MOUSE/ILSVRC2015/Data/DET/test'
+ image_shape = (256,256)
+ weight_file_prefix = 'S2b_weights_500'
+
+ print("Configuring HMAX network")
+ hm = hmouse('config/nodes.csv','config/node_types.csv')
+
+ for node in hm.nodes:
+ print(node, " num_units = ", hm.nodes[node].num_units)
+
+    s4 = ViewTunedLayer('s4',10,'',hm.nodes['c1'],hm.nodes['c2'],hm.nodes['c2b']) #,hm.nodes['c3'])
+
+ im_lib = Image_Library(image_dir,new_size=image_shape)
+ image_data = im_lib(1)
+
+ print(s4.tf_sess.run(tf.shape(s4.input_unit_vector),feed_dict={s4.input:image_data}))
+ print(s4.tf_sess.run(tf.shape(s4.weights)))
+
+ print(s4.compute_output(image_data).shape)
+
+ #s4.train(image_dir,batch_size=10,image_shape=image_shape,save_file='s4_test_weights.pkl')
+
+
+
+
+if __name__=='__main__':
+
+ test_ViewTunedLayer()
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/__init__.py
new file mode 100644
index 0000000..44200f2
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/__init__.py
@@ -0,0 +1,28 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from . import S_Layer
+from . import S1_Layer
+from . import Sb_Layer
+from . import C_Layer
+from . import ViewTunedLayer
+from . import hmax
diff --git a/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/hmax.py b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/hmax.py
new file mode 100644
index 0000000..c770ec6
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/mintnet/hmax/hmax.py
@@ -0,0 +1,432 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import sys
+import json
+from S1_Layer import S1_Layer
+from C_Layer import C_Layer
+from S_Layer import S_Layer
+from Sb_Layer import Sb_Layer
+from ViewTunedLayer import ViewTunedLayer
+from Readout_Layer import Readout_Layer
+import tensorflow as tf
+import os
+import h5py
+import pandas as pd
+
+from bmtk.simulator.mintnet.Image_Library import Image_Library
+import matplotlib.pyplot as plt
+
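+# A minimal sketch of the expected config JSON, inferred from the keys read below
+# (the file names here are illustrative, not part of the package):
+#
+#   {
+#     "train_state_file": "train_state.json",
+#     "image_dir": "images",
+#     "output_dir": "output",
+#     "batch_size": 10,
+#     "node_config_dir": "config",
+#     "network": {
+#       "nodes": "config/nodes.csv",
+#       "node_types": "config/node_types.csv",
+#       "edges": "config/edges.csv"
+#     }
+#   }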
+class hmax (object):
+
+ def __init__(self, configuration, name=None): #,num_cores=8):
+ self.name = name
+
+ if os.path.isdir(configuration):
+ # If configuration is a directory look for a config-file inside it.
+            self.config_file = os.path.join(configuration, 'config_' + os.path.basename(configuration) + '.json')
+ if self.name is None:
+ self.name = os.path.basename(configuration)
+
+ elif os.path.isfile(configuration):
+ # If configuration is a json file
+ if self.name is None:
+ raise Exception("A name is required for configuration parameters")
+ self.config_file = configuration
+
+ with open(self.config_file,'r') as f:
+ self.config_data = json.loads(f.read())
+
+        if os.path.isdir(configuration):
+            self.config_dir = os.path.abspath(configuration)
+        else:
+            self.config_dir = os.path.dirname(os.path.abspath(configuration))
+ self.train_state_file = self.__get_config_file(self.config_data['train_state_file'])
+ self.image_dir = self.__get_config_file(self.config_data['image_dir'])
+
+ # Find, and create if necessary, the output directory
+ if 'output_dir' in self.config_data:
+ self.output_dir = self.__get_config_file(self.config_data['output_dir'])
+ else:
+ self.output_dir = os.path.join(self.config_dir, 'output')
+
+ if not os.path.exists(self.output_dir):
+ os.makedirs(self.output_dir)
+
+ with open(self.train_state_file, 'r') as f:
+ self.train_state = json.loads(f.read())
+
+ # get the nodes
+ models_file = self.__get_config_file(self.config_data['network']['node_types'])
+ nodes_file = self.__get_config_file(self.config_data['network']['nodes'])
+ self.__nodes_table = self.__build_nodes_table(nodes_file, models_file, self.config_data)
+
+ # Read the connections
+ self.nodes = {}
+ self.train_order = []
+
+ edges_file = self.__get_config_file(self.config_data['network']['edges'])
+ for (node_name, input_node, node_dict) in self.__get_edges(edges_file, self.config_data):
+ model_class = self.__nodes_table[node_name]['model_id']
+
+ print("Constructing node: ", node_name)
+ if model_class=='S1_Layer':
+ node_type = S1_Layer
+ freq_channel_params = node_dict['freq_channel_params']
+ input_shape = node_dict['input_shape']
+ self.input_shape = input_shape
+ orientations = node_dict['orientations']
+
+ self.nodes[node_name] = node_type(node_name,input_shape,freq_channel_params,orientations) #,num_cores=num_cores)
+ #writer = tf.train.SummaryWriter('tmp/hmax', self.nodes['s1'].tf_sess.graph_def)
+ #merged = tf.merge_all_summaries()
+
+ #writer.add_summary(self.nodes[node_name].tf_sess.run(merged),0)
+
+ elif model_class=='C_Layer':
+ node_type = C_Layer
+ bands = node_dict['bands']
+
+
+ self.nodes[node_name] = node_type(node_name,self.nodes[input_node],bands)
+ #writer = tf.train.SummaryWriter('tmp/hmax', self.nodes['s1'].tf_sess.graph_def)
+
+ elif model_class=='S_Layer':
+ node_type = S_Layer
+ K = node_dict['K']
+ weight_file = self.__get_config_file(node_dict['weight_file']) if 'weight_file' in node_dict else None
+ pool_size = node_dict['pool_size']
+ grid_size = node_dict['grid_size']
+ self.train_order += [node_name]
+
+ self.nodes[node_name] = node_type(node_name, self.nodes[input_node], grid_size, pool_size,K,
+ file_name=weight_file)
+
+ elif model_class=='Sb_Layer':
+ node_type = Sb_Layer
+ K = node_dict['K']
+ weight_file = self.__get_config_file(node_dict['weight_file']) if 'weight_file' in node_dict else None
+ pool_size = node_dict['pool_size']
+ grid_size = node_dict['grid_size']
+
+ self.train_order += [node_name]
+
+ self.nodes[node_name] = node_type(node_name,self.nodes[input_node],grid_size,pool_size,K,file_name=weight_file)
+
+ elif model_class=='ViewTunedLayer':
+ node_type = ViewTunedLayer
+ K = node_dict['K']
+ input_nodes = node_dict['inputs']
+ input_nodes = [self.nodes[node] for node in input_nodes]
+ weight_file = self.__get_config_file(node_dict['weight_file']) if 'weight_file' in node_dict else None
+ alt_image_dir = node_dict['alt_image_dir']
+
+ self.train_order += [node_name]
+
+ #print "alt_image_dir=",alt_image_dir
+ self.nodes[node_name] = node_type(node_name,K,alt_image_dir,*input_nodes,file_name=weight_file)
+
+            elif model_class=='Readout_Layer':
+                node_type = Readout_Layer
+                K = node_dict['K']
+                # resolve the weight file through the config dir, like the other branches
+                weight_file = self.__get_config_file(node_dict['weight_file']) if 'weight_file' in node_dict else None
+                if weight_file == '':
+                    weight_file = None
+                alt_image_dir = node_dict['alt_image_dir']
+                lam = node_dict['lam']
+
+                self.train_order += [node_name]
+
+                self.nodes[node_name] = node_type(node_name, self.nodes[input_node], K, lam, alt_image_dir, file_name=weight_file)
+
+ else:
+ raise Exception("Unknown model class {}".format(model_class))
+
+        self.node_names = list(self.nodes.keys())
+
+        self.input_shape = (self.nodes['s1'].input_shape[1], self.nodes['s1'].input_shape[2])
+
+ print("Done")
+ #writer = tf.train.SummaryWriter('tmp/hmax', self.nodes['s1'].tf_sess.graph_def)
+
+
+ def __build_nodes_table(self, nodes_csv, models_csv, config):
+        models_df = pd.read_csv(models_csv, sep=' ')
+        nodes_df = pd.read_csv(nodes_csv, sep=' ')
+        nodes_full = pd.merge(left=nodes_df, right=models_df, on='model_id')
+        nodes_table = {r['id']: {'model_id': r['model_id'], 'python_object': r['python_object']}
+                       for _, r in nodes_full.iterrows()}
+
+ return nodes_table
+
+ def __get_edges(self, edges_csv, config):
+        def parse_query(query_str):
+            # '*' and 'None' match all nodes; "id=='name'" returns the quoted
+            # name (the slice strips the leading and trailing quote characters)
+            if query_str in ('*', 'None'):
+                return None
+            elif query_str.startswith('id=='):
+                return query_str[5:-1]
+            else:
+                raise Exception('Unknown query string {}'.format(query_str))
+
+ # location where config files are located
+ params_dir = self.__get_config_file(config.get('node_config_dir', ''))
+
+ edges_df = pd.read_csv(edges_csv, sep=' ')
+ edges = []
+ for _, row in edges_df.iterrows():
+ # find source and target
+ source = parse_query(row['source_query'])
+ target = parse_query(row['target_query'])
+
+            # load the parameters for this edge from its json file
+            params_file = os.path.join(params_dir, row['params_file'])
+            with open(params_file, 'r') as f:
+                params = json.load(f)
+
+ # Add to list
+ edges.append((target, source, params))
+
+ # TODO: check list and reorder to make sure the layers are in a valid order
+
+ # return the edges. Should we use a generator?
+ return edges
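+
+    # Illustrative edges file (space-separated, matching the columns parsed
+    # above); the node names and params file here are hypothetical:
+    #
+    #   source_query target_query params_file
+    #   id=='s1'     id=='c1'     c1_params.json
+    #   id=='c1'     id=='s2'     s2_params.json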
+
+ def __get_config_file(self, fpath):
+ if os.path.isabs(fpath):
+ return fpath
+ else:
+ return os.path.join(self.config_dir, fpath)
+
+
+
+ @classmethod
+ def load(cls, config_dir, name=None):
+ return cls(config_dir, name)
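+
+    # Example usage (illustrative; the network class is defined earlier in this
+    # file, and the 'hmax' directory layout is assumed from the config_hmax.json
+    # path mentioned in generate_output() below):
+    #
+    #   net = Network.load('hmax')   # 'Network' stands in for the actual class name
+    #   net.train()
+    #   net.generate_output()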
+
+    def train(self):
+        for node in self.train_order:
+            if not self.train_state.get(node, False):
+                print("Training Node: ", node)
+
+                # Prefer a node-specific image directory when one is configured.
+                if getattr(self.nodes[node], 'alt_image_dir', '') != '':
+                    image_dir = self.nodes[node].alt_image_dir
+                    print("\tUsing alternate image directory: ", image_dir)
+                else:
+                    image_dir = self.image_dir
+                    print("\tUsing default image directory: ", image_dir)
+
+                self.nodes[node].train(image_dir, batch_size=self.config_data['batch_size'],
+                                       image_shape=self.input_shape)
+                self.train_state[node] = True
+
+
+ # if node not in alt_image_dict:
+ # print "\tUsing default image directory: ", image_dir
+ # self.nodes[node].train(image_dir,batch_size=self.config_data['batch_size'],image_shape=self.input_shape)
+ # self.train_state[node]=True
+ # else:
+ # print "\tUsing alternate image directory: ", alt_image_dict[node]
+ # self.nodes[node].train(alt_image_dict[node],batch_size=self.config_data['batch_size'],image_shape=self.input_shape)
+ # self.train_state[node]=True
+
+ print("Done")
+
+        with open(self.train_state_file, 'w') as f:
+            f.write(json.dumps(self.train_state))
+
+
+    def run_stimulus(self, stimulus, node_table=None, output_file='output'):
+        '''stimulus is an instance of one of the mintnet.Stimulus objects, e.g. LocallySparseNoise'''
+
+        if not output_file.endswith(".ic"):
+            output_file = output_file + ".ic"  # add the .ic suffix if it is not already there
+
+ stim_template = stimulus.get_image_input(new_size=self.input_shape, add_channels=True)
+
+ print("Creating new output file: ", output_file, " (and removing any previous one)")
+ if os.path.exists(output_file):
+ os.remove(output_file)
+ output_h5 = h5py.File(output_file,'w')
+
+ T, y, x, K = stim_template.shape
+        all_nodes = list(self.nodes.keys())  # list() so the first node can be indexed below
+
+ if node_table is None: # just compute everything and return it all; good luck!
+
+ new_node_table = pd.DataFrame(columns=['node','band'])
+
+ compute_list = []
+ for node in all_nodes:
+
+ add_to_node_table, new_compute_list = self.nodes[node].get_compute_ops()
+ new_node_table = new_node_table.append(add_to_node_table,ignore_index=True)
+ compute_list += new_compute_list
+ else:
+ compute_list = []
+
+ new_node_table = node_table.sort_values('node')
+ new_node_table = new_node_table.reindex(np.arange(len(new_node_table)))
+
+ for node in all_nodes:
+                unit_table = new_node_table[new_node_table['node']==node]  # mask must come from the reindexed table
+ if (new_node_table['node']==node).any():
+ _, new_compute_list = self.nodes[node].get_compute_ops(unit_table=unit_table)
+
+ compute_list += new_compute_list
+
+
+ # create datasets in hdf5 file from node_table, with data indexed by table index
+ for i, row in new_node_table.iterrows():
+
+ output_shape = tuple([T] + [ int(x) for x in compute_list[i].get_shape()[1:]])
+ output_h5.create_dataset(str(i), output_shape, dtype=np.float32)
+
+
+
+        batch_size = self.config_data['batch_size']
+        num_batches = T // batch_size  # integer division; add one below for any remainder
+        if T % batch_size != 0:
+            num_batches += 1
+
+ for i in range(num_batches):
+ sys.stdout.write( '\r{0:.02f}'.format(float(i)*100/num_batches)+'% done')
+ sys.stdout.flush()
+ output_list = self.nodes[all_nodes[0]].tf_sess.run(compute_list,feed_dict={self.nodes[all_nodes[0]].input: stim_template[i*batch_size:(i+1)*batch_size]})
+
+ for io, output in enumerate(output_list):
+ # dataset_string = node_table['node'].loc[io] + "/" + str(int(node_table['band'].loc[io]))
+ # output_h5[dataset_string][i*batch_size:(i+1)*batch_size] = output
+
+ output_h5[str(io)][i*batch_size:(i+1)*batch_size] = output
+ sys.stdout.write( '\r{0:.02f}'.format(float(100))+'% done')
+ sys.stdout.flush()
+
+ output_h5['stim_template'] = stimulus.stim_template
+ output_h5.close()
+ new_node_table.to_hdf(output_file,'node_table')
+ if hasattr(stimulus,'label_dataframe') and stimulus.label_dataframe is not None:
+ stimulus.label_dataframe.to_hdf(output_file,'labels')
+ stimulus.stim_table.to_hdf(output_file,'stim_table')
+
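+    # Sketch of intended use (illustrative; LocallySparseNoise and its
+    # constructor arguments are defined elsewhere in this repo):
+    #
+    #   stim = LocallySparseNoise(...)
+    #   node_table = net.get_exemplar_node_table()
+    #   net.run_stimulus(stim, node_table=node_table, output_file='lsn_output')
+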
+
+ def get_exemplar_node_table(self):
+
+ node_table = pd.DataFrame(columns=['node','band','y','x'])
+ for node in self.nodes:
+ node_output = self.nodes[node].output
+ if hasattr(self.nodes[node],'band_shape'):
+                for band in node_output:
+                    y, x = [int(v) for v in node_output[band].get_shape()[1:3]]
+                    # use the center unit of each band as the exemplar
+                    y //= 2
+                    x //= 2
+ new_row = pd.DataFrame([[self.nodes[node].node_name, band, y, x]], columns=['node','band','y','x'])
+ node_table = node_table.append(new_row, ignore_index=True)
+ else:
+ new_row = pd.DataFrame([[self.nodes[node].node_name]], columns=['node'])
+ node_table = node_table.append(new_row, ignore_index=True)
+
+ return node_table
+
+
+ def generate_output(self):
+ try:
+ im_lib = Image_Library(self.image_dir,new_size=self.input_shape)
+ except OSError as e:
+            print("A repository of images (such as a collection from ImageNet, http://www.image-net.org) "
+                  "is required for input. An example would be too large to include in the isee_engine "
+                  "itself. Set the path for this image repository in hmax/config_hmax.json")
+ raise e
+
+ image_data = im_lib(1)
+
+ fig, ax = plt.subplots(1)
+ ax.imshow(image_data[0,:,:,0],cmap='gray')
+
+ fig.savefig(os.path.join(self.output_dir,'input_image'))
+ plt.close(fig)
+
+ nodes = self.nodes
+
+ for node_to_plot in nodes:
+ print("Generating output for node ", node_to_plot)
+ node_output_dir = os.path.join(self.output_dir,node_to_plot)
+
+ if not os.path.exists(node_output_dir):
+ os.makedirs(node_output_dir)
+
+ if type(self.nodes[node_to_plot])==ViewTunedLayer:
+ print("ViewTunedLayer")
+ self.nodes[node_to_plot].compute_output(image_data)
+ continue
+
+ if type(self.nodes[node_to_plot])==Readout_Layer:
+ print("Readout_Layer")
+ self.nodes[node_to_plot].compute_output(image_data)
+ continue
+
+ num_bands = len(nodes[node_to_plot].output)
+
+ if type(self.nodes[node_to_plot])==S1_Layer or node_to_plot=='c1':
+ #print "Yes, this is an S1_Layer"
+ num_filters_to_plot = 4
+ fig, ax = plt.subplots(num_filters_to_plot,num_bands,figsize=(20,8))
+ #fig2,ax2 = plt.subplots(num_filters_to_plot,num_bands,figsize=(20,8))
+ else:
+ num_filters_to_plot = 8
+ fig, ax = plt.subplots(num_filters_to_plot,num_bands,figsize=(20,8))
+
+ for band in range(num_bands):
+ result = nodes[node_to_plot].compute_output(image_data,band)
+ #print result[band].shape
+ n, y,x,K = result.shape
+
+ for k in range(num_filters_to_plot):
+
+ if num_bands!=1:
+ ax[k,band].imshow(result[0,:,:,k],interpolation='nearest',cmap='gray')
+ ax[k,band].axis('off')
+ else:
+ ax[k].imshow(result[0,:,:,k],interpolation='nearest',cmap='gray')
+ ax[k].axis('off')
+
+ # if type(self.nodes[node_to_plot])==S1_Layer:
+ # for k in range(num_filters_to_plot):
+
+ # ki = 4+k
+ # ax2[k,band].imshow(result[0,:,:,ki],interpolation='nearest',cmap='gray')
+ # ax2[k,band].axis('off')
+
+ if type(self.nodes[node_to_plot])==S1_Layer:
+ fig.savefig(os.path.join(node_output_dir,'output_phase0.pdf'))
+ #fig2.savefig(os.path.join(node_output_dir,'output_phase1.pdf'))
+ #plt.close(fig2)
+ else:
+ fig.savefig(os.path.join(node_output_dir,'output.pdf'))
+
+ plt.close(fig)
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/__init__.py
new file mode 100644
index 0000000..2ad957d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/__init__.py
@@ -0,0 +1,26 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from . import default_setters
+from .config import Config
+from .pointnetwork import PointNetwork
+from .pointsimulator import PointSimulator
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/config.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/config.py
new file mode 100644
index 0000000..a6644d5
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/config.py
@@ -0,0 +1,48 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+
+from bmtk.simulator.core.config import ConfigDict
+from bmtk.simulator.pointnet.io_tools import io
+
+
+# TODO: Implement pointnet validator and create json schema for pointnet
+def from_json(config_file, validate=False):
+ conf_dict = ConfigDict.from_json(config_file)
+ conf_dict.io = io
+ return conf_dict
+
+def from_dict(config_file, validate=False):
+ conf_dict = ConfigDict.from_dict(config_file)
+ conf_dict.io = io
+ return conf_dict
+
+class Config(ConfigDict):
+ def __init__(self, dict_obj):
+ super(Config, self).__init__(dict_obj)
+ self._io = io
+
+    @property
+    def io(self):
+        return self._io
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/default_setters/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/default_setters/__init__.py
new file mode 100644
index 0000000..b07cc2d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/default_setters/__init__.py
@@ -0,0 +1,2 @@
+from . import synaptic_weights
+from . import synapse_models
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/default_setters/synapse_models.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/default_setters/synapse_models.py
new file mode 100644
index 0000000..8e94328
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/default_setters/synapse_models.py
@@ -0,0 +1,16 @@
+from bmtk.simulator.pointnet.pyfunction_cache import add_synapse_model
+
+
+def static_synapse(edge):
+ model_params = {
+ 'model': 'static_synapse',
+ 'delay': edge.delay,
+ 'weight': edge.syn_weight(None, None)
+ }
+
+ model_params.update(edge.dynamics_params)
+ return model_params
+
+
+add_synapse_model(static_synapse, 'default', overwrite=False)
+add_synapse_model(static_synapse, overwrite=False)
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/default_setters/synaptic_weights.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/default_setters/synaptic_weights.py
new file mode 100644
index 0000000..4c66ae1
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/default_setters/synaptic_weights.py
@@ -0,0 +1,8 @@
+from bmtk.simulator.pointnet.pyfunction_cache import add_weight_function
+
+
+def default_weight_fnc(edge_props, source_node, target_node):
+ return edge_props['syn_weight']*edge_props.nsyns
+
+
+add_weight_function(default_weight_fnc, 'default_weight_fnc', overwrite=False)
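+
+# Example (illustrative): registering an alternative weight function under a name
+# that edge types can reference via their 'weight_function' attribute; the scaling
+# factor here is hypothetical.
+#
+#   def scaled_weight_fnc(edge_props, source_node, target_node):
+#       return 0.5 * edge_props['syn_weight'] * edge_props.nsyns
+#
+#   add_weight_function(scaled_weight_fnc, 'scaled_weight_fnc', overwrite=False)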
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/io_tools.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/io_tools.py
new file mode 100644
index 0000000..b5ea9ea
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/io_tools.py
@@ -0,0 +1,122 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+"""
+Functions for logging, writing and reading from file.
+
+"""
+import nest
+
+from bmtk.simulator.core.io_tools import IOUtils
+
+# Users should be able to use NEST whether or not it was compiled in parallel mode, which means checking
+# whether the method nest.SyncProcesses (aka MPI Barrier) exists. If it doesn't, try getting a barrier
+# from mpi4py instead.
+rank = nest.Rank()
+n_nodes = nest.NumProcesses()
+try:
+    barrier = nest.SyncProcesses
+except AttributeError:
+    try:
+        from mpi4py import MPI
+        barrier = MPI.COMM_WORLD.Barrier
+    except ImportError:
+        # Barrier is just an empty function, no problem if running on one core.
+        barrier = lambda: None
+
+
+class NestIOUtils(IOUtils):
+ def __init__(self):
+ super(NestIOUtils, self).__init__()
+ self.mpi_rank = rank
+ self.mpi_size = n_nodes
+
+ def barrier(self):
+ barrier()
+
+ def quiet_simulator(self):
+ nest.set_verbosity('M_QUIET')
+
+    def setup_output_dir(self, config_dir, log_file, overwrite=True):
+        super(NestIOUtils, self).setup_output_dir(config_dir, log_file, overwrite=overwrite)
+        if n_nodes > 1 and rank == 0:
+            self.log_info('Running NEST with MPI ({} cores)'.format(n_nodes))
+
+
+io = NestIOUtils()
+
+
+'''
+log_format = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
+pointnet_logger = logging.getLogger()
+pointnet_logger.setLevel(logging.DEBUG)
+
+console_handler = logging.StreamHandler(sys.stdout)
+console_handler.setFormatter(log_format)
+pointnet_logger.addHandler(console_handler)
+
+
+def collect_gdf_files(gdf_dir, output_file, nest_id_map, overwrite=False):
+
+ if n_nodes > 0:
+ # Wait until all nodes are finished
+ barrier()
+
+ if rank != 0:
+ return
+
+ log("Saving spikes to file...")
+ spikes_out = output_file
+ if os.path.exists(spikes_out) and not overwrite:
+ return
+
+ gdf_files_globs = '{}/*.gdf'.format(gdf_dir)
+ gdf_files = glob.glob(gdf_files_globs)
+ with open(spikes_out, 'w') as spikes_file:
+ csv_writer = csv.writer(spikes_file, delimiter=' ')
+ for gdffile in gdf_files:
+ spikes_df = pd.read_csv(gdffile, names=['gid', 'time', 'nan'], sep='\t')
+ for _, row in spikes_df.iterrows():
+ csv_writer.writerow([row['time'], nest_id_map[int(row['gid'])]])
+ os.remove(gdffile)
+ log("done.")
+
+
+def setup_output_dir(config):
+ if rank == 0:
+ try:
+ output_dir = config['output']['output_dir']
+ if os.path.exists(output_dir):
+ shutil.rmtree(output_dir)
+ os.makedirs(output_dir)
+
+ if 'log_file' in config['output']:
+ file_logger = logging.FileHandler(config['output']['log_file'])
+ file_logger.setFormatter(log_format)
+ pointnet_logger.addHandler(file_logger)
+ log('Created a log file')
+
+ except Exception as exc:
+ print(exc)
+
+ barrier()
+'''
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/modules/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/modules/__init__.py
new file mode 100644
index 0000000..962ea78
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/modules/__init__.py
@@ -0,0 +1,2 @@
+from .record_spikes import SpikesMod
+from .multimeter_reporter import MultimeterMod
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/modules/multimeter_reporter.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/modules/multimeter_reporter.py
new file mode 100644
index 0000000..12d86ac
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/modules/multimeter_reporter.py
@@ -0,0 +1,110 @@
+import os
+import glob
+import pandas as pd
+from bmtk.utils.io.cell_vars import CellVarRecorder
+from bmtk.simulator.pointnet.io_tools import io
+
+import nest
+
+
+try:
+ MPI_RANK = nest.Rank()
+ N_HOSTS = nest.NumProcesses()
+
+except Exception as e:
+ MPI_RANK = 0
+ N_HOSTS = 1
+
+
+class MultimeterMod(object):
+ def __init__(self, tmp_dir, file_name, variable_name, cells, tstart=None, tstop=None, interval=None, to_h5=True,
+ delete_dat=True, **opt_params):
+        """For recording neuron properties using a NEST multimeter object
+
+        :param tmp_dir: output directory
+        :param file_name: name of the (SONATA hdf5) file that will be saved to
+        :param variable_name: a list of the variable(s) being recorded; must be valid recordables for the cells
+        :param cells: a node-set or list of gids to record from
+        :param tstart: start time of the recording (if None, defaults to sim.tstart)
+        :param tstop: stop time of the recording (if None, defaults to sim.tstop)
+        :param interval: recording time step (if None, defaults to sim.dt)
+        :param to_h5: True to save to the SONATA .h5 format (default: True)
+        :param delete_dat: True to delete the .dat files created by NEST (default: True)
+        :param opt_params: catch-all for any additional keyword arguments (currently unused)
+        """
+
+ self._output_dir = tmp_dir
+ self._file_name = file_name if os.path.isabs(file_name) else os.path.join(self._output_dir, file_name)
+ self._variable_name = variable_name
+ self._node_set = cells
+ self._tstart = tstart
+ self._tstop = tstop
+ self._interval = interval
+ self._to_h5 = to_h5
+ self._delete_dat = delete_dat
+
+ self._gids = None
+ self._nest_ids = None
+ self._multimeter = None
+
+ self._min_delay = 1.0 # Required for calculating steps recorded
+
+ self.__output_label = os.path.join(self._output_dir, '__bmtk_nest_{}'.format(os.path.basename(self._file_name)))
+ self._var_recorder = CellVarRecorder(self._file_name, self._output_dir, self._variable_name, buffer_data=False)
+
+ def initialize(self, sim):
+ self._gids = list(sim.net.get_node_set(self._node_set).gids())
+ self._nest_ids = [sim.net._gid2nestid[gid] for gid in self._gids]
+
+ self._tstart = self._tstart or sim.tstart
+ self._tstop = self._tstop or sim.tstop
+ self._interval = self._interval or sim.dt
+ self._multimeter = nest.Create('multimeter',
+ params={'interval': self._interval, 'start': self._tstart, 'stop': self._tstop,
+ 'to_file': True, 'to_memory': False,
+ 'withtime': True,
+ 'record_from': self._variable_name,
+ 'label': self.__output_label})
+
+ nest.Connect(self._multimeter, self._nest_ids)
+
+    def finalize(self, sim):
+        io.barrier()  # make sure all MPI ranks have finished (may not strictly be required by NEST)
+
+        # min_delay needs to be fetched after the simulation, otherwise the value will be off. There also
+        # seems to be an MPI barrier inside GetKernelStatus.
+        self._min_delay = nest.GetKernelStatus('min_delay')
+ if self._to_h5 and MPI_RANK == 0:
+ for gid in self._gids:
+ self._var_recorder.add_cell(gid, sec_list=[0], seg_list=[0.0])
+
+            # Initialize the hdf5 file, including a preallocated data block for the recorded variables.
+            # Unfortunately with NEST the final recorded time-step can't be calculated in advance, and even
+            # with the same min/max_delay it can differ. We need to read the output file to get n_steps.
+ def get_var_recorder(node_recording_df):
+ if not self._var_recorder.is_initialized:
+ self._var_recorder.tstart = node_recording_df['time'].min()
+ self._var_recorder.tstop = node_recording_df['time'].max()
+ self._var_recorder.dt = self._interval
+ self._var_recorder.initialize(len(node_recording_df))
+
+ return self._var_recorder
+
+ gid_map = sim.net._nestid2gid
+ for nest_file in glob.glob('{}*'.format(self.__output_label)):
+ report_df = pd.read_csv(nest_file, index_col=False, names=['nest_id', 'time']+self._variable_name,
+ sep='\t')
+ for grp_id, grp_df in report_df.groupby(by='nest_id'):
+ gid = gid_map[grp_id]
+ vr = get_var_recorder(grp_df)
+ for var_name in self._variable_name:
+ vr.record_cell_block(gid, var_name, grp_df[var_name])
+
+ if self._delete_dat:
+ # remove csv file created by nest
+ os.remove(nest_file)
+
+ self._var_recorder.close()
+
+ io.barrier()
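+
+# Example (illustrative) of attaching the module by hand; in practice pointnet builds
+# it from the "reports" section of the simulation config (see pointsimulator.py):
+#
+#   mod = MultimeterMod(tmp_dir='output', file_name='membrane_vars.h5',
+#                       variable_name=['V_m'], cells=[0, 1, 2])
+#   sim.add_mod(mod)   # sim: a PointSimulator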
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/modules/record_spikes.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/modules/record_spikes.py
new file mode 100644
index 0000000..9791fdc
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/modules/record_spikes.py
@@ -0,0 +1,90 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import glob
+from bmtk.utils.io.spike_trains import SpikeTrainWriter
+from bmtk.simulator.pointnet.io_tools import io
+
+import nest
+
+
+MPI_RANK = nest.Rank()
+N_HOSTS = nest.NumProcesses()
+
+
+class SpikesMod(object):
+    """Module used for saving spikes
+
+ """
+
+ def __init__(self, tmp_dir, spikes_file_csv=None, spikes_file=None, spikes_file_nwb=None, spikes_sort_order=None):
+ def _get_path(file_name):
+ # Unless file-name is an absolute path then it should be placed in the $OUTPUT_DIR
+ if file_name is None:
+ return None
+ return file_name if os.path.isabs(file_name) else os.path.join(tmp_dir, file_name)
+
+ self._csv_fname = _get_path(spikes_file_csv)
+ self._h5_fname = _get_path(spikes_file)
+ self._nwb_fname = _get_path(spikes_file_nwb)
+
+ self._tmp_dir = tmp_dir
+ self._tmp_file_base = 'tmp_spike_times'
+ self._spike_labels = os.path.join(self._tmp_dir, self._tmp_file_base)
+
+ self._spike_writer = SpikeTrainWriter(tmp_dir=tmp_dir, mpi_rank=MPI_RANK, mpi_size=N_HOSTS)
+ self._spike_writer.delimiter = '\t'
+ self._spike_writer.gid_col = 0
+ self._spike_writer.time_col = 1
+ self._sort_order = spikes_sort_order
+
+ self._spike_detector = None
+
+ def initialize(self, sim):
+ self._spike_detector = nest.Create("spike_detector", 1, {'label': self._spike_labels, 'withtime': True,
+ 'withgid': True, 'to_file': True})
+
+ for pop_name, pop in sim._graph._nestid2nodeid_map.items():
+ nest.Connect(list(pop.keys()), self._spike_detector)
+
+ def finalize(self, sim):
+ if MPI_RANK == 0:
+ for gdf_file in glob.glob(self._spike_labels + '*.gdf'):
+ self._spike_writer.add_spikes_file(gdf_file)
+ io.barrier()
+
+ gid_map = sim._graph._nestid2gid
+
+ if self._csv_fname is not None:
+ self._spike_writer.to_csv(self._csv_fname, sort_order=self._sort_order, gid_map=gid_map)
+ io.barrier()
+
+ if self._h5_fname is not None:
+ self._spike_writer.to_hdf5(self._h5_fname, sort_order=self._sort_order, gid_map=gid_map)
+ io.barrier()
+
+ if self._nwb_fname is not None:
+ self._spike_writer.to_nwb(self._nwb_fname, sort_order=self._sort_order, gid_map=gid_map)
+ io.barrier()
+
+ self._spike_writer.close()
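+
+# Example (illustrative): writing spikes to both csv and SONATA h5. The keyword
+# names mirror the constructor above; the file names and sort-order value are
+# hypothetical.
+#
+#   mod = SpikesMod(tmp_dir='output', spikes_file_csv='spikes.csv',
+#                   spikes_file='spikes.h5', spikes_sort_order='time')
+#   sim.add_mod(mod)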
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/pointnetwork.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/pointnetwork.py
new file mode 100644
index 0000000..0cc781f
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/pointnetwork.py
@@ -0,0 +1,176 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+import functools
+import nest
+
+from bmtk.simulator.core.simulator_network import SimNetwork
+from bmtk.simulator.pointnet.sonata_adaptors import PointNodeAdaptor, PointEdgeAdaptor
+from bmtk.simulator.pointnet import pyfunction_cache
+from bmtk.simulator.pointnet.io_tools import io
+
+
+class PointNetwork(SimNetwork):
+ def __init__(self, **properties):
+ super(PointNetwork, self).__init__(**properties)
+ self._io = io
+
+ self.__weight_functions = {}
+ self._params_cache = {}
+
+ self._virtual_ids_map = {}
+
+ self._batch_nodes = True
+
+ self._nest_id_map = {}
+ self._nestid2nodeid_map = {}
+
+ self._nestid2gid = {}
+
+ self._nodes_table = {}
+ self._gid2nestid = {}
+
+ @property
+ def py_function_caches(self):
+ return pyfunction_cache
+
+ def __get_params(self, node_params):
+ if node_params.with_dynamics_params:
+ # TODO: use property, not name
+ return node_params['dynamics_params']
+
+ params_file = node_params[self._params_column]
+ # params_file = self._MT.params_column(node_params) #node_params['dynamics_params']
+ if params_file in self._params_cache:
+ return self._params_cache[params_file]
+ else:
+ params_dir = self.get_component('models_dir')
+            params_path = os.path.join(params_dir, params_file)
+            with open(params_path, 'r') as f:
+                params_dict = json.load(f)
+            self._params_cache[params_file] = params_dict
+ return params_dict
+
+ def _register_adaptors(self):
+ super(PointNetwork, self)._register_adaptors()
+ self._node_adaptors['sonata'] = PointNodeAdaptor
+ self._edge_adaptors['sonata'] = PointEdgeAdaptor
+
+ # TODO: reimplement with py_modules like in bionet
+ def add_weight_function(self, function, name=None):
+ fnc_name = name if name is not None else function.__name__
+ self.__weight_functions[fnc_name] = functools.partial(function)
+
+    def set_default_weight_function(self, function):
+        # add_weight_function always overwrites existing entries, so no overwrite flag is needed
+        self.add_weight_function(function, 'default_weight_fnc')
+
+ def get_weight_function(self, name):
+ return self.__weight_functions[name]
+
+ def build_nodes(self):
+ for node_pop in self.node_populations:
+ nid2nest_map = {}
+ nest2nid_map = {}
+ if node_pop.internal_nodes_only:
+ for node in node_pop.get_nodes():
+ node.build()
+ for nid, gid, nest_id in zip(node.node_ids, node.gids, node.nest_ids):
+ self._nestid2gid[nest_id] = gid
+ self._gid2nestid[gid] = nest_id
+ nid2nest_map[nid] = nest_id
+ nest2nid_map[nest_id] = nid
+
+ elif node_pop.mixed_nodes:
+ for node in node_pop.get_nodes():
+ if node.model_type != 'virtual':
+ node.build()
+ for nid, gid, nest_id in zip(node.node_ids, node.gids, node.nest_ids):
+ self._nestid2gid[nest_id] = gid
+ self._gid2nestid[gid] = nest_id
+ nid2nest_map[nid] = nest_id
+ nest2nid_map[nest_id] = nid
+
+ self._nest_id_map[node_pop.name] = nid2nest_map
+ self._nestid2nodeid_map[node_pop.name] = nest2nid_map
+
+ def build_recurrent_edges(self):
+ recurrent_edge_pops = [ep for ep in self._edge_populations if not ep.virtual_connections]
+ if not recurrent_edge_pops:
+ return
+
+ for edge_pop in recurrent_edge_pops:
+ src_nest_ids = self._nest_id_map[edge_pop.source_nodes]
+ trg_nest_ids = self._nest_id_map[edge_pop.target_nodes]
+ for edge in edge_pop.get_edges():
+ nest_srcs = [src_nest_ids[nid] for nid in edge.source_node_ids]
+ nest_trgs = [trg_nest_ids[nid] for nid in edge.target_node_ids]
+ nest.Connect(nest_srcs, nest_trgs, conn_spec='one_to_one', syn_spec=edge.nest_params)
+
+ def find_edges(self, source_nodes=None, target_nodes=None):
+ # TODO: Move to parent
+ selected_edges = self._edge_populations[:]
+
+ if source_nodes is not None:
+ selected_edges = [edge_pop for edge_pop in selected_edges if edge_pop.source_nodes == source_nodes]
+
+ if target_nodes is not None:
+ selected_edges = [edge_pop for edge_pop in selected_edges if edge_pop.target_nodes == target_nodes]
+
+ return selected_edges
+
+ def add_spike_trains(self, spike_trains, node_set):
+ # Build the virtual nodes
+ src_nodes = [node_pop for node_pop in self.node_populations if node_pop.name in node_set.population_names()]
+ for node_pop in src_nodes:
+ if node_pop.name in self._virtual_ids_map:
+ continue
+
+ virt_node_map = {}
+ if node_pop.virtual_nodes_only:
+ for node in node_pop.get_nodes():
+ nest_ids = nest.Create('spike_generator', node.n_nodes, {})
+ for node_id, nest_id in zip(node.node_ids, nest_ids):
+ virt_node_map[node_id] = nest_id
+ nest.SetStatus([nest_id], {'spike_times': spike_trains.get_spikes(node_id)})
+
+ elif node_pop.mixed_nodes:
+ for node in node_pop.get_nodes():
+ if node.model_type != 'virtual':
+ continue
+
+ nest_ids = nest.Create('spike_generator', node.n_nodes, {})
+ for node_id, nest_id in zip(node.node_ids, nest_ids):
+ virt_node_map[node_id] = nest_id
+ nest.SetStatus([nest_id], {'spike_times': spike_trains.get_spikes(node_id)})
+
+ self._virtual_ids_map[node_pop.name] = virt_node_map
+
+ # Create virtual synaptic connections
+ for source_reader in src_nodes:
+ for edge_pop in self.find_edges(source_nodes=source_reader.name):
+ src_nest_ids = self._virtual_ids_map[edge_pop.source_nodes]
+ trg_nest_ids = self._nest_id_map[edge_pop.target_nodes]
+ for edge in edge_pop.get_edges():
+ nest_srcs = [src_nest_ids[nid] for nid in edge.source_node_ids]
+ nest_trgs = [trg_nest_ids[nid] for nid in edge.target_node_ids]
+ nest.Connect(nest_srcs, nest_trgs, conn_spec='one_to_one', syn_spec=edge.nest_params)
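+
+# Typical build sequence (illustrative; it mirrors PointSimulator.from_config in
+# pointsimulator.py, and assumes from_config is provided by the SimNetwork base class):
+#
+#   net = PointNetwork.from_config(config)
+#   net.build_nodes()
+#   net.build_recurrent_edges()
+#   net.add_spike_trains(spikes, node_set)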
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/pointsimulator.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/pointsimulator.py
new file mode 100644
index 0000000..a434da6
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/pointsimulator.py
@@ -0,0 +1,266 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import glob
+import nest
+from six import string_types
+from six import moves
+
+from bmtk.simulator.core.simulator import Simulator
+from bmtk.simulator.pointnet.config import Config
+#import bmtk.simulator.pointnet.config as cfg
+from bmtk.simulator.pointnet.io_tools import io
+import bmtk.simulator.utils.simulation_reports as reports
+import bmtk.simulator.utils.simulation_inputs as inputs
+from bmtk.utils.io import spike_trains
+from . import modules as mods
+from bmtk.simulator.core.node_sets import NodeSet
+
+
+class PointSimulator(Simulator):
+ def __init__(self, graph, dt=0.001, overwrite=True, print_time=False):
+ self._tstop = 0.0 # simulation time
+ self._dt = dt # time step
+ self._output_dir = './output/' # directory where log and temporary output will be stored
+ self._overwrite = overwrite
+ self._block_run = False
+ self._block_size = -1
+
+ self._cells_built = False
+ self._internal_connections_built = False
+
+ self._graph = graph
+ self._external_cells = {} # dict-of-dict of external pointnet cells with keys [network_name][cell_id]
+ self._internal_cells = {} # dictionary of internal pointnet cells with cell_id as key
+ self._nest_id_map = {} # a map between NEST IDs and Node-IDs
+
+ self._spikedetector = None
+ self._spikes_file = None # File where all output spikes will be collected and saved
+ self._tmp_spikes_file = None # temporary gdf files of spike-trains
+ self._spike_trains_ds = {} # used to temporary store NWB datasets containing spike trains
+
+ self._spike_detector = None
+
+ self._mods = []
+
+ self._inputs = {} # Used to hold references to nest input objects (current_generators, etc)
+
+        # Reset the NEST kernel for a new simulation
+        # TODO: move this into its own function and make sure it is called before the network is built
+ nest.ResetKernel()
+ nest.SetKernelStatus({"resolution": self._dt, "overwrite_files": self._overwrite, "print_time": print_time})
+
+ @property
+ def tstart(self):
+ return 0.0
+
+ @property
+ def dt(self):
+ return self._dt
+
+ @property
+ def tstop(self):
+ return self._tstop
+
+ @tstop.setter
+ def tstop(self, val):
+ self._tstop = val
+
+ @property
+ def n_steps(self):
+        return int((self.tstop - self.tstart)/self.dt)
+
+ @property
+ def net(self):
+ return self._graph
+
+ @property
+ def gid_map(self):
+ return self._graph._nestid2gid
+
+ def _get_block_trial(self, duration):
+        """
+        Compute the number of full block trials, the simulated length of each block, and the fractional
+        length of the final, partial block (if any).
+        """
+ if self._block_run:
+ data_res = self._block_size * self._dt
+ fn = duration / data_res
+ n = int(fn)
+ res = fn - n
+ else:
+ n = -1
+ res = -1
+ data_res = -1
+ return n, res, data_res
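+
+    # Worked example (illustrative): with block_size=5000 and dt=0.1 ms,
+    # data_res = 5000 * 0.1 = 500.0 ms per block; a duration of 1250.0 ms gives
+    # fn = 2.5, so n = 2 full blocks and res = 0.5 (the remaining 250 ms,
+    # simulated in run() as res * data_res).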
+
+ '''
+ def set_spikes_recordings(self):
+ # TODO: Pass in output-dir and file name to save to
+ # TODO: Allow for sorting - overwrite bionet module
+ self._spike_detector = nest.Create("spike_detector", 1, {'label': os.path.join(self.output_dir, 'tmp_spike_times'),
+ 'withtime': True, 'withgid': True, 'to_file': True})
+ # print self._spike_detector
+
+ for pop_name, pop in self._graph._nestid2nodeid_map.items():
+ # print pop.keys()
+
+ nest.Connect(pop.keys(), self._spike_detector)
+ # exit()
+ '''
+
+ def add_step_currents(self, amp_times, amp_values, node_set, input_name):
+ scg = nest.Create("step_current_generator",
+ params={'amplitude_times': amp_times, 'amplitude_values': amp_values})
+
+ if not isinstance(node_set, NodeSet):
+ node_set = self.net.get_node_set(node_set)
+
+ # Convert node set into list of gids and then look-up the nest-ids
+ nest_ids = [self.net._gid2nestid[gid] for gid in node_set.gids()]
+
+ # Attach current clamp to nodes
+ nest.Connect(scg, nest_ids, syn_spec={'delay': self.dt})
+
+ self._inputs[input_name] = nest_ids
+
+ def run(self, tstop=None):
+ if tstop is None:
+ tstop = self._tstop
+
+ for mod in self._mods:
+ mod.initialize(self)
+
+ io.barrier()
+
+ io.log_info('Starting Simulation')
+ n, res, data_res = self._get_block_trial(tstop)
+ if n > 0:
+ for r in moves.range(n):
+ nest.Simulate(data_res)
+        if res > 0:
+            nest.Simulate(res * data_res)  # remainder of the final, partial block
+ if n < 0:
+ nest.Simulate(tstop)
+
+ io.barrier()
+ io.log_info('Simulation finished, finalizing results.')
+ for mod in self._mods:
+ mod.finalize(self)
+ io.barrier()
+ io.log_info('Done.')
+
+ def add_mod(self, mod):
+ self._mods.append(mod)
+
+ @classmethod
+ def from_config(cls, configure, graph):
+ # load the json file or object
+ if isinstance(configure, string_types):
+ config = Config.from_json(configure, validate=True)
+ elif isinstance(configure, dict):
+ config = configure
+ else:
+ raise Exception('Could not convert {} (type "{}") to json.'.format(configure, type(configure)))
+
+ if 'run' not in config:
+            raise Exception('Json file is missing "run" entry. Unable to build PointNetwork.')
+ run_dict = config['run']
+
+ # Get network parameters
+ # step time (dt) is set in the kernel and should be passed
+        overwrite = run_dict.get('overwrite_output_dir', True)
+        print_time = run_dict.get('print_time', False)
+ dt = run_dict['dt'] # TODO: make sure dt exists
+ network = cls(graph, dt=dt, overwrite=overwrite)
+
+ if 'output_dir' in config['output']:
+ network.output_dir = config['output']['output_dir']
+
+ if 'block_run' in run_dict and run_dict['block_run']:
+ if 'block_size' not in run_dict:
+ raise Exception('"block_run" is set to True but "block_size" not found.')
+ network._block_size = run_dict['block_size']
+
+ if 'duration' in run_dict:
+ network.tstop = run_dict['duration']
+ elif 'tstop' in run_dict:
+ network.tstop = run_dict['tstop']
+
+ # Create the output-directory, or delete existing files if it already exists
+ graph.io.log_info('Setting up output directory')
+ if not os.path.exists(config['output']['output_dir']):
+ os.mkdir(config['output']['output_dir'])
+ elif overwrite:
+ for gfile in glob.glob(os.path.join(config['output']['output_dir'], '*.gdf')):
+ os.remove(gfile)
+
+ graph.io.log_info('Building cells.')
+ graph.build_nodes()
+
+ graph.io.log_info('Building recurrent connections')
+ graph.build_recurrent_edges()
+
+ for sim_input in inputs.from_config(config):
+ node_set = graph.get_node_set(sim_input.node_set)
+ if sim_input.input_type == 'spikes':
+ spikes = spike_trains.SpikesInput.load(name=sim_input.name, module=sim_input.module,
+ input_type=sim_input.input_type, params=sim_input.params)
+ io.log_info('Build virtual cell stimulations for {}'.format(sim_input.name))
+ graph.add_spike_trains(spikes, node_set)
+
+ elif sim_input.input_type == 'current_clamp':
+ # TODO: Need to make this more robust
+ amp_times = sim_input.params.get('amplitude_times', [])
+ amp_values = sim_input.params.get('amplitude_values', [])
+
+ if 'delay' in sim_input.params:
+ amp_times.append(sim_input.params['delay'])
+ amp_values.append(sim_input.params['amp'])
+
+ if 'duration' in sim_input.params:
+ amp_times.append(sim_input.params['delay'] + sim_input.params['duration'])
+ amp_values.append(0.0)
+
+ network.add_step_currents(amp_times, amp_values, node_set, sim_input.name)
+
+ else:
+ graph.io.log_warning('Unknown input type {}'.format(sim_input.input_type))
+
+ sim_reports = reports.from_config(config)
+ for report in sim_reports:
+ if report.module == 'spikes_report':
+ mod = mods.SpikesMod(**report.params)
+
+ elif isinstance(report, reports.MembraneReport):
+                # For convenience and for compliance with the SONATA format, "membrane_report" and
+                # "multimeter_report" are treated the same in pointnet.
+ mod = mods.MultimeterMod(**report.params)
+
+ else:
+ graph.io.log_exception('Unknown report type {}'.format(report.module))
+
+ network.add_mod(mod)
+
+ io.log_info('Network created.')
+ return network
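+
+# Sketch of a typical driver script (illustrative; Config.from_json is defined in
+# config.py above, and PointNetwork.from_config is assumed to come from the
+# SimNetwork base class):
+#
+#   config = Config.from_json('simulation_config.json')
+#   graph = PointNetwork.from_config(config)
+#   sim = PointSimulator.from_config(config, graph)
+#   sim.run()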
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/property_map.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/property_map.py
new file mode 100644
index 0000000..dd1ecc4
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/property_map.py
@@ -0,0 +1,213 @@
+import types
+import numpy as np
+
+import nest
+
+from bmtk.simulator.pointnet.pyfunction_cache import py_modules
+from bmtk.simulator.pointnet.io_tools import io
+
+class NodePropertyMap(object):
+ def __init__(self, graph):
+ self._graph = graph
+ # TODO: Move template_cache to parent graph so it can be shared across diff populations.
+ self._template_cache = {}
+ self.node_types_table = None
+
+ self.batch = True
+
+
+ def _parse_model_template(self, model_template):
+ if model_template in self._template_cache:
+ return self._template_cache[model_template]
+ else:
+ template_parts = model_template.split(':')
+ assert(len(template_parts) == 2)
+ directive, template = template_parts[0], template_parts[1]
+ self._template_cache[model_template] = (directive, template)
+ return directive, template
+
+ def load_cell(self, node):
+ model_type = self._parse_model_template(node['model_template'])[1]
+ dynamics_params = self.dynamics_params(node)
+ fnc_name = node['model_processing']
+ if fnc_name is None:
+ return nest.Create(model_type, 1, dynamics_params)
+ else:
+ cell_fnc = py_modules.cell_processor(fnc_name)
+ return cell_fnc(model_type, node, dynamics_params)
+
+ @classmethod
+ def build_map(cls, node_group, graph):
+ prop_map = cls(graph)
+
+ node_types_table = node_group.parent.node_types_table
+ prop_map.node_types_table = node_types_table
+
+ if 'model_processing' in node_group.columns:
+ prop_map.batch = False
+ elif 'model_processing' in node_group.all_columns:
+ model_fncs = [node_types_table[ntid]['model_processing'] for ntid in np.unique(node_group.node_type_ids)
+ if node_types_table[ntid]['model_processing'] is not None]
+
+ if model_fncs:
+ prop_map.batch = False
+
+ if node_group.has_dynamics_params:
+ prop_map.batch = False
+ prop_map.dynamics_params = types.MethodType(group_dynamics_params, prop_map)
+ else: # 'dynamics_params' in node_group.all_columns:
+ prop_map.dynamics_params = types.MethodType(types_dynamics_params, prop_map)
+
+ if prop_map.batch:
+ prop_map.model_type = types.MethodType(model_type_batched, prop_map)
+ prop_map.model_params = types.MethodType(model_params_batched, prop_map)
+ else:
+ prop_map.model_type = types.MethodType(model_type, prop_map)
+ prop_map.model_params = types.MethodType(model_params, prop_map)
+
+ if node_group.has_gids:
+ prop_map.gid = types.MethodType(gid, prop_map)
+ else:
+ prop_map.gid = types.MethodType(node_id, prop_map)
+
+ return prop_map
+
+
+def gid(self, node):
+ return node['gid']
+
+
+def node_id(self, node):
+ return node.node_id
+
+
+def model_type(self, node):
+ return self._parse_model_template(node['model_template'])
+
+
+def model_type_batched(self, node_type_id):
+ return self._parse_model_template(self.node_types_table[node_type_id]['model_template'])
+
+
+def model_params(self, node):
+ return {}
+
+
+def model_params_batched(self, node_type_id):
+ return self.node_types_table[node_type_id]['dynamics_params']
+
+
+def types_dynamics_params(self, node):
+ return node['dynamics_params']
+
+
+def group_dynamics_params(self, node):
+ return node.dynamics_params
+
+
+class EdgePropertyMap(object):
+ def __init__(self, graph, source_population, target_population):
+ self._graph = graph
+ self._source_population = source_population
+ self._target_population = target_population
+
+ self.batch = True
+        self.synaptic_models = []
+
+
+ def synaptic_model(self, edge):
+ return edge['model_template']
+
+
+ def synpatic_params(self, edge):
+ params_dict = {'weight': self.syn_weight(edge), 'delay': edge['delay']}
+ params_dict.update(edge['dynamics_params'])
+ return params_dict
+
+ @classmethod
+ def build_map(cls, edge_group, biograph):
+        prop_map = cls(biograph, edge_group.parent.source_population, edge_group.parent.target_population)
+ if 'model_template' in edge_group.columns:
+ prop_map.batch = False
+ elif 'model_template' in edge_group.all_columns:
+            edge_types_table = edge_group.parent.edge_types_table
+            syn_models = set(edge_types_table[etid]['model_template']
+                             for etid in np.unique(edge_types_table.edge_type_ids))
+            prop_map.synaptic_models = list(syn_models)
+        else:
+            prop_map.synaptic_models = ['static_synapse']
+ #s = [edge_types_table[ntid]['model_template'] for ntid in np.unique(edge_types_table.node_type_ids)
+ # if edge_types_table[ntid]['model_template'] is not None]
+
+
+ # For fetching/calculating synaptic weights
+ edge_types_weight_fncs = set()
+ edge_types_table = edge_group.parent.edge_types_table
+ for etid in edge_types_table.edge_type_ids:
+ weight_fnc = edge_types_table[etid].get('weight_function', None)
+ if weight_fnc is not None:
+ edge_types_weight_fncs.add(weight_fnc)
+
+ if 'weight_function' in edge_group.group_columns or edge_types_weight_fncs:
+ # Customized function for user to calculate the synaptic weight
+ prop_map.syn_weight = types.MethodType(weight_function, prop_map)
+
+ elif 'syn_weight' in edge_group.all_columns:
+ # Just return the synaptic weight
+ prop_map.syn_weight = types.MethodType(syn_weight, prop_map)
+ else:
+ io.log_exception('Could not find syn_weight or weight_function properties. Cannot create connections.')
+
+ # For determining the synapse placement
+ if 'nsyns' in edge_group.all_columns:
+ prop_map.nsyns = types.MethodType(nsyns, prop_map)
+ else:
+ # It will get here for connections onto point neurons
+ prop_map.nsyns = types.MethodType(no_syns, prop_map)
+
+ # For target sections
+ '''
+ if 'syn_weight' not in edge_group.all_columns:
+ io.log_exception('Edges {} missing syn_weight property for connections.'.format(edge_group.parent.name))
+ else:
+ prop_map.syn_weight = types.MethodType(syn_weight, prop_map)
+
+
+
+ if 'syn_weight' in edge_group.columns:
+ prop_map.weight = types.MethodType(syn_weight, prop_map)
+ prop_map.preselected_targets = True
+ prop_map.nsyns = types.MethodType(no_nsyns, prop_map)
+ else:
+ prop_map.preselected_targets = False
+ '''
+ return prop_map
+
+
+def syn_weight(self, edge):
+ return edge['syn_weight']*self.nsyns(edge)
+
+
+def weight_function(self, edge):
+ weight_fnc_name = edge['weight_function']
+ src_node = self._graph.get_node(self._source_population, edge.source_node_id)
+ trg_node = self._graph.get_node(self._target_population, edge.target_node_id)
+
+ if weight_fnc_name is None:
+ weight_fnc = py_modules.synaptic_weight('default_weight_fnc')
+ return weight_fnc(edge, src_node, trg_node)# *self.nsyns(edge)
+
+ elif py_modules.has_synaptic_weight(weight_fnc_name):
+ weight_fnc = py_modules.synaptic_weight(weight_fnc_name)
+ return weight_fnc(edge, src_node, trg_node)
+
+ else:
+ io.log_exception('weight_function {} is not defined.'.format(weight_fnc_name))
+
+
+def nsyns(self, edge):
+ return edge['nsyns']
+
+
+def no_syns(self, edge):
+ return 1
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/pyfunction_cache.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/pyfunction_cache.py
new file mode 100644
index 0000000..9e50616
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/pyfunction_cache.py
@@ -0,0 +1,246 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import types
+from functools import wraps
+
+
+class _PyFunctions(object):
+    """Structure for holding custom user-defined python functions.
+
+    Stores a set of functions created by the user. These should not be accessed directly; rather, use the
+    decorators or setter functions, and use the py_modules class variable to access individual functions.
+    It is divided up into
+        synaptic_weight: functions for calculating synaptic weight.
+        cell_model: should return a cell object (a NEST node in pointnet).
+        synapse_model: should return a synapse model (a NEST parameter dict in pointnet).
+    """
+ def __init__(self):
+ self.__syn_weights = {}
+ self.__cell_models = {}
+ self.__synapse_models = {}
+ self.__cell_processors = {}
+
+ def clear(self):
+ self.__syn_weights.clear()
+ self.__cell_models.clear()
+ self.__synapse_models.clear()
+ self.__cell_processors.clear()
+
+    def add_synaptic_weight(self, name, func, overwrite=True):
+        """stores the synaptic weight function under the given name"""
+        if overwrite or name not in self.__syn_weights:
+            self.__syn_weights[name] = func
+
+    @property
+    def synaptic_weights(self):
+        """names of all available synaptic weight functions (plural, so it does not clobber the accessor below)"""
+        return list(self.__syn_weights.keys())
+
+    def synaptic_weight(self, name):
+        """return the synaptic weight function registered under name"""
+        return self.__syn_weights[name]
+
+ def has_synaptic_weight(self, name):
+ return name in self.__syn_weights
+
+ def __cell_model_key(self, directive, model_type):
+ return (directive, model_type)
+
+ def add_cell_model(self, directive, model_type, func, overwrite=True):
+ key = self.__cell_model_key(directive, model_type)
+ if overwrite or key not in self.__cell_models:
+ self.__cell_models[key] = func
+
+ @property
+ def cell_models(self):
+ return self.__cell_models.keys()
+
+ def cell_model(self, directive, model_type):
+ return self.__cell_models[self.__cell_model_key(directive, model_type)]
+
+ def has_cell_model(self, directive, model_type):
+ return self.__cell_model_key(directive, model_type) in self.__cell_models
+
+ def add_synapse_model(self, name, func, overwrite=True):
+ if overwrite or name not in self.__synapse_models:
+ self.__synapse_models[name] = func
+
+ @property
+ def synapse_models(self):
+ return self.__synapse_models.keys()
+
+ def synapse_model(self, name):
+ return self.__synapse_models[name]
+
+ @property
+ def cell_processors(self):
+ return self.__cell_processors.keys()
+
+ def cell_processor(self, name):
+ return self.__cell_processors[name]
+
+ def add_cell_processor(self, name, func, overwrite=True):
+        if overwrite or name not in self.__cell_processors:
+ self.__cell_processors[name] = func
+
+ def __repr__(self):
+ rstr = '{}: {}\n'.format('cell_models', self.cell_models)
+ rstr += '{}: {}\n'.format('synapse_models', self.synapse_models)
+ rstr += '{}: {}'.format('synaptic_weights', self.synaptic_weights)
+ return rstr
+
+py_modules = _PyFunctions()
+
+
+def synaptic_weight(*wargs, **wkwargs):
+ """A decorator for registering a function as a synaptic weight function.
+ To use either
+ @synaptic_weight
+ def weight_function(): ...
+
+ or
+ @synaptic_weight(name='name_in_edge_types')
+ def weight_function(): ...
+
+ Once the decorator has been attached and imported the functions will automatically be added to py_modules.
+ """
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+ py_modules.add_synaptic_weight(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_synaptic_weight(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
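+# Usage sketch (names here are hypothetical; the simulator determines what
+# arguments get passed to weight functions):
+#
+#     @synaptic_weight(name='gaussian')
+#     def gaussian_weight(edge, src_node, trg_node):
+#         return edge['syn_weight']
+#
+#     weight_fn = py_modules.synaptic_weight('gaussian')  # retrieve by name
+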
+
+def cell_model(*wargs, **wkwargs):
+ """A decorator for registering NEURON cell loader functions."""
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+        # add_cell_model stores models under a (directive, model_type) key; when used as a bare
+        # decorator we register under the function's own name with a None model_type placeholder
+        py_modules.add_cell_model(func.__name__, None, func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+            py_modules.add_cell_model(wkwargs['name'], None, func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
+
+def synapse_model(*wargs, **wkwargs):
+ """A decorator for registering NEURON synapse loader functions."""
+ if len(wargs) == 1 and callable(wargs[0]):
+ # for the case without decorator arguments, grab the function object in wargs and create a decorator
+ func = wargs[0]
+ py_modules.add_synapse_model(func.__name__, func) # add function assigned to its original name
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ else:
+ # for the case with decorator arguments
+ assert(all(k in ['name'] for k in wkwargs.keys()))
+
+ def decorator(func):
+ # store the function in py_modules but under the name given in the decorator arguments
+ py_modules.add_synapse_model(wkwargs['name'], func)
+
+ @wraps(func)
+ def func_wrapper(*args, **kwargs):
+ return func(*args, **kwargs)
+ return func_wrapper
+ return decorator
+
+
+def add_weight_function(func, name=None, overwrite=True):
+ assert(callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_synaptic_weight(func_name, func, overwrite)
+
+
+def add_cell_model(func, directive, model_type, overwrite=True):
+ assert(callable(func))
+ # func_name = name if name is not None else func.__name__
+ py_modules.add_cell_model(directive, model_type, func, overwrite)
+
+
+def add_cell_processor(func, name=None, overwrite=True):
+ assert(callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_cell_processor(func_name, func, overwrite)
+
+
+def add_synapse_model(func, name=None, overwrite=True):
+ assert (callable(func))
+ func_name = name if name is not None else func.__name__
+ py_modules.add_synapse_model(func_name, func, overwrite)
+
+
+def load_py_modules(cell_models=None, syn_models=None, syn_weights=None):
+ # py_modules.clear()
+
+ if cell_models is not None:
+ assert(isinstance(cell_models, types.ModuleType))
+ for f in [cell_models.__dict__.get(f) for f in dir(cell_models)]:
+ if isinstance(f, types.FunctionType):
+                # add_cell_model stores under a (directive, model_type) key; None is used as a
+                # placeholder model_type when registering by function name alone
+                py_modules.add_cell_model(f.__name__, None, f)
+
+ if syn_models is not None:
+ assert(isinstance(syn_models, types.ModuleType))
+ for f in [syn_models.__dict__.get(f) for f in dir(syn_models)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_synapse_model(f.__name__, f)
+
+ if syn_weights is not None:
+ assert(isinstance(syn_weights, types.ModuleType))
+ for f in [syn_weights.__dict__.get(f) for f in dir(syn_weights)]:
+ if isinstance(f, types.FunctionType):
+ py_modules.add_synaptic_weight(f.__name__, f)
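+
+
+# A minimal sketch of intended usage ('my_weights' is a hypothetical module whose
+# top-level functions become registered weight functions):
+#
+#     import my_weights                         # defines e.g. ranked_weight(...)
+#     load_py_modules(syn_weights=my_weights)
+#     assert py_modules.has_synaptic_weight('ranked_weight')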
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/sonata_adaptors.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/sonata_adaptors.py
new file mode 100644
index 0000000..b528dba
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/sonata_adaptors.py
@@ -0,0 +1,295 @@
+import numpy as np
+from collections import Counter
+import numbers
+import nest
+import types
+import pandas as pd
+
+from bmtk.simulator.core.sonata_reader import NodeAdaptor, SonataBaseNode, EdgeAdaptor, SonataBaseEdge
+from bmtk.simulator.pointnet.io_tools import io
+from bmtk.simulator.pointnet.pyfunction_cache import py_modules
+
+
+def all_null(node_group, column_name):
+ """Helper function to determine if a column has any non-NULL values"""
+ types_table = node_group.parent.types_table
+ non_null_vals = [types_table[ntid][column_name] for ntid in np.unique(node_group.node_type_ids)
+ if types_table[ntid][column_name] is not None]
+ return len(non_null_vals) == 0
+
+
+class PointNodeBatched(object):
+ def __init__(self, node_ids, gids, node_types_table, node_type_id):
+ self._n_nodes = len(node_ids)
+ self._node_ids = node_ids
+ self._gids = gids
+ self._nt_table = node_types_table
+ self._nt_id = node_type_id
+ self._nest_ids = []
+
+ @property
+ def n_nodes(self):
+ return self._n_nodes
+
+ @property
+ def node_ids(self):
+ return self._node_ids
+
+ @property
+ def gids(self):
+ return self._gids
+
+ @property
+ def nest_ids(self):
+ return self._nest_ids
+
+ @property
+ def nest_model(self):
+ return self._nt_table[self._nt_id]['model_template'].split(':')[1]
+
+ @property
+ def nest_params(self):
+ return self._nt_table[self._nt_id]['dynamics_params']
+
+ @property
+ def model_type(self):
+ return self._nt_table[self._nt_id]['model_type']
+
+ def build(self):
+ self._nest_ids = nest.Create(self.nest_model, self.n_nodes, self.nest_params)
+
+
+class PointNode(SonataBaseNode):
+ def __init__(self, node, prop_adaptor):
+ super(PointNode, self).__init__(node, prop_adaptor)
+ self._nest_ids = []
+
+ @property
+ def n_nodes(self):
+ return 1
+
+ @property
+ def node_ids(self):
+ return [self._prop_adaptor.node_id(self._node)]
+
+ @property
+ def gids(self):
+ return [self._prop_adaptor.gid(self._node)]
+
+ @property
+ def nest_ids(self):
+ return self._nest_ids
+
+ @property
+ def nest_model(self):
+ return self._prop_adaptor.model_template(self._node)[1]
+
+ @property
+ def nest_params(self):
+ return self.dynamics_params
+
+ def build(self):
+ nest_model = self.nest_model
+ dynamics_params = self.dynamics_params
+ fnc_name = self._node['model_processing']
+ if fnc_name is None:
+ self._nest_ids = nest.Create(nest_model, 1, dynamics_params)
+ else:
+ cell_fnc = py_modules.cell_processor(fnc_name)
+ self._nest_ids = cell_fnc(nest_model, self._node, dynamics_params)
+
+
+class PointNodeAdaptor(NodeAdaptor):
+ def __init__(self, network):
+ super(PointNodeAdaptor, self).__init__(network)
+
+ # Flag for determining if we can build multiple NEST nodes at once. If each individual node has unique
+        # NEST params or a model_processing function is being called, then we must call nest.Create for each
+        # individual cell. Otherwise we can try to call nest.Create once for a batch of nodes that share the same
+        # properties.
+ self._can_batch = True
+
+ @property
+ def batch_process(self):
+ return self._can_batch
+
+ @batch_process.setter
+ def batch_process(self, flag):
+ self._can_batch = flag
+
+ def get_node(self, sonata_node):
+ return PointNode(sonata_node, self)
+
+ def get_batches(self, node_group):
+ node_ids = node_group.node_ids
+ node_type_ids = node_group.node_type_ids
+ node_gids = node_group.gids
+ if node_gids is None:
+ node_gids = node_ids
+
+ ntids_counter = Counter(node_type_ids)
+
+ nid_groups = {nt_id: np.zeros(ntids_counter[nt_id], dtype=np.uint32) for nt_id in ntids_counter}
+ gid_groups = {nt_id: np.zeros(ntids_counter[nt_id], dtype=np.uint32) for nt_id in ntids_counter}
+ node_groups_counter = {nt_id: 0 for nt_id in ntids_counter}
+
+ for node_id, gid, node_type_id in zip(node_ids, node_gids, node_type_ids):
+ grp_indx = node_groups_counter[node_type_id]
+ nid_groups[node_type_id][grp_indx] = node_id
+ gid_groups[node_type_id][grp_indx] = gid
+ node_groups_counter[node_type_id] += 1
+
+ return [PointNodeBatched(nid_groups[nt_id], gid_groups[nt_id], node_group.parent.node_types_table, nt_id)
+ for nt_id in ntids_counter]
+
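+    # A worked illustration of get_batches with made-up values: given
+    # node_type_ids = [10, 20, 10] and node_ids = [0, 1, 2], the loop above builds
+    # nid_groups = {10: [0, 2], 20: [1]}, so each node type becomes a single
+    # PointNodeBatched and, later, a single nest.Create call.
+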
+ @staticmethod
+ def patch_adaptor(adaptor, node_group, network):
+ node_adaptor = NodeAdaptor.patch_adaptor(adaptor, node_group, network)
+
+ # If dynamics params is stored in the nodes.h5 then we have to build each node separate
+ if node_group.has_dynamics_params:
+ node_adaptor.batch_process = False
+
+ # If there is a non-null value in the model_processing column then it potentially means that every cell is
+        # uniquely built (currently model_processing is applied to each individual cell) and nodes can't be batched
+ if 'model_processing' in node_group.columns:
+ node_adaptor.batch_process = False
+ elif 'model_processing' in node_group.all_columns and not all_null(node_group, 'model_processing'):
+ node_adaptor.batch_process = False
+
+ if node_adaptor.batch_process:
+ io.log_info('Batch processing nodes for {}/{}.'.format(node_group.parent.name, node_group.group_id))
+
+ return node_adaptor
+
+
+class PointEdge(SonataBaseEdge):
+ @property
+ def source_node_ids(self):
+ return [self._edge.source_node_id]
+
+ @property
+ def target_node_ids(self):
+ return [self._edge.target_node_id]
+
+ @property
+ def nest_params(self):
+ if self.model_template in py_modules.synapse_models:
+ syn_model_fnc = py_modules.synapse_model(self.model_template)
+ else:
+            syn_model_fnc = py_modules.synapse_model('default')
+
+ return syn_model_fnc(self)
+
+
+class PointEdgeBatched(object):
+ def __init__(self, source_nids, target_nids, nest_params):
+ self._src_nids = source_nids
+ self._trg_nids = target_nids
+ self._nest_params = nest_params
+
+ @property
+ def source_node_ids(self):
+ return self._src_nids
+
+ @property
+ def target_node_ids(self):
+ return self._trg_nids
+
+ @property
+ def nest_params(self):
+ return self._nest_params
+
+
+class PointEdgeAdaptor(EdgeAdaptor):
+ def __init__(self, network):
+ super(PointEdgeAdaptor, self).__init__(network)
+ self._can_batch = True
+
+ @property
+ def batch_process(self):
+ return self._can_batch
+
+ @batch_process.setter
+ def batch_process(self, flag):
+ self._can_batch = flag
+
+ def synaptic_params(self, edge):
+ # TODO: THIS NEEDS to be replaced with call to synapse_models
+ params_dict = {'weight': self.syn_weight(edge, None, None), 'delay': edge.delay}
+ params_dict.update(edge.dynamics_params)
+ return params_dict
+
+ def get_edge(self, sonata_node):
+ return PointEdge(sonata_node, self)
+
+ def get_batches(self, edge_group):
+ src_ids = {}
+ trg_ids = {}
+ edge_types_table = edge_group.parent.edge_types_table
+
+ edge_type_ids = edge_group.node_type_ids()
+ et_id_counter = Counter(edge_type_ids)
+ tmp_df = pd.DataFrame({'etid': edge_type_ids, 'src_nids': edge_group.src_node_ids(),
+ 'trg_nids': edge_group.trg_node_ids()})
+
+ for et_id, grp_vals in tmp_df.groupby('etid'):
+ src_ids[et_id] = np.array(grp_vals['src_nids'])
+ trg_ids[et_id] = np.array(grp_vals['trg_nids'])
+
+ # selected_etids = np.unique(edge_type_ids)
+ type_params = {et_id: {} for et_id in et_id_counter.keys()}
+ for et_id, p_dict in type_params.items():
+ p_dict.update(edge_types_table[et_id]['dynamics_params'])
+ if 'model_template' in edge_types_table[et_id]:
+ p_dict['model'] = edge_types_table[et_id]['model_template']
+
+ if 'delay' in edge_group.columns:
+ raise NotImplementedError
+ elif 'delay' in edge_types_table.columns:
+ for et_id, p_dict in type_params.items():
+ p_dict['delay'] = edge_types_table[et_id]['delay']
+
+ scalar_syn_weight = 'syn_weight' not in edge_group.columns
+ scalar_nsyns = 'nsyns' not in edge_group.columns
+
+ if scalar_syn_weight and scalar_nsyns:
+ for et_id, p_dict in type_params.items():
+ et_dict = edge_types_table[et_id]
+ p_dict['weight'] = et_dict['nsyns']*et_dict['syn_weight']
+
+ else:
+ if not scalar_nsyns and not scalar_syn_weight:
+ tmp_df['nsyns'] = edge_group.get_dataset('nsyns')
+ tmp_df['syn_weight'] = edge_group.get_dataset('syn_weight')
+ for et_id, grp_vals in tmp_df.groupby('etid'):
+ type_params[et_id]['weight'] = np.array(grp_vals['nsyns'])*np.array(grp_vals['syn_weight'])
+
+ elif scalar_nsyns:
+ tmp_df['syn_weight'] = edge_group.get_dataset('syn_weight')
+ for et_id, grp_vals in tmp_df.groupby('etid'):
+ type_params[et_id]['weight'] = edge_types_table[et_id].get('nsyns', 1) * np.array(grp_vals['syn_weight'])
+
+ elif scalar_syn_weight:
+ tmp_df['nsyns'] = edge_group.get_dataset('nsyns')
+ for et_id, grp_vals in tmp_df.groupby('etid'):
+ type_params[et_id]['weight'] = np.array(grp_vals['nsyns']) * edge_types_table[et_id]['syn_weight']
+
+ batched_edges = []
+ for et_id in et_id_counter.keys():
+ batched_edges.append(PointEdgeBatched(src_ids[et_id], trg_ids[et_id], type_params[et_id]))
+
+ return batched_edges
+
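+    # A worked illustration of the weight logic above with made-up values: if
+    # edge_types_table[et_id] gives {'nsyns': 2, 'syn_weight': 0.5} and the group stores no
+    # per-edge columns, every edge of that type gets weight 2 * 0.5 = 1.0. If instead the
+    # group stores per-edge syn_weight = [0.1, 0.3] while nsyns stays scalar at 2, the
+    # batched weights become [0.2, 0.6].
+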
+ @staticmethod
+ def patch_adaptor(adaptor, edge_group):
+ edge_adaptor = EdgeAdaptor.patch_adaptor(adaptor, edge_group)
+
+ if 'weight_function' not in edge_group.all_columns and 'syn_weight' in edge_group.all_columns:
+ adaptor.syn_weight = types.MethodType(point_syn_weight, adaptor)
+
+ return edge_adaptor
+
+
+def point_syn_weight(self, edge, src_node, trg_node):
+ return edge['syn_weight']*edge.nsyns
diff --git a/bmtk-vb/build/lib/bmtk/simulator/pointnet/utils.py b/bmtk-vb/build/lib/bmtk/simulator/pointnet/utils.py
new file mode 100644
index 0000000..d71716a
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/pointnet/utils.py
@@ -0,0 +1,188 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import h5py
+from collections import defaultdict
+import pandas as pd
+import numpy as np
+import six
+"""
+Most of these functions were collected from a previous version of pointnet and are no longer maintained or tested.
+However, some functions may still be used by some people internally at AI for running their own simulations. I have
+marked all such functions as UNUSED.
+
+I will leave them alone for now, but in the future they should be purged or updated.
+"""
+
+
+def read_LGN_activity(trial_num, file_name):
+ # UNUSED.
+ spike_train_dict = {}
+ f5 = h5py.File(file_name, 'r')
+ trial_group = f5['processing/trial_{}/spike_train'.format(trial_num)]
+ for cid in trial_group.keys():
+ spike_train_dict[int(cid)] = trial_group[cid]['data'][...]
+
+ return spike_train_dict
+
+
+def read_conns(file_name):
+ # UNUSED.
+    fc = h5py.File(file_name, 'r')
+ indptr = fc['indptr']
+ cell_size = len(indptr) - 1
+ print(cell_size)
+ conns = {}
+ source = fc['src_gids']
+ for xin in six.moves.range(cell_size):
+ conns[str(xin)] = list(source[indptr[xin]:indptr[xin+1]])
+
+ return conns
+
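+# 'indptr'/'src_gids' follow a CSR-like layout. For example, with indptr = [0, 2, 3]
+# and src_gids = [5, 7, 9], cell 0 receives from gids [5, 7] and cell 1 from gid [9].
+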
+
+def gen_recurrent_csv(num, offset, csv_file):
+ # UNUSED.
+ conn_data = np.loadtxt(csv_file)
+ target_ids = conn_data[:, 0]
+ source_ids = conn_data[:, 1]
+ weight_scale = conn_data[:, 2]
+
+ pre = []
+ cell_num = num
+ params = []
+ for xin in six.moves.range(cell_num):
+ pre.append(xin+offset)
+ ind = np.where(source_ids == xin)
+
+ temp_param = {}
+ targets = target_ids[ind] + offset
+ weights = weight_scale[ind]
+ delays = np.ones(len(ind[0]))*1.5
+        # astype() returns a new array, so the result must be assigned
+        targets = targets.astype(float)
+        weights = weights.astype(float)
+ temp_param['target'] = targets
+ temp_param['weight'] = weights*1
+ temp_param['delay'] = delays
+ params.append(temp_param)
+
+ return pre, params
+
+
+def gen_recurrent_h5(num, offset, h5_file):
+ # UNUSED.
+    fc = h5py.File(h5_file, 'r')
+ indptr = fc['indptr']
+ cell_size = len(indptr) - 1
+ src_gids = fc['src_gids']
+ nsyns = fc['nsyns']
+ source_ids = []
+ weight_scale = []
+ target_ids = []
+ delay_v = 1.5 # arbitrary value
+
+ for xin in six.moves.range(cell_size):
+ target_ids.append(xin)
+ source_ids.append(list(src_gids[indptr[xin]:indptr[xin+1]]))
+ weight_scale.append(list(nsyns[indptr[xin]:indptr[xin+1]]))
+ targets = defaultdict(list)
+ weights = defaultdict(list)
+ delays = defaultdict(list)
+
+ for xi, xin in enumerate(target_ids):
+ for yi, yin in enumerate(source_ids[xi]):
+ targets[yin].append(xin)
+ weights[yin].append(weight_scale[xi][yi])
+ delays[yin].append(delay_v)
+
+ presynaptic = []
+ params = []
+ for xin in targets:
+ presynaptic.append(xin+offset)
+ temp_param = {}
+        # note: astype() returns a new array, so the result must be assigned
+        temp_param['target'] = (np.array(targets[xin]) + offset).astype(float)
+        temp_param['weight'] = np.array(weights[xin]).astype(float)
+        temp_param['delay'] = np.array(delays[xin]).astype(float)
+ params.append(temp_param)
+
+ return presynaptic, params
+
+
+def load_params(node_name, model_name):
+ """
+ load information regarding nodes and cell_models from csv files
+
+ Parameters
+ ----------
+    node_name: csv file name for node information
+    model_name: csv file name for neuron model information
+
+    Returns
+    -------
+    node_info: 2d array of node info read out from the csv file
+    model_info: 2d array of model info read out from the csv file
+    dict_coordinates: dictionary of coordinates. The key is the node_id and the entries are the x, y and z coordinates.
+ """
+ # UNUSED.
+ node = pd.read_csv(node_name, sep=' ', quotechar='"', quoting=0)
+ model = pd.read_csv(model_name, sep=' ', quotechar='"', quoting=0)
+ node_info = node.values
+ model_info = model.values
+    # In NEST, cells do not have intrinsic coordinates, so we have to make some virtual links between cells and
+    # coordinates.
+ dict_coordinates = defaultdict(list)
+
+ for xin in six.moves.range(len(node_info)):
+ dict_coordinates[str(node_info[xin, 0])] = [node_info[xin, 2], node_info[xin, 3], node_info[xin, 4]]
+ return node_info, model_info, dict_coordinates
+
+
+def load_conns(cnn_fn):
+ """
+ load information regarding connectivity from csv files
+
+ Parameters
+ ----------
+    cnn_fn: csv file name for connection information
+
+ Returns
+ -------
+ connection dictionary
+ """
+ # UNUSED.
+ conns = pd.read_csv(cnn_fn, sep=' ', quotechar='"', quoting=0)
+ targets = conns.target_label
+ sources = conns.source_label
+ weights = conns.weight
+ delays = conns.delay
+
+ conns_mapping = {}
+ for xin in six.moves.range(len(targets)):
+ keys = sources[xin] + '-' + targets[xin]
+ conns_mapping[keys] = [weights[xin], delays[xin]]
+
+ return conns_mapping
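+
+
+# A sketch of the expected input (values are made up). load_conns expects a
+# space-separated csv such as:
+#
+#     source_label target_label weight delay
+#     "lgn" "v1" 0.002 1.5
+#
+# which maps to {'lgn-v1': [0.002, 1.5]}.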
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/__init__.py
new file mode 100644
index 0000000..7b591ca
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/__init__.py
@@ -0,0 +1,25 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .popnetwork import PopNetwork
+from .popsimulator import PopSimulator
+from .config import Config
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/config.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/config.py
new file mode 100644
index 0000000..567e5b6
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/config.py
@@ -0,0 +1,34 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# import bmtk.simulator.utils.config as msdk_config
+from bmtk.simulator.core.config import ConfigDict
+from bmtk.simulator.core.io_tools import io
+
+def from_json(config_file, validate=False):
+ conf_dict = ConfigDict.from_json(config_file)
+ conf_dict.io = io
+ return conf_dict
+
+
+class Config(ConfigDict):
+ pass
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/popedge.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/popedge.py
new file mode 100644
index 0000000..1e4e98e
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/popedge.py
@@ -0,0 +1,82 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from bmtk.simulator.utils.graph import SimEdge
+
+
+class PopEdge(SimEdge):
+ def __init__(self, source_pop, target_pop, edge_params, dynamics_params):
+ super(PopEdge, self).__init__(edge_params, dynamics_params)
+ self.__source_pop = source_pop
+ self.__target_pop = target_pop
+ self._weight = self.__get_prop('weight', 0.0)
+ self._nsyns = self.__get_prop('nsyns', 0)
+ self._delay = self.__get_prop('delay', 0.0)
+
+ @property
+ def source(self):
+ return self.__source_pop
+
+ @property
+ def target(self):
+ return self.__target_pop
+
+ @property
+ def params(self):
+ return self._orig_params
+
+ @property
+ def weight(self):
+ return self._weight
+
+ @weight.setter
+ def weight(self, value):
+ self._weight = value
+
+ @property
+ def nsyns(self):
+ return self._nsyns
+
+ @nsyns.setter
+ def nsyns(self, value):
+ self._nsyns = value
+
+ @property
+ def delay(self):
+ return self._delay
+
+ @delay.setter
+ def delay(self, value):
+ self._delay = value
+
+ def __get_prop(self, name, default=None):
+ if name in self._orig_params:
+ return self._orig_params[name]
+ elif name in self._dynamics_params:
+ return self._dynamics_params[name]
+ else:
+ return default
+
+ def __repr__(self):
+ relevant_params = "weight: {}, delay: {}, nsyns: {}".format(self.weight, self.delay, self.nsyns)
+ rstr = "{} --> {} {{{}}}".format(self.source.pop_id, self.target.pop_id, relevant_params)
+ return rstr
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/popnetwork.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/popnetwork.py
new file mode 100644
index 0000000..46b7928
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/popnetwork.py
@@ -0,0 +1,695 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+import numpy as np
+
+from bmtk.simulator.core.simulator_network import SimNetwork
+#from bmtk.simulator.core.graph import SimGraph
+#from property_schemas import PopTypes, DefaultPropertySchema
+#from popnode import InternalNode, ExternalPopulation
+#from popedge import PopEdge
+from bmtk.simulator.popnet import utils as poputils
+from bmtk.simulator.popnet.sonata_adaptors import PopEdgeAdaptor
+
+from dipde.internals.internalpopulation import InternalPopulation
+from dipde.internals.externalpopulation import ExternalPopulation
+from dipde.internals.connection import Connection
+
+'''
+class PopNode(object):
+ def __init__(self, node, property_map, graph):
+ self._node = node
+ self._property_map = property_map
+ self._graph = graph
+
+ @property
+ def dynamics_params(self):
+        # TODO: Use property map
+ return self._node['dynamics_params']
+
+ @property
+ def node_id(self):
+ # TODO: Use property map
+ return self._node.node_id
+'''
+
+
+class Population(object):
+ def __init__(self, pop_id):
+ self._pop_id = pop_id
+ self._nodes = []
+ self._params = None
+
+ self._dipde_obj = None
+
+ def add_node(self, pnode):
+ self._nodes.append(pnode)
+ if self._params is None and pnode.dynamics_params is not None:
+ self._params = pnode.dynamics_params.copy()
+
+ @property
+ def pop_id(self):
+ return self._pop_id
+
+ @property
+ def dipde_obj(self):
+ return self._dipde_obj
+
+ @property
+ def record(self):
+ return True
+
+ def build(self):
+ params = self._nodes[0].dynamics_params
+ self._dipde_obj = InternalPopulation(**params)
+
+ def get_gids(self):
+ for node in self._nodes:
+ yield node.node_id
+
+ def __getitem__(self, item):
+ return self._params[item]
+
+ def __setitem__(self, key, value):
+ self._params[key] = value
+
+ def __repr__(self):
+ return str(self._pop_id)
+
+
+class ExtPopulation(Population):
+ def __init__(self, pop_id):
+ super(ExtPopulation, self).__init__(pop_id)
+ self._firing_rate = None
+
+ @property
+ def record(self):
+ return False
+
+ @property
+ def firing_rate(self):
+ return self._firing_rate
+
+ @firing_rate.setter
+ def firing_rate(self, value):
+ self.build(value)
+
+ def build(self, firing_rate):
+ if firing_rate is not None:
+ self._firing_rate = firing_rate
+
+ self._dipde_obj = ExternalPopulation(firing_rate)
+
+
+class PopEdge(object):
+ def __init__(self, edge, property_map, graph):
+ self._edge = edge
+ self._prop_map = property_map
+ self._graph = graph
+
+ @property
+ def nsyns(self):
+ # TODO: Use property map
+ return self._edge['nsyns']
+
+ @property
+ def delay(self):
+ return self._edge['delay']
+
+ @property
+ def weight(self):
+ return self._edge['syn_weight']
+
+
+class PopConnection(object):
+ def __init__(self, src_pop, trg_pop):
+ self._src_pop = src_pop
+ self._trg_pop = trg_pop
+ self._edges = []
+
+ self._dipde_conn = None
+
+ def add_edge(self, edge):
+ self._edges.append(edge)
+
+ def build(self):
+ edge = self._edges[0]
+ self._dipde_conn = Connection(self._src_pop._dipde_obj, self._trg_pop._dipde_obj, edge.nsyns, edge.delay,
+ edge.syn_weight)
+
+ @property
+ def dipde_obj(self):
+ return self._dipde_conn
+
+
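+# A minimal sketch of the dipde objects the wrappers above reduce to (assuming
+# dipde's documented example API; parameter values are made up):
+#
+#     ext = ExternalPopulation(100)                        # 100 Hz source
+#     internal = InternalPopulation(v_min=0.0, v_max=0.02)
+#     conn = Connection(ext, internal, nsyn=10, weights=0.005, delays=0.0)
+#
+# PopNetwork builds one such object per Population/PopConnection and hands them
+# all to dipde for simulation.
+
+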
+class PopNetwork(SimNetwork):
+ def __init__(self, group_by='node_type_id', **properties):
+ super(PopNetwork, self).__init__()
+
+ self.__all_edges = []
+ self._group_key = group_by
+ self._gid_table = {}
+ self._edges = {}
+ self._target_edges = {}
+ self._source_edges = {}
+
+ self._params_cache = {}
+ #self._params_column = property_schema.get_params_column()
+ self._dipde_pops = {}
+ self._external_pop = {}
+ self._all_populations = []
+ # self._loaded_external_pops = {}
+
+ self._nodeid2pop_map = {}
+
+ self._connections = {}
+ self._external_connections = {}
+ self._all_connections = []
+
+ @property
+ def populations(self):
+ return self._all_populations
+
+ @property
+ def connections(self):
+ return self._all_connections
+
+ @property
+ def internal_populations(self):
+ return self._dipde_pops.values()
+
+ def _register_adaptors(self):
+ super(PopNetwork, self)._register_adaptors()
+ self._edge_adaptors['sonata'] = PopEdgeAdaptor
+
+ def build_nodes(self):
+ if self._group_key == 'node_id' or self._group_key is None:
+ self._build_nodes()
+ else:
+ self._build_nodes_grouped()
+
+ def _build_nodes(self):
+ for node_pop in self.node_populations:
+ if node_pop.internal_nodes_only:
+ nid2pop_map = {}
+ for node in node_pop.get_nodes():
+ #pnode = PopNode(node, prop_maps[node.group_id], self)
+ pop = Population(node.node_id)
+ pop.add_node(node)
+ pop.build()
+
+ self._dipde_pops[node.node_id] = pop
+ self._all_populations.append(pop)
+ nid2pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[node_pop.name] = nid2pop_map
+
+ """
+ for node_pop in self._internal_populations_map.values():
+ prop_maps = self._node_property_maps[node_pop.name]
+ nid2pop_map = {}
+ for node in node_pop:
+ pnode = PopNode(node, prop_maps[node.group_id], self)
+ pop = Population(node.node_id)
+ pop.add_node(pnode)
+ pop.build()
+
+ self._dipde_pops[node.node_id] = pop
+ self._all_populations.append(pop)
+ nid2pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[node_pop.name] = nid2pop_map
+ """
+
+ def _build_nodes_grouped(self):
+ # Organize every single sonata-node into a given population.
+ for node_pop in self.node_populations:
+ nid2pop_map = {}
+ if node_pop.internal_nodes_only:
+ for node in node_pop.get_nodes():
+ pop_key = node[self._group_key]
+ if pop_key not in self._dipde_pops:
+ pop = Population(pop_key)
+ self._dipde_pops[pop_key] = pop
+ self._all_populations.append(pop)
+
+ pop = self._dipde_pops[pop_key]
+ pop.add_node(node)
+ nid2pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[node_pop.name] = nid2pop_map
+
+ for dpop in self._dipde_pops.values():
+ dpop.build()
+
+ """
+ for node_pop in self._internal_populations_map.values():
+ prop_maps = self._node_property_maps[node_pop.name]
+ nid2pop_map = {}
+ for node in node_pop:
+ pop_key = node[self._group_key]
+ pnode = PopNode(node, prop_maps[node.group_id], self)
+ if pop_key not in self._dipde_pops:
+ pop = Population(pop_key)
+ self._dipde_pops[pop_key] = pop
+ self._all_populations.append(pop)
+
+ pop = self._dipde_pops[pop_key]
+ pop.add_node(pnode)
+ nid2pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[node_pop.name] = nid2pop_map
+
+ for dpop in self._dipde_pops.values():
+ dpop.build()
+ """
+
+ def build_recurrent_edges(self):
+ recurrent_edge_pops = [ep for ep in self._edge_populations if not ep.virtual_connections]
+
+ for edge_pop in recurrent_edge_pops:
+ if edge_pop.recurrent_connections:
+ src_pop_maps = self._nodeid2pop_map[edge_pop.source_nodes]
+ trg_pop_maps = self._nodeid2pop_map[edge_pop.target_nodes]
+ for edge in edge_pop.get_edges():
+ src_pop = src_pop_maps[edge.source_node_id]
+ trg_pop = trg_pop_maps[edge.target_node_id]
+ conn_key = (src_pop, trg_pop)
+ if conn_key not in self._connections:
+ conn = PopConnection(src_pop, trg_pop)
+ self._connections[conn_key] = conn
+ self._all_connections.append(conn)
+
+ self._connections[conn_key].add_edge(edge)
+
+ elif edge_pop.mixed_connections:
+ raise NotImplementedError()
+
+ for conn in self._connections.values():
+ conn.build()
+
+ """
+ recurrent_edges = [edge_pop for _, edge_list in self._recurrent_edges.items() for edge_pop in edge_list]
+ for edge_pop in recurrent_edges:
+ prop_maps = self._edge_property_maps[edge_pop.name]
+ src_pop_maps = self._nodeid2pop_map[edge_pop.source_population]
+ trg_pop_maps = self._nodeid2pop_map[edge_pop.target_population]
+ for edge in edge_pop:
+ src_pop = src_pop_maps[edge.source_node_id]
+ trg_pop = trg_pop_maps[edge.target_node_id]
+ conn_key = (src_pop, trg_pop)
+ if conn_key not in self._connections:
+ conn = PopConnection(src_pop, trg_pop)
+ self._connections[conn_key] = conn
+ self._all_connections.append(conn)
+
+ pop_edge = PopEdge(edge, prop_maps[edge.group_id], self)
+ self._connections[conn_key].add_edge(pop_edge)
+
+ for conn in self._connections.values():
+ conn.build()
+ # print len(self._connections)
+ """
+
+ def find_edges(self, source_nodes=None, target_nodes=None):
+ # TODO: Move to parent
+ selected_edges = self._edge_populations[:]
+
+ if source_nodes is not None:
+ selected_edges = [edge_pop for edge_pop in selected_edges if edge_pop.source_nodes == source_nodes]
+
+ if target_nodes is not None:
+ selected_edges = [edge_pop for edge_pop in selected_edges if edge_pop.target_nodes == target_nodes]
+
+ return selected_edges
+
+ def add_spike_trains(self, spike_trains, node_set):
+ # Build external node populations
+ src_nodes = [node_pop for node_pop in self.node_populations if node_pop.name in node_set.population_names()]
+ for node_pop in src_nodes:
+ pop_name = node_pop.name
+ if node_pop.name not in self._external_pop:
+ external_pop_map = {}
+ src_pop_map = {}
+ for node in node_pop.get_nodes():
+ pop_key = node[self._group_key]
+ if pop_key not in external_pop_map:
+ pop = ExtPopulation(pop_key)
+ external_pop_map[pop_key] = pop
+ self._all_populations.append(pop)
+
+ pop = external_pop_map[pop_key]
+ pop.add_node(node)
+ src_pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[pop_name] = src_pop_map
+
+ firing_rates = poputils.get_firing_rates(external_pop_map.values(), spike_trains)
+ self._external_pop[pop_name] = external_pop_map
+ for dpop in external_pop_map.values():
+ dpop.build(firing_rates[dpop.pop_id])
+
+ else:
+            # TODO: throw an error; spike trains should only be added once per source population
+ # external_pop_map = self._external_pop[pop_name]
+ src_pop_map = self._nodeid2pop_map[pop_name]
+
+ unbuilt_connections = []
+ for source_reader in src_nodes:
+ for edge_pop in self.find_edges(source_nodes=source_reader.name):
+ trg_pop_map = self._nodeid2pop_map[edge_pop.target_nodes]
+ for edge in edge_pop.get_edges():
+ src_pop = src_pop_map[edge.source_node_id]
+ trg_pop = trg_pop_map[edge.target_node_id]
+ conn_key = (src_pop, trg_pop)
+ if conn_key not in self._external_connections:
+ pconn = PopConnection(src_pop, trg_pop)
+ self._external_connections[conn_key] = pconn
+ unbuilt_connections.append(pconn)
+ self._all_connections.append(pconn)
+
+ #pop_edge = PopEdge(edge, prop_maps[edge.group_id], self)
+ self._external_connections[conn_key].add_edge(edge)
+
+ for pedge in unbuilt_connections:
+ pedge.build()
+ #exit()
+
+ """
+ print node_pop.name
+
+
+ exit()
+ if node_pop.name in self._virtual_ids_map:
+ continue
+
+ virt_node_map = {}
+ if node_pop.virtual_nodes_only:
+ print 'HERE'
+ exit()
+
+
+ for pop_name, node_pop in self._virtual_populations_map.items():
+ if pop_name not in spike_trains.populations:
+ continue
+
+ # Build external population if it already hasn't been built
+ if pop_name not in self._external_pop:
+ prop_maps = self._node_property_maps[pop_name]
+ external_pop_map = {}
+ src_pop_map = {}
+ for node in node_pop:
+ pop_key = node[self._group_key]
+ pnode = PopNode(node, prop_maps[node.group_id], self)
+ if pop_key not in external_pop_map:
+ pop = ExtPopulation(pop_key)
+ external_pop_map[pop_key] = pop
+ self._all_populations.append(pop)
+
+ pop = external_pop_map[pop_key]
+ pop.add_node(pnode)
+ src_pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[pop_name] = src_pop_map
+
+ firing_rates = poputils.get_firing_rates(external_pop_map.values(), spike_trains)
+ self._external_pop[pop_name] = external_pop_map
+ for dpop in external_pop_map.values():
+ dpop.build(firing_rates[dpop.pop_id])
+
+ else:
+ # TODO: Throw error spike trains should only be called once per source population
+ # external_pop_map = self._external_pop[pop_name]
+ src_pop_map = self._nodeid2pop_map[pop_name]
+
+ unbuilt_connections = []
+ for node_pop in self._internal_populations_map.values():
+ trg_pop_map = self._nodeid2pop_map[node_pop.name]
+ for edge_pop in self.external_edge_populations(src_pop=pop_name, trg_pop=node_pop.name):
+ for edge in edge_pop:
+ src_pop = src_pop_map[edge.source_node_id]
+ trg_pop = trg_pop_map[edge.target_node_id]
+ conn_key = (src_pop, trg_pop)
+ if conn_key not in self._external_connections:
+ pconn = PopConnection(src_pop, trg_pop)
+ self._external_connections[conn_key] = pconn
+ unbuilt_connections.append(pconn)
+ self._all_connections.append(pconn)
+
+ pop_edge = PopEdge(edge, prop_maps[edge.group_id], self)
+ self._external_connections[conn_key].add_edge(pop_edge)
+
+ for pedge in unbuilt_connections:
+ pedge.build()
+ """
+
+ def add_rates(self, rates, node_set):
+ if self._group_key == 'node_id':
+ id_lookup = lambda n: n.node_id
+ else:
+ id_lookup = lambda n: n[self._group_key]
+
+ src_nodes = [node_pop for node_pop in self.node_populations if node_pop.name in node_set.population_names()]
+ for node_pop in src_nodes:
+ pop_name = node_pop.name
+ if node_pop.name not in self._external_pop:
+ external_pop_map = {}
+ src_pop_map = {}
+ for node in node_pop.get_nodes():
+ pop_key = id_lookup(node)
+ if pop_key not in external_pop_map:
+ pop = ExtPopulation(pop_key)
+ external_pop_map[pop_key] = pop
+ self._all_populations.append(pop)
+
+ pop = external_pop_map[pop_key]
+ pop.add_node(node)
+ src_pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[pop_name] = src_pop_map
+
+ self._external_pop[pop_name] = external_pop_map
+ for dpop in external_pop_map.values():
+ firing_rates = rates.get_rate(dpop.pop_id)
+ dpop.build(firing_rates)
+
+ else:
+            # TODO: throw an error; rates should only be added once per source population
+ # external_pop_map = self._external_pop[pop_name]
+ src_pop_map = self._nodeid2pop_map[pop_name]
+
+ unbuilt_connections = []
+ for source_reader in src_nodes:
+ for edge_pop in self.find_edges(source_nodes=source_reader.name):
+ trg_pop_map = self._nodeid2pop_map[edge_pop.target_nodes]
+ for edge in edge_pop.get_edges():
+ src_pop = src_pop_map[edge.source_node_id]
+ trg_pop = trg_pop_map[edge.target_node_id]
+ conn_key = (src_pop, trg_pop)
+ if conn_key not in self._external_connections:
+ pconn = PopConnection(src_pop, trg_pop)
+ self._external_connections[conn_key] = pconn
+ unbuilt_connections.append(pconn)
+ self._all_connections.append(pconn)
+
+ #pop_edge = PopEdge(edge, prop_maps[edge.group_id], self)
+ self._external_connections[conn_key].add_edge(edge)
+
+ for pedge in unbuilt_connections:
+ pedge.build()
+
+ """
+ for pop_name, node_pop in self._virtual_populations_map.items():
+ if pop_name not in rates.populations:
+ continue
+
+ # Build external population if it already hasn't been built
+ if pop_name not in self._external_pop:
+ prop_maps = self._node_property_maps[pop_name]
+ external_pop_map = {}
+ src_pop_map = {}
+ for node in node_pop:
+ pop_key = id_lookup(node)
+ #pop_key = node[self._group_key]
+ pnode = PopNode(node, prop_maps[node.group_id], self)
+ if pop_key not in external_pop_map:
+ pop = ExtPopulation(pop_key)
+ external_pop_map[pop_key] = pop
+ self._all_populations.append(pop)
+
+ pop = external_pop_map[pop_key]
+ pop.add_node(pnode)
+ src_pop_map[node.node_id] = pop
+
+ self._nodeid2pop_map[pop_name] = src_pop_map
+
+ firing_rate = rates.get_rate(pop_key)
+ self._external_pop[pop_name] = external_pop_map
+ for dpop in external_pop_map.values():
+ dpop.build(firing_rate)
+
+ else:
+ # TODO: Throw error spike trains should only be called once per source population
+ # external_pop_map = self._external_pop[pop_name]
+ src_pop_map = self._nodeid2pop_map[pop_name]
+ """
+
+ '''
+ def _add_node(self, node, network):
+ pops = self._networks[network]
+ pop_key = node[self._group_key]
+ if pop_key in pops:
+ pop = pops[pop_key]
+ pop.add_gid(node.gid)
+ self._gid_table[network][node.gid] = pop
+ else:
+ model_class = self.property_schema.get_pop_type(node)
+ if model_class == PopTypes.Internal:
+ pop = InternalNode(pop_key, self, network, node)
+ pop.add_gid(node.gid)
+ pop.model_params = self.__get_params(node)
+ self._add_internal_node(pop, network)
+
+ elif model_class == PopTypes.External:
+ # TODO: See if we can get firing rate from dynamics_params
+ pop = ExternalPopulation(pop_key, self, network, node)
+ pop.add_gid(node.gid)
+ self._add_external_node(pop, network)
+
+ else:
+ raise Exception('Unknown model type')
+
+ if network not in self._gid_table:
+ self._gid_table[network] = {}
+ self._gid_table[network][node.gid] = pop
+ '''
+
+ def __get_params(self, node_params):
+ if node_params.with_dynamics_params:
+ return node_params['dynamics_params']
+
+ params_file = node_params[self._params_column]
+ if params_file in self._params_cache:
+ return self._params_cache[params_file]
+ else:
+ params_dir = self.get_component('models_dir')
+ params_path = os.path.join(params_dir, params_file)
+ params_dict = json.load(open(params_path, 'r'))
+ self._params_cache[params_file] = params_dict
+ return params_dict
+
+ def _preprocess_node_types(self, node_population):
+ node_type_ids = np.unique(node_population.type_ids)
+ # TODO: Verify all the node_type_ids are in the table
+ node_types_table = node_population.types_table
+
+ if 'dynamics_params' in node_types_table.columns and 'model_type' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ dynamics_params = node_type['dynamics_params']
+ model_type = node_type['model_type']
+
+ if model_type == 'biophysical':
+ params_dir = self.get_component('biophysical_neuron_models_dir')
+ elif model_type == 'point_process':
+ params_dir = self.get_component('point_neuron_models_dir')
+ elif model_type == 'point_soma':
+ params_dir = self.get_component('point_neuron_models_dir')
+ elif model_type == 'population':
+ params_dir = self.get_component('population_models_dir')
+ else:
+ # Not sure what to do in this case, throw Exception?
+ params_dir = self.get_component('custom_neuron_models')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ node_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ self.io.log_exception('Could not find node dynamics_params file {}.'.format(params_path))
+
+
+ '''
+ def add_edges(self, edges, target_network=None, source_network=None):
+ # super(PopGraph, self).add_edges(edges)
+
+ target_network = target_network if target_network is not None else edges.target_network
+ if target_network not in self._target_edges:
+ self._target_edges[target_network] = []
+
+ source_network = source_network if source_network is not None else edges.source_network
+ if source_network not in self._source_edges:
+ self._source_edges[source_network] = []
+
+ target_pops = self.get_populations(target_network)
+ source_pops = self.get_populations(source_network)
+ source_gid_table = self._gid_table[source_network]
+
+ for target_pop in target_pops:
+ for target_gid in target_pop.get_gids():
+ for edge in edges.edges_itr(target_gid):
+ source_pop = source_gid_table[edge.source_gid]
+ self._add_edge(source_pop, target_pop, edge)
+ '''
+
+ def _add_edge(self, source_pop, target_pop, edge):
+ src_id = source_pop.node_id
+ trg_id = target_pop.node_id
+ edge_type_id = edge['edge_type_id']
+ edge_key = (src_id, source_pop.network, trg_id, target_pop.network, edge_type_id)
+
+ if edge_key in self._edges:
+ return
+ else:
+ # TODO: implement dynamics params
+ dynamics_params = self._get_edge_params(edge)
+ pop_edge = PopEdge(source_pop, target_pop, edge, dynamics_params)
+ self._edges[edge_key] = pop_edge
+ self._source_edges[source_pop.network].append(pop_edge)
+ self._target_edges[target_pop.network].append(pop_edge)
+
+ def get_edges(self, source_network):
+ return self._source_edges[source_network]
+
+ def edges_table(self, target_network, source_network):
+ return self._edges_table[(target_network, source_network)]
+
+ def get_populations(self, network):
+ return super(PopNetwork, self).get_nodes(network)
+
+ def get_population(self, node_set, gid):
+ return self._nodeid2pop_map[node_set][gid]
+
+ def rebuild(self):
+ for _, ns in self._nodeid2pop_map.items():
+ for _, pop in ns.items():
+ pop.build()
+
+ for pc in self._all_connections:
+ pc.build()
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/popnetwork_OLD.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/popnetwork_OLD.py
new file mode 100644
index 0000000..cfdeddb
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/popnetwork_OLD.py
@@ -0,0 +1,327 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import logging
+
+from dipde.internals.internalpopulation import InternalPopulation
+from dipde.internals.externalpopulation import ExternalPopulation
+from dipde.internals.connection import Connection
+import dipde
+
+import bmtk.simulator.popnet.config as cfg
+import bmtk.simulator.popnet.utils as poputils
+
+
+class PopNetwork (object):
+ def __init__(self, graph):
+ self._graph = graph
+
+ self._duration = 0.0
+ self._dt = 0.0001
+ self._rates_file = None # name of file where the output is saved
+
+ self.__population_list = [] # list of all populations, internal and external
+        self.__population_table = {network: {} for network in self._graph.networks}  # population lookup by [network][id]
+ self.__connection_list = [] # list of all connections
+ self._dipde_network = None # reference to dipde.Network object
+
+        # dictionary of rates for every external network/pop_id. Prepopulate the dictionary with populations whose
+        # rates have already been manually set; otherwise rates should be set with one of the add_rates_* functions.
+ self._rates = {network: {pop.pop_id: pop.firing_rate for pop in self._graph.get_populations(network)
+ if not pop.is_internal and pop.is_firing_rate_set}
+ for network in self._graph.networks}
+
+ """
+ for network in self._graph.networks:
+ for pop in self._graph.get_populations(network):
+
+ if pop.is_internal:
+ dipde_pop = self.__create_internal_pop(pop)
+
+ else:
+ if pop.is_firing_rate_set:
+ rates = pop.firing_rate
+ """
+
+ @property
+ def duration(self):
+ return self._duration
+
+ @duration.setter
+ def duration(self, value):
+ self._duration = value
+
+ @property
+ def dt(self):
+ return self._dt
+
+ @dt.setter
+ def dt(self, value):
+ self._dt = value
+
+ @property
+ def rates_file(self):
+ return self._rates_file
+
+ @rates_file.setter
+ def rates_file(self, value):
+ self._rates_file = value
+
+ @property
+ def populations(self):
+ return self.__population_list
+
+ @property
+ def connections(self):
+ return self.__connection_list
+
+ def add_rates_nwb(self, network, nwb_file, trial, force=False):
+ """Creates external population firing rates from an NWB file.
+
+        Will iterate through a processing trial of an NWB file, assigning each gid to the population it belongs to
+        and taking the average firing rate.
+
+ This should be done before calling build_cells(). If a population has already been assigned a firing rate an
+ error will occur unless force=True.
+
+ :param network: Name of network with external populations.
+ :param nwb_file: NWB file with spike rates.
+ :param trial: trial id in NWB file
+ :param force: will overwrite existing firing rates
+ """
+ existing_rates = self._rates[network] # TODO: validate network exists
+ # Get all unset, external populations in a network.
+ network_pops = self._graph.get_populations(network)
+ selected_pops = []
+ for pop in network_pops:
+ if pop.is_internal:
+ continue
+ elif not force and pop.pop_id in existing_rates:
+ print('Firing rate for {}/{} has already been set, skipping.'.format(network, pop.pop_id))
+ else:
+ selected_pops.append(pop)
+
+ if selected_pops:
+ # assign firing rates from NWB file
+ # TODO:
+ rates_dict = poputils.get_firing_rate_from_nwb(selected_pops, nwb_file, trial)
+ self._rates[network].update(rates_dict)
+
+ def add_rate_hz(self, network, pop_id, rate, force=False):
+ """Set the firing rate of an external population.
+
+ This should be done before calling build_cells(). If a population has already been assigned a firing rate an
+ error will occur unless force=True.
+
+        :param network: name of the network containing the desired external population
+ :param pop_id: name/id of external population
+ :param rate: firing rate in Hz.
+ :param force: will overwrite existing firing rates
+ """
+ self.__add_rates_validator(network, pop_id, force)
+ self._rates[network][pop_id] = rate
+
+ def __add_rates_validator(self, network, pop_id, force):
+ if network not in self._graph.networks:
+ raise Exception('No network {} found in PopGraph.'.format(network))
+
+ pop = self._graph.get_population(network, pop_id)
+ if pop is None:
+ raise Exception('No population with id {} found in {}.'.format(pop_id, network))
+ if pop.is_internal:
+ raise Exception('Population {} in {} is not an external population.'.format(pop_id, network))
+ if not force and pop_id in self._rates[network]:
+ raise Exception('The firing rate for {}/{} already set and force=False.'.format(network, pop_id))
+
+ def _get_rate(self, network, pop):
+ """Gets the firing rate for a given population"""
+ return self._rates[network][pop.pop_id]
+
+ def build_populations(self):
+ """Build dipde Population objects from graph nodes.
+
+        To calculate external population firing rates, it first checks whether a population's firing rate has been
+        manually set in the graph. Otherwise it looks up the firing rate set by an earlier call to add_rate_hz,
+        add_rates_nwb, etc. (which should therefore be called first).
+ """
+ for network in self._graph.networks:
+ for pop in self._graph.get_populations(network):
+ if pop.is_internal:
+ dipde_pop = self.__create_internal_pop(pop)
+
+ else:
+ dipde_pop = self.__create_external_pop(pop, self._get_rate(network, pop))
+
+ self.__population_list.append(dipde_pop)
+ self.__population_table[network][pop.pop_id] = dipde_pop
+
+ def set_logging(self, log_file):
+ # TODO: move this out of the function, put in io class
+ if os.path.exists(log_file):
+ os.remove(log_file)
+
+ # get root logger
+ logger = logging.getLogger()
+ for h in list(logger.handlers):
+ # remove existing handlers that will write to console.
+ logger.removeHandler(h)
+
+        # creates a handler that writes to log_file
+ logging.basicConfig(filename=log_file, filemode='w', level=logging.DEBUG)
+
+ def set_external_connections(self, network_name):
+ """Sets the external connections for populations in a given network.
+
+ :param network_name: name of external network with External Populations to connect to internal pops.
+ """
+ for edge in self._graph.get_edges(network_name):
+ # Get source and target populations
+ src = edge.source
+ source_pop = self.__population_table[src.network][src.pop_id]
+ trg = edge.target
+ target_pop = self.__population_table[trg.network][trg.pop_id]
+
+ # build a connection.
+ self.__connection_list.append(self.__create_connection(source_pop, target_pop, edge))
+
+ def set_recurrent_connections(self):
+ """Initialize internal connections."""
+ for network in self._graph.internal_networks():
+ for edge in self._graph.get_edges(network):
+ src = edge.source
+ source_pop = self.__population_table[src.network][src.pop_id]
+ trg = edge.target
+ target_pop = self.__population_table[trg.network][trg.pop_id]
+ self.__connection_list.append(self.__create_connection(source_pop, target_pop, edge))
+
+ def run(self, duration=None):
+ # TODO: Check if cells/connections need to be rebuilt.
+
+        # Create the network
+ self._dipde_network = dipde.Network(population_list=self.populations, connection_list=self.__connection_list)
+
+ if duration is None:
+ duration = self.duration
+
+ print("running simulation...")
+ self._dipde_network.run(t0=0.0, tf=duration, dt=self.dt)
+ # TODO: make record_rates optional?
+ self.__record_rates()
+ print("done simulation.")
+
+ def __create_internal_pop(self, params):
+ # TODO: use getter methods directly in case arguments are not stored in dynamics params
+ # pop = InternalPopulation(**params.dynamics_params)
+ pop = InternalPopulation(**params.model_params)
+ return pop
+
+ def __create_external_pop(self, params, rates):
+ pop = ExternalPopulation(rates, record=False)
+ return pop
+
+ def __create_connection(self, source, target, params):
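+        # Maps graph edge attributes onto a dipde Connection (nsyns -> nsyn, delay -> delays, weight -> weights).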
+ return Connection(source, target, nsyn=params.nsyns, delays=params.delay, weights=params.weight)
+
+ def __record_rates(self):
+ with open(self._rates_file, 'w') as f:
+ # TODO: store internal populations separately, unless there is a reason to save external populations
+ # (there isn't and it will be problematic)
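+            # Each record is written as one whitespace-separated line: '<pop_id> <time> <rate>',
+            # e.g. 'tON 0.0001 12.3' (values illustrative).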
+ for network, pop_list in self.__population_table.items():
+ for pop_id, pop in pop_list.items():
+ if pop.record:
+ for time, rate in zip(pop.t_record, pop.firing_rate_record):
+ f.write('{} {} {}\n'.format(pop_id, time, rate))
+
+ @classmethod
+ def from_config(cls, configure, graph):
+ # load the json file or object
+        if isinstance(configure, str):  # use six.string_types here if Python 2 support is needed
+ config = cfg.from_json(configure, validate=True)
+ elif isinstance(configure, dict):
+ config = configure
+ else:
+ raise Exception('Could not convert {} (type "{}") to json.'.format(configure, type(configure)))
+ network = cls(graph)
+
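+        # For reference, the config is assumed to look roughly like this (keys and values illustrative):
+        #   {'run': {'duration': 1.0, 'dt': 0.0002, 'connect_internal': True},
+        #    'output': {'rates_file': 'rates.txt', 'log_file': 'run.log'},
+        #    'input': [{'type': 'pop_rate', 'source_nodes': 'LGN', 'pop_id': 'tON', 'rate': 15.0}]}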
+ if 'run' not in config:
+            raise Exception('Json file is missing "run" entry. Unable to build PopNetwork.')
+ run_dict = config['run']
+
+ # Create the output file
+ if 'output' in config:
+ out_dict = config['output']
+
+ rates_file = out_dict.get('rates_file', None)
+ if rates_file is not None:
+ # create directory if required
+ network.rates_file = rates_file
+ parent_dir = os.path.dirname(rates_file)
+ if not os.path.exists(parent_dir):
+ os.makedirs(parent_dir)
+
+ if 'log_file' in out_dict:
+ log_file = out_dict['log_file']
+ network.set_logging(log_file)
+
+ # get network parameters
+ if 'duration' in run_dict:
+ network.duration = run_dict['duration']
+
+ if 'dt' in run_dict:
+ network.dt = run_dict['dt']
+
+ # TODO: need to get firing rates before building populations
+ if 'input' in config:
+ for netinput in config['input']:
+ if netinput['type'] == 'external_spikes' and netinput['format'] == 'nwb' and netinput['active']:
+ # Load external network spike trains from an NWB file.
+ print('Setting firing rates for {} from {}.'.format(netinput['source_nodes'], netinput['file']))
+ network.add_rates_nwb(netinput['source_nodes'], netinput['file'], netinput['trial'])
+
+ if netinput['type'] == 'pop_rate':
+ print('Setting {}/{} to fire at {} Hz.'.format(netinput['source_nodes'], netinput['pop_id'], netinput['rate']))
+ network.add_rate_hz(netinput['source_nodes'], netinput['pop_id'], netinput['rate'])
+
+ # TODO: take input as function with Population argument
+
+ # Build populations
+ print('Building Populations')
+ network.build_populations()
+
+ # Build recurrent connections
+ if run_dict['connect_internal']:
+            print('Building recurrent connections')
+ network.set_recurrent_connections()
+
+        # Build external connections. Connections default to on and are turned off only if explicitly disabled.
+        # NOTE: It might be better to default to off. Need to discuss what would be more intuitive for users.
+ # TODO: ignore case of network name
+ external_network_settings = {name: True for name in graph.external_networks()}
+ if 'connect_external' in run_dict:
+ external_network_settings.update(run_dict['connect_external'])
+ for netname, connect in external_network_settings.items():
+ if connect:
+ print('Setting external connections for {}'.format(netname))
+ network.set_external_connections(netname)
+
+ return network
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/popnode.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/popnode.py
new file mode 100644
index 0000000..6288762
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/popnode.py
@@ -0,0 +1,158 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from bmtk.simulator.utils.graph import SimNode
+
+class PopNode(SimNode):
+ def __init__(self, node_id, graph, network, params):
+ self._graph = graph
+ self._node_id = node_id
+ self._network = network
+ self._graph_params = params
+
+        self._dynamics_params = {}
+        self._updated_params = {'dynamics_params': self._dynamics_params}
+        self._model_params = None  # optional model-level params; __getitem__ falls back to these last
+
+ self._gids = set()
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def pop_id(self):
+ return self._node_id
+
+ @property
+ def network(self):
+ return self._network
+
+ @property
+ def dynamics_params(self):
+ return self._dynamics_params
+
+ @dynamics_params.setter
+ def dynamics_params(self, value):
+ self._dynamics_params = value
+
+ @property
+ def is_internal(self):
+ return False
+
+ def __getitem__(self, item):
+ if item in self._updated_params:
+ return self._updated_params[item]
+ elif item in self._graph_params:
+ return self._graph_params[item]
+ elif self._model_params is not None:
+ return self._model_params[item]
+
+ def add_gid(self, gid):
+ self._gids.add(gid)
+
+ def get_gids(self):
+ return list(self._gids)
+
+
+class InternalNode(PopNode):
+ """
+ def __init__(self, node_id, graph, network, params):
+ super(InternalNode, self).__init__(node_id, graph, network, params)
+ #self._pop_id = node_id
+ #self._graph = graph
+ #self._network = network
+ #self._graph_params = params
+ #self._dynamics_params = {}
+ #self._update_params = {'dynamics_params': self._dynamics_params}
+ """
+ @property
+ def tau_m(self):
+ return self['tau_m']
+ #return self._dynamics_params.get('tau_m', None)
+
+ @tau_m.setter
+ def tau_m(self, value):
+ #return self['tau_m']
+ self._dynamics_params['tau_m'] = value
+
+ @property
+ def v_max(self):
+ return self._dynamics_params.get('v_max', None)
+
+ @v_max.setter
+ def v_max(self, value):
+ self._dynamics_params['v_max'] = value
+
+ @property
+ def dv(self):
+ return self._dynamics_params.get('dv', None)
+
+ @dv.setter
+ def dv(self, value):
+ self._dynamics_params['dv'] = value
+
+ @property
+ def v_min(self):
+ return self._dynamics_params.get('v_min', None)
+
+ @v_min.setter
+ def v_min(self, value):
+ self._dynamics_params['v_min'] = value
+
+ @property
+ def is_internal(self):
+ return True
+
+ def __repr__(self):
+ props = 'pop_id={}, tau_m={}, v_max={}, v_min={}, dv={}'.format(self.pop_id, self.tau_m, self.v_max, self.v_min,
+ self.dv)
+ return 'InternalPopulation({})'.format(props)
+
+
+class ExternalPopulation(PopNode):
+ def __init__(self, node_id, graph, network, params):
+ super(ExternalPopulation, self).__init__(node_id, graph, network, params)
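+        # -1 is a sentinel meaning "firing rate not yet set"; see is_firing_rate_set below.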
+ self._firing_rate = -1
+ if 'firing_rate' in params:
+ self._firing_rate = params['firing_rate']
+
+ @property
+ def firing_rate(self):
+ return self._firing_rate
+
+ @property
+ def is_firing_rate_set(self):
+ return self._firing_rate >= 0
+
+ @firing_rate.setter
+ def firing_rate(self, rate):
+        assert isinstance(rate, (int, float)) and rate >= 0
+ self._firing_rate = rate
+
+ @property
+ def is_internal(self):
+ return False
+
+ def __repr__(self):
+ props = 'pop_id={}, firing_rate={}'.format(self.pop_id, self.firing_rate)
+ return 'ExternalPopulation({})'.format(props)
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/popsimulator.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/popsimulator.py
new file mode 100644
index 0000000..38c660a
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/popsimulator.py
@@ -0,0 +1,451 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import logging
+from six import string_types
+
+from dipde.internals.internalpopulation import InternalPopulation
+from dipde.internals.externalpopulation import ExternalPopulation
+from dipde.internals.connection import Connection
+import dipde
+
+from bmtk.simulator.core.simulator import Simulator
+from . import config as cfg
+from . import utils as poputils
+import bmtk.simulator.utils.simulation_inputs as inputs
+from bmtk.utils.io import spike_trains, firing_rates
+
+
+class PopSimulator(Simulator):
+ def __init__(self, graph, dt=0.0001, tstop=0.0, overwrite=True):
+ self._graph = graph
+
+ self._tstop = tstop
+ self._dt = dt
+ self._rates_file = None # name of file where the output is saved
+
+        self.__population_list = []  # list of all populations, internal and external
+        self.__population_table = {network: {} for network in self._graph.networks}  # population lookup by [network][id]
+ self.__connection_list = [] # list of all connections
+ self._dipde_network = None # reference to dipde.Network object
+
+        # dictionary of rates for every external network/pop_id, prepopulated with populations whose rates have
+        # already been set manually; the rest should be filled in via one of the add_rates_* functions, which
+        # read and update self._rates.
+        self._rates = {network: {pop.pop_id: pop.firing_rate for pop in self._graph.get_populations(network)
+                                 if not pop.is_internal and pop.is_firing_rate_set}
+                       for network in self._graph.networks}
+
+ """
+ for network in self._graph.networks:
+ for pop in self._graph.get_populations(network):
+
+ if pop.is_internal:
+ dipde_pop = self.__create_internal_pop(pop)
+
+ else:
+ if pop.is_firing_rate_set:
+ rates = pop.firing_rate
+ """
+
+ @property
+ def tstop(self):
+ return self._tstop
+
+ @tstop.setter
+ def tstop(self, value):
+ self._tstop = value
+
+ @property
+ def dt(self):
+ return self._dt
+
+ @dt.setter
+ def dt(self, value):
+ self._dt = value
+
+ @property
+ def rates_file(self):
+ return self._rates_file
+
+ @rates_file.setter
+ def rates_file(self, value):
+ self._rates_file = value
+
+ @property
+ def populations(self):
+ return self.__population_list
+
+ @property
+ def connections(self):
+ return self.__connection_list
+
+ def add_rates_nwb(self, network, nwb_file, trial, force=False):
+ """Creates external population firing rates from an NWB file.
+
+        Will iterate through a processing trial of an NWB file, assigning each gid to the population it belongs to
+        and taking the average firing rate.
+
+ This should be done before calling build_cells(). If a population has already been assigned a firing rate an
+ error will occur unless force=True.
+
+ :param network: Name of network with external populations.
+ :param nwb_file: NWB file with spike rates.
+ :param trial: trial id in NWB file
+ :param force: will overwrite existing firing rates
+ """
+ existing_rates = self._rates[network] # TODO: validate network exists
+ # Get all unset, external populations in a network.
+ network_pops = self._graph.get_populations(network)
+ selected_pops = []
+ for pop in network_pops:
+ if pop.is_internal:
+ continue
+ elif not force and pop.pop_id in existing_rates:
+ print('Firing rate for {}/{} has already been set, skipping.'.format(network, pop.pop_id))
+ else:
+ selected_pops.append(pop)
+
+ if selected_pops:
+ # assign firing rates from NWB file
+ # TODO:
+ rates_dict = poputils.get_firing_rate_from_nwb(selected_pops, nwb_file, trial)
+ self._rates[network].update(rates_dict)
+
+ def add_rate_hz(self, network, pop_id, rate, force=False):
+ """Set the firing rate of an external population.
+
+ This should be done before calling build_cells(). If a population has already been assigned a firing rate an
+ error will occur unless force=True.
+
+        :param network: name of network with the desired external population
+ :param pop_id: name/id of external population
+ :param rate: firing rate in Hz.
+ :param force: will overwrite existing firing rates
+ """
+ self.__add_rates_validator(network, pop_id, force)
+ self._rates[network][pop_id] = rate
+
+ def __add_rates_validator(self, network, pop_id, force):
+ if network not in self._graph.networks:
+ raise Exception('No network {} found in PopGraph.'.format(network))
+
+ pop = self._graph.get_population(network, pop_id)
+ if pop is None:
+ raise Exception('No population with id {} found in {}.'.format(pop_id, network))
+ if pop.is_internal:
+ raise Exception('Population {} in {} is not an external population.'.format(pop_id, network))
+ if not force and pop_id in self._rates[network]:
+ raise Exception('The firing rate for {}/{} already set and force=False.'.format(network, pop_id))
+
+ def _get_rate(self, network, pop):
+ """Gets the firing rate for a given population"""
+ return self._rates[network][pop.pop_id]
+
+ def build_populations(self):
+ """Build dipde Population objects from graph nodes.
+
+        To calculate external population firing rates, it first checks whether a population's firing rate has been
+        manually set in the graph. Otherwise it looks up the firing rate set by an earlier call to add_rate_hz,
+        add_rates_nwb, etc. (which should therefore be called first).
+ """
+ for network in self._graph.networks:
+ for pop in self._graph.get_populations(network):
+ if pop.is_internal:
+ dipde_pop = self.__create_internal_pop(pop)
+
+ else:
+ dipde_pop = self.__create_external_pop(pop, self._get_rate(network, pop))
+
+ self.__population_list.append(dipde_pop)
+ self.__population_table[network][pop.pop_id] = dipde_pop
+
+ def set_logging(self, log_file):
+ # TODO: move this out of the function, put in io class
+ if os.path.exists(log_file):
+ os.remove(log_file)
+
+ # get root logger
+ logger = logging.getLogger()
+ for h in list(logger.handlers):
+ # remove existing handlers that will write to console.
+ logger.removeHandler(h)
+
+        # creates a handler that writes to log_file
+ logging.basicConfig(filename=log_file, filemode='w', level=logging.DEBUG)
+
+ def set_external_connections(self, network_name):
+ """Sets the external connections for populations in a given network.
+
+ :param network_name: name of external network with External Populations to connect to internal pops.
+ """
+ for edge in self._graph.get_edges(network_name):
+ # Get source and target populations
+ src = edge.source
+ source_pop = self.__population_table[src.network][src.pop_id]
+ trg = edge.target
+ target_pop = self.__population_table[trg.network][trg.pop_id]
+
+ # build a connection.
+ self.__connection_list.append(self.__create_connection(source_pop, target_pop, edge))
+
+ def set_recurrent_connections(self):
+ """Initialize internal connections."""
+ for network in self._graph.internal_networks():
+ for edge in self._graph.get_edges(network):
+ src = edge.source
+ source_pop = self.__population_table[src.network][src.pop_id]
+ trg = edge.target
+ target_pop = self.__population_table[trg.network][trg.pop_id]
+ self.__connection_list.append(self.__create_connection(source_pop, target_pop, edge))
+
+ def run(self, tstop=None):
+ # TODO: Check if cells/connections need to be rebuilt.
+
+        # Create the network
+ dipde_pops = [p.dipde_obj for p in self._graph.populations]
+ dipde_conns = [c.dipde_obj for c in self._graph.connections]
+
+ self._dipde_network = dipde.Network(population_list=dipde_pops, connection_list=dipde_conns)
+
+ #self._dipde_network = dipde.Network(population_list=self._graph.populations,
+ # connection_list=self._graph.connections)
+
+ if tstop is None:
+ tstop = self.tstop
+
+ print("running simulation...")
+ self._dipde_network.run(t0=0.0, tf=tstop, dt=self.dt)
+ # TODO: make record_rates optional?
+ self.__record_rates()
+ print("done simulation.")
+
+ def __create_internal_pop(self, params):
+ # TODO: use getter methods directly in case arguments are not stored in dynamics params
+ # pop = InternalPopulation(**params.dynamics_params)
+ pop = InternalPopulation(**params.model_params)
+ return pop
+
+ def __create_external_pop(self, params, rates):
+ pop = ExternalPopulation(rates, record=False)
+ return pop
+
+ def __create_connection(self, source, target, params):
+ return Connection(source, target, nsyn=params.nsyns, delays=params.delay, weights=params.weight)
+
+ def __record_rates(self):
+ with open(self._rates_file, 'w') as f:
+ for pop in self._graph.internal_populations:
+ if pop.record:
+ for time, rate in zip(pop.dipde_obj.t_record, pop.dipde_obj.firing_rate_record):
+ f.write('{} {} {}\n'.format(pop.pop_id, time, rate))
+
+ '''
+ @classmethod
+ def from_config(cls, configure, graph):
+ # load the json file or object
+ if isinstance(configure, basestring):
+ config = cfg.from_json(configure, validate=True)
+ elif isinstance(configure, dict):
+ config = configure
+ else:
+ raise Exception('Could not convert {} (type "{}") to json.'.format(configure, type(configure)))
+ network = cls(graph)
+
+ if 'run' not in config:
+            raise Exception('Json file is missing "run" entry. Unable to build PopSimulator.')
+ run_dict = config['run']
+
+ # Create the output file
+ if 'output' in config:
+ out_dict = config['output']
+
+ rates_file = out_dict.get('rates_file', None)
+ if rates_file is not None:
+ # create directory if required
+ network.rates_file = rates_file
+ parent_dir = os.path.dirname(rates_file)
+ if not os.path.exists(parent_dir):
+ os.makedirs(parent_dir)
+
+ if 'log_file' in out_dict:
+ log_file = out_dict['log_file']
+ network.set_logging(log_file)
+
+ # get network parameters
+ if 'duration' in run_dict:
+ network.duration = run_dict['duration']
+
+ if 'dt' in run_dict:
+ network.dt = run_dict['dt']
+
+ # TODO: need to get firing rates before building populations
+ if 'input' in config:
+ for netinput in config['input']:
+ if netinput['type'] == 'external_spikes' and netinput['format'] == 'nwb' and netinput['active']:
+ # Load external network spike trains from an NWB file.
+ print('Setting firing rates for {} from {}.'.format(netinput['source_nodes'], netinput['file']))
+ network.add_rates_nwb(netinput['source_nodes'], netinput['file'], netinput['trial'])
+
+ if netinput['type'] == 'pop_rate':
+ print('Setting {}/{} to fire at {} Hz.'.format(netinput['source_nodes'], netinput['pop_id'], netinput['rate']))
+ network.add_rate_hz(netinput['source_nodes'], netinput['pop_id'], netinput['rate'])
+
+ # TODO: take input as function with Population argument
+
+ # Build populations
+ print('Building Populations')
+ network.build_populations()
+
+ # Build recurrent connections
+ if run_dict['connect_internal']:
+            print('Building recurrent connections')
+ network.set_recurrent_connections()
+
+        # Build external connections. Connections default to on and are turned off only if explicitly disabled.
+        # NOTE: It might be better to default to off. Need to discuss what would be more intuitive for users.
+ # TODO: ignore case of network name
+ external_network_settings = {name: True for name in graph.external_networks()}
+ if 'connect_external' in run_dict:
+ external_network_settings.update(run_dict['connect_external'])
+ for netname, connect in external_network_settings.items():
+ if connect:
+ print('Setting external connections for {}'.format(netname))
+ network.set_external_connections(netname)
+
+ return network
+ '''
+
+ @classmethod
+ def from_config(cls, configure, graph):
+ # load the json file or object
+ if isinstance(configure, string_types):
+ config = cfg.from_json(configure, validate=True)
+ elif isinstance(configure, dict):
+ config = configure
+ else:
+ raise Exception('Could not convert {} (type "{}") to json.'.format(configure, type(configure)))
+
+ if 'run' not in config:
+            raise Exception('Json file is missing "run" entry. Unable to build PopSimulator.')
+ run_dict = config['run']
+
+ # Get network parameters
+ # step time (dt) is set in the kernel and should be passed
+ overwrite = run_dict['overwrite_output_dir'] if 'overwrite_output_dir' in run_dict else True
+ print_time = run_dict['print_time'] if 'print_time' in run_dict else False
+ dt = run_dict['dt'] # TODO: make sure dt exists
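+        # config.tstop appears to be specified in milliseconds; it is converted to seconds for dipde here.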
+ tstop = float(config.tstop) / 1000.0
+        network = cls(graph, dt=dt, tstop=tstop, overwrite=overwrite)
+
+ if 'output_dir' in config['output']:
+ network.output_dir = config['output']['output_dir']
+
+ # network.spikes_file = config['output']['spikes_ascii']
+
+ if 'block_run' in run_dict and run_dict['block_run']:
+ if 'block_size' not in run_dict:
+ raise Exception('"block_run" is set to True but "block_size" not found.')
+ network._block_size = run_dict['block_size']
+
+ if 'duration' in run_dict:
+ network.duration = run_dict['duration']
+
+ graph.io.log_info('Building cells.')
+ graph.build_nodes()
+
+ graph.io.log_info('Building recurrent connections')
+ graph.build_recurrent_edges()
+
+ for sim_input in inputs.from_config(config):
+ node_set = graph.get_node_set(sim_input.node_set)
+ if sim_input.input_type == 'spikes':
+ spikes = spike_trains.SpikesInput.load(name=sim_input.name, module=sim_input.module,
+ input_type=sim_input.input_type, params=sim_input.params)
+ graph.io.log_info('Build virtual cell stimulations for {}'.format(sim_input.name))
+ graph.add_spike_trains(spikes, node_set)
+ else:
+ graph.io.log_info('Build virtual cell stimulations for {}'.format(sim_input.name))
+ rates = firing_rates.RatesInput(sim_input.params)
+ graph.add_rates(rates, node_set)
+
+ # Create the output file
+ if 'output' in config:
+ out_dict = config['output']
+
+ rates_file = out_dict.get('rates_file', None)
+ if rates_file is not None:
+ rates_file = rates_file if os.path.isabs(rates_file) else os.path.join(config.output_dir, rates_file)
+ # create directory if required
+ network.rates_file = rates_file
+ parent_dir = os.path.dirname(rates_file)
+ if not os.path.exists(parent_dir):
+ os.makedirs(parent_dir)
+
+ if 'log_file' in out_dict:
+ log_file = out_dict['log_file']
+ network.set_logging(log_file)
+
+ # build the cells
+ #io.log('Building cells')
+ #network.build_cells()
+
+ # Build internal connections
+ #if run_dict['connect_internal']:
+ # io.log('Creating recurrent connections')
+ # network.set_recurrent_connections()
+
+    # Build external connections. Connections default to on and are turned off only if explicitly disabled.
+    # NOTE: It might be better to default to off. Need to discuss what would be more intuitive for users.
+ # TODO: ignore case of network name
+
+ '''
+ external_network_settings = {name: True for name in graph.external_networks()}
+ if 'connect_external' in run_dict:
+ external_network_settings.update(run_dict['connect_external'])
+ for netname, connect in external_network_settings.items():
+ if connect:
+ io.log('Setting external connections for {}'.format(netname))
+ network.set_external_connections(netname)
+
+ # Build inputs
+ if 'input' in config:
+ for netinput in config['input']:
+ if netinput['type'] == 'external_spikes' and netinput['format'] == 'nwb' and netinput['active']:
+ network.add_spikes_nwb(netinput['source_nodes'], netinput['file'], netinput['trial'])
+
+ io.log_info('Adding stimulations')
+ network.make_stims()
+ '''
+
+ graph.io.log_info('Network created.')
+ return network
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/property_schemas/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/property_schemas/__init__.py
new file mode 100644
index 0000000..4d7c64c
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/property_schemas/__init__.py
@@ -0,0 +1,28 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .base_schema import PopTypes
+from . import property_schema_ver0 as v0
+from . import property_schema_ver1 as v1
+
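+# v1 reads model parameters from the 'dynamics_params' column; v0 (the AI schema) reads the older 'params_file'
+# column (see property_schema_ver0/1).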
+DefaultPropertySchema = v1.PropertySchema()
+AIPropertySchema = v0.PropertySchema()
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/property_schemas/base_schema.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/property_schemas/base_schema.py
new file mode 100644
index 0000000..cc880a6
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/property_schemas/base_schema.py
@@ -0,0 +1,50 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+class PopTypes:
+ """Essentially an enum to store the type/group of each cell. It's faster and more robust than doing multiple string
+ comparisons.
+ """
+ Internal = 0
+ External = 1
+ Other = 2 # should never really get here
+
+ @staticmethod
+ def len():
+ return 3
+
+
+class PropertySchema(object):
+ #######################################
+ # For nodes/cells properties
+ #######################################
+ def get_pop_type(self, pop_params):
+ model_type = pop_params['model_type'].lower()
+        if model_type in ('virtual', 'external'):
+ return PopTypes.External
+ elif model_type == 'internal':
+ return PopTypes.Internal
+ else:
+            return PopTypes.Other  # PopTypes defines no 'Unknown'; 'Other' is the catch-all
+
+ def get_params_column(self):
+ raise NotImplementedError()
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/property_schemas/property_schema_ver0.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/property_schemas/property_schema_ver0.py
new file mode 100644
index 0000000..6c5c542
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/property_schemas/property_schema_ver0.py
@@ -0,0 +1,28 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .base_schema import PopTypes, PropertySchema as BaseSchema
+
+
+class PropertySchema(BaseSchema):
+ def get_params_column(self):
+ return 'params_file'
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/property_schemas/property_schema_ver1.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/property_schemas/property_schema_ver1.py
new file mode 100644
index 0000000..8794525
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/property_schemas/property_schema_ver1.py
@@ -0,0 +1,28 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .base_schema import PropertySchema as BaseSchema
+
+
+class PropertySchema(BaseSchema):
+ def get_params_column(self):
+ return 'dynamics_params'
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/sonata_adaptors.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/sonata_adaptors.py
new file mode 100644
index 0000000..dcc1300
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/sonata_adaptors.py
@@ -0,0 +1,12 @@
+from bmtk.simulator.core.sonata_reader import NodeAdaptor, SonataBaseNode, EdgeAdaptor, SonataBaseEdge
+
+
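+# Thin SONATA adaptors for popnet: PopNetEdge exposes the 'syn_weight' edge attribute, and PopEdgeAdaptor wraps
+# raw sonata edges in PopNetEdge objects.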
+class PopNetEdge(SonataBaseEdge):
+ @property
+ def syn_weight(self):
+ return self._edge['syn_weight']
+
+
+class PopEdgeAdaptor(EdgeAdaptor):
+ def get_edge(self, sonata_edge):
+ return PopNetEdge(sonata_edge, self)
diff --git a/bmtk-vb/build/lib/bmtk/simulator/popnet/utils.py b/bmtk-vb/build/lib/bmtk/simulator/popnet/utils.py
new file mode 100644
index 0000000..ceeeaa3
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/popnet/utils.py
@@ -0,0 +1,287 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import math
+import warnings
+import numpy as np
+import pandas as pd
+import scipy.interpolate as spinterp
+import collections
+import h5py
+import itertools
+import scipy.io as sio
+import json
+import importlib
+
+"""
+Most of these functions are not used directly by popnet, but may still be used in some other capacity. They have
+been marked as deprecated and should be removed soon.
+"""
+
+
+def get_firing_rate_from_nwb(populations, nwb_file, trial):
+ """Calculates firing rates for an external population"""
+ h5_file = h5py.File(nwb_file, 'r')
+ spike_trains_ds = h5_file['processing'][trial]['spike_train']
+
+ # TODO: look into adding a time window rather than searching for min/max t.
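+    # The per-population rate below is 1.0e3 * mean(spike count per gid) / (spike_max_t - spike_min_t),
+    # which assumes spike times are in milliseconds and yields a rate in Hz.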
+ firing_rates = {}
+ for pop in populations:
+ spike_counts = []
+ spike_min_t = 1.0e30
+ spike_max_t = 0.0
+ for gid in pop.get_gids():
+ spike_train_ds = spike_trains_ds[str(gid)]['data']
+ if spike_train_ds is not None and len(spike_train_ds[...]) > 0:
+ spike_times = spike_train_ds[...]
+ tmp_min = min(spike_times)
+ spike_min_t = tmp_min if tmp_min < spike_min_t else spike_min_t
+ tmp_max = max(spike_times)
+ spike_max_t = tmp_max if tmp_max > spike_max_t else spike_max_t
+ spike_counts.append(len(spike_times))
+
+ # TODO make sure t_diffs is not null and spike_counts has some values
+ firing_rates[pop.pop_id] = 1.0e03 * np.mean(spike_counts) / (spike_max_t - spike_min_t)
+ return firing_rates
+
+
+def get_firing_rates(populations, spike_trains):
+ """Calculates firing rates for an external population"""
+ #h5_file = h5py.File(nwb_file, 'r')
+ #spike_trains_ds = h5_file['processing'][trial]['spike_train']
+
+ # TODO: look into adding a time window rather than searching for min/max t.
+ firing_rates = {}
+ for pop in populations:
+ spike_counts = []
+ spike_min_t = 1.0e30
+ spike_max_t = 0.0
+ for gid in pop.get_gids():
+ spike_times = spike_trains.get_spikes(gid)
+ if spike_times is not None and len(spike_times) > 0:
+ tmp_min = min(spike_times)
+ spike_min_t = tmp_min if tmp_min < spike_min_t else spike_min_t
+ tmp_max = max(spike_times)
+ spike_max_t = tmp_max if tmp_max > spike_max_t else spike_max_t
+ spike_counts.append(len(spike_times))
+
+ # TODO make sure t_diffs is not null and spike_counts has some values
+ firing_rates[pop.pop_id] = 1.0e03 * np.mean(spike_counts) / (spike_max_t - spike_min_t)
+ return firing_rates
+
+#############################################
+# Deprecated
+#############################################
+def list_of_dicts_to_dict_of_lists(list_of_dicts, default=None):
+ new_dict = {}
+ for curr_dict in list_of_dicts:
+ print(curr_dict.keys())
+
+
+#############################################
+# Deprecated
+#############################################
+class KeyDefaultDict(collections.defaultdict):
+ def __missing__(self, key):
+ if self.default_factory is None:
+            raise KeyError(key)
+ else:
+ ret = self[key] = self.default_factory(key)
+ return ret
+
+
+#############################################
+# Deprecated
+#############################################
+def create_firing_rate_server(t, y):
+
+ warnings.warn('Hard coded bug fix for mindscope council 4/27/15')
+ t = t/.001/200
+ interpolation_callable = spinterp.interp1d(t, y, bounds_error=False, fill_value=0)
+ return lambda t: interpolation_callable(t)
+
+
+#############################################
+# Deprecated
+#############################################
+def create_nwb_server_file_path(nwb_file_name, nwb_path):
+ f = h5py.File(nwb_file_name, 'r')
+ y = f['%s/data' % nwb_path][:]
+ dt = f['%s/data' % nwb_path].dims[0][0].value
+ t = np.arange(len(y))*dt
+ f.close()
+ return create_firing_rate_server(t, y)
+
+
+#############################################
+# Deprecated
+#############################################
+def get_mesoscale_connectivity_dict():
+
+ # Extract data into a dictionary:
+ mesoscale_data_dir = '/data/mat/iSee_temp_shared/packages/mesoscale_connectivity'
+ nature_data = {}
+ for mat, side in itertools.product(['W', 'PValue'],['ipsi', 'contra']):
+ data, row_labels, col_labels = [sio.loadmat(os.path.join(mesoscale_data_dir, '%s_%s.mat' % (mat, side)))[key]
+ for key in ['data', 'row_labels', 'col_labels']]
+ for _, (row_label, row) in enumerate(zip(row_labels, data)):
+ for _, (col_label, val) in enumerate(zip(col_labels, row)):
+ nature_data[mat, side, str(row_label.strip()), str(col_label.strip())] = val
+
+ return nature_data
+
+
+#############################################
+# Deprecated
+#############################################
+def reorder_columns_in_frame(frame, var):
+ varlist = [w for w in frame.columns if w not in var]
+ return frame[var+varlist]
+
+
+#############################################
+# Deprecated
+#############################################
+def population_to_dict_for_dataframe(p):
+
+ black_list = ['firing_rate_record',
+ 'initial_firing_rate',
+ 'metadata',
+ 't_record']
+
+ json_list = ['p0', 'tau_m']
+
+ return_dict = {}
+ p_dict = p.to_dict()
+
+ for key, val in p_dict['metadata'].items():
+ return_dict[key] = val
+
+ for key, val in p_dict.items():
+ if key not in black_list:
+ if key in json_list:
+ val = json.dumps(val)
+ return_dict[key] = val
+
+ return return_dict
+
+
+#############################################
+# Deprecated
+#############################################
+def network_dict_to_target_adjacency_dict(network_dict):
+ print(network_dict)
+
+
+#############################################
+# Deprecated
+#############################################
+def population_list_to_dataframe(population_list):
+ df = pd.DataFrame({'_tmp': [None]})
+ for p in population_list:
+ model_dict = {'_tmp': [None]}
+ for key, val in population_to_dict_for_dataframe(p).items():
+ model_dict.setdefault(key, []).append(val)
+ df_tmp = pd.DataFrame(model_dict)
+
+ df = pd.merge(df, df_tmp, how='outer')
+ df.drop('_tmp', inplace=True, axis=1)
+ return df
+
+
+#############################################
+# Deprecated
+#############################################
+def df_to_csv(df, save_file_name, index=False, sep=' ', na_rep='None'):
+ df.to_csv(save_file_name, index=index, sep=sep, na_rep=na_rep)
+
+
+#############################################
+# Deprecated
+#############################################
+def population_list_to_csv(population_list, save_file_name):
+ df = population_list_to_dataframe(population_list)
+ df_to_csv(df, save_file_name)
+
+
+#############################################
+# Deprecated
+#############################################
+def create_instance(data_dict):
+ '''Helper function to create an object from a dictionary containing:
+
+ "module": The name of the module containing the class
+ "class": The name of the class to be used to create the object
+ '''
+
+ curr_module, curr_class = data_dict.pop('module'), data_dict.pop('class')
+ curr_instance = getattr(importlib.import_module(curr_module), curr_class)(**data_dict)
+
+ return curr_instance
+
+
+#############################################
+# Deprecated
+#############################################
+def assert_model_known(model, model_dict):
+    """Test that a model is in model_dict; if not, raise an exception."""
+    if model not in model_dict:
+        raise Exception('model {} does not exist.'.format(model))
+
+
+#############################################
+# Deprecated
+#############################################
+def create_population_list(node_table, model_table):
+ """Create a population list from the node and model pandas tables"""
+
+ model_dict = {}
+ for row in model_table.iterrows():
+ model = row[1].to_dict()
+ model_dict[model.pop('model')] = model
+
+ population_list = []
+ for row in node_table.iterrows():
+ node = row[1].to_dict()
+ model = node.pop('model')
+
+ # Check if model type in model dict:
+ assert_model_known(model, model_dict)
+
+ # Clean up:
+ curr_model = {}
+ for key, val in model_dict[model].items():
+ if not (isinstance(val, float) and math.isnan(val)):
+ curr_model[key] = val
+ curr_model.setdefault('metadata', {})['model'] = model
+
+ curr_module, curr_class = curr_model['module'], curr_model['class']
+ curr_instance = getattr(importlib.import_module(curr_module), curr_class)(**curr_model)
+ population_list.append(curr_instance)
+
+ return population_list
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/utils/__init__.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/config.py b/bmtk-vb/build/lib/bmtk/simulator/utils/config.py
new file mode 100644
index 0000000..aa5ee5e
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/config.py
@@ -0,0 +1,438 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+import re
+import copy
+import datetime
+from six import string_types
+
+
+from bmtk.simulator.core.io_tools import io
+
+
+def from_json(config_file, validator=None):
+ """Builds and validates a configuration json file.
+
+ :param config_file: File object or path to a json file.
+ :param validator: A SimConfigValidator object to validate json file. Won't validate if set to None
+ :return: A dictionary, verified against json validator and with manifest variables resolved.
+ """
+ #print(config_file)
+ #if os.path.isfile(config_file):
+ #if isinstance(config_file, file):
+ # conf = json.load(config_file)
+ if isinstance(config_file, string_types):
+ conf = json.load(open(config_file, 'r'))
+ elif isinstance(config_file, dict):
+ conf = config_file.copy()
+ else:
+ raise Exception('{} is not a file or file path.'.format(config_file))
+
+ # insert file path into dictionary
+ if 'config_path' not in conf:
+ conf['config_path'] = os.path.abspath(config_file)
+ conf['config_dir'] = os.path.dirname(conf['config_path'])
+
+ # Will resolve manifest variables and validate
+ return from_dict(conf, validator)
+
+
+def from_dict(config_dict, validator=None):
+ """Builds and validates a configuration json dictionary object. Best to directly use from_json when possible.
+
+ :param config_dict: Dictionary object
+ :param validator: A SimConfigValidator object to validate json file. Won't validate if set to None
+ :return: A dictionary, verified against json validator and with manifest variables resolved.
+ """
+ assert(isinstance(config_dict, dict))
+    conf = copy.deepcopy(config_dict)  # Since the functions will mutate the dictionary we will copy just in case.
+
+ if 'config_path' not in conf:
+ conf['config_path'] = os.path.join(os.getcwd(), 'tmp_cfg.dict')
+ conf['config_dir'] = os.path.dirname(conf['config_path'])
+
+ # Build the manifest and resolve variables.
+ # TODO: Check that manifest exists
+ manifest = __build_manifest(conf)
+ conf['manifest'] = manifest
+ __recursive_insert(conf, manifest)
+
+ # In our work with Blue-Brain it was agreed that 'network' and 'simulator' parts of config may be split up into
+ # separate files. If this is the case we build each sub-file separately and merge into this one
+ for childconfig in ['network', 'simulation']:
+ if childconfig in conf and isinstance(conf[childconfig], string_types):
+ # Try to resolve the path of the network/simulation config files. If an absolute path isn't used find
+ # the file relative to the current config file. TODO: test if this will work on windows?
+ conf_str = conf[childconfig]
+ conf_path = conf_str if conf_str.startswith('/') else os.path.join(conf['config_dir'], conf_str)
+
+ # Build individual json file and merge into parent.
+ child_json = from_json(conf_path)
+ del child_json['config_path'] # we don't want 'config_path' of parent being overwritten.
+ conf.update(child_json)
+
+ # Run the validator
+ if validator is not None:
+ validator.validate(conf)
+
+ return conf
+
+
+def copy_config(conf):
+ """Copy configuration file to different directory, with manifest variables resolved.
+
+ :param conf: configuration dictionary
+ """
+ output_dir = conf.output_dir
+ config_name = os.path.basename(conf['config_path'])
+ output_path = os.path.join(output_dir, config_name)
+ with open(output_path, 'w') as fp:
+ out_cfg = conf.copy()
+ if 'manifest' in out_cfg:
+ del out_cfg['manifest']
+ json.dump(out_cfg, fp, indent=2)
+
+
+def __special_variables(conf):
+ """A list of preloaded variables to insert into the manifest, containing things like path to run-time directory,
+ configuration directory, etc.
+ """
+ pre_manifest = dict()
+ pre_manifest['$workingdir'] = os.path.dirname(os.getcwd())
+ if 'config_path' in conf:
+ pre_manifest['$configdir'] = os.path.dirname(conf['config_path']) # path of configuration file
+ pre_manifest['$configfname'] = conf['config_path']
+
+ dt_now = datetime.datetime.now()
+ pre_manifest['$time'] = dt_now.strftime('%H-%M-%S')
+ pre_manifest['$date'] = dt_now.strftime('%Y-%m-%d')
+ pre_manifest['$datetime'] = dt_now.strftime('%Y-%m-%d_%H-%M-%S')
+
+ return pre_manifest
+
+
+def __build_manifest(conf):
+ """Resolves the manifest section and resolve any internal variables"""
+ if 'manifest' not in conf:
+ return __special_variables(conf)
+
+ manifest = conf["manifest"]
+ resolved_manifest = __special_variables(conf)
+ resolved_keys = set()
+ unresolved_keys = set(manifest.keys())
+
+    # No longer using recursion since that can lead to an infinite loop if the person who writes the config file isn't
+    # careful. Also added code to allow for the ${VAR} format in case the user wants to use "$.../some_${MODEL}_here/..."
+ while unresolved_keys:
+ for key in unresolved_keys:
+ # Find all variables in manifest and see if they can be replaced by the value in resolved_manifest
+ value = __find_variables(manifest[key], resolved_manifest)
+
+            # If the value no longer has variables, add the key-value pair to resolved_manifest and remove it from
+            # unresolved_keys
+ if value.find('$') < 0:
+ resolved_manifest[key] = value
+ resolved_keys.add(key)
+
+ # remove resolved key-value pairs from set, and make sure at every iteration unresolved_keys shrinks to prevent
+ # infinite loops
+ n_unresolved = len(unresolved_keys)
+ unresolved_keys -= resolved_keys
+ if n_unresolved == len(unresolved_keys):
+ msg = "Unable to resolve manifest variables: {}".format(unresolved_keys)
+ raise Exception(msg)
+
+ return resolved_manifest
+
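+# Example (illustrative): a manifest whose entries reference each other as well as the special variables
+# above. '$DATA' can only resolve after '$BASE', which the while-loop handles on a later pass:
+#
+#   conf = {'config_path': '/home/user/sim/config.json',
+#           'manifest': {'$BASE': '$configdir', '$DATA': '$BASE/network_data'}}
+#   __build_manifest(conf)['$DATA']   # -> '/home/user/sim/network_data'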
+
+def __recursive_insert(json_obj, manifest):
+ """Loop through the config and substitute the path variables (e.g.: $MY_DIR) with the values from the manifest
+
+ :param json_obj: A json dictionary object that may contain variables needing to be resolved.
+ :param manifest: A dictionary of variable values
+ :return: A new json dictionary config with variables resolved
+ """
+ if isinstance(json_obj, string_types):
+ return __find_variables(json_obj, manifest)
+
+ elif isinstance(json_obj, list):
+ new_list = []
+ for itm in json_obj:
+ new_list.append(__recursive_insert(itm, manifest))
+ return new_list
+
+ elif isinstance(json_obj, dict):
+ for key, val in json_obj.items():
+ if key == 'manifest':
+ continue
+ json_obj[key] = __recursive_insert(val, manifest)
+
+ return json_obj
+
+ else:
+ return json_obj
+
+
+def __find_variables(json_str, manifest):
+ """Replaces variables (i.e. $VAR, ${VAR}) with their values from the manifest.
+
+ :param json_str: a json string that may contain zero, one, or multiple variables
+ :param manifest: dictionary of variable lookup values
+ :return: json_str with resolved variables. Won't resolve variables that don't exist in manifest.
+ """
+ variables = [m for m in re.finditer(r'\$\{?[\w]+\}?', json_str)]
+ for var in variables:
+ var_lookup = var.group()
+ if var_lookup.startswith('${') and var_lookup.endswith('}'):
+ # replace ${VAR} with $VAR
+ var_lookup = "$" + var_lookup[2:-1]
+ if var_lookup in manifest:
+ json_str = json_str.replace(var.group(), manifest[var_lookup])
+
+ return json_str
+
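+# Example (illustrative): both the $VAR and ${VAR} forms are replaced; variables missing from the manifest
+# are left untouched so a later pass (or an error) can deal with them:
+#
+#   __find_variables('$DATA/nodes.h5', {'$DATA': '/data'})     # -> '/data/nodes.h5'
+#   __find_variables('${DATA}/nodes.h5', {'$DATA': '/data'})   # -> '/data/nodes.h5'
+#   __find_variables('$MISSING/nodes.h5', {'$DATA': '/data'})  # -> '$MISSING/nodes.h5'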
+
+class ConfigDict(dict):
+ def __init__(self, *args, **kwargs):
+ self.update(*args, **kwargs)
+ self._env_built = False
+ self._io = None
+
+ self._node_set = {}
+ self._load_node_set()
+
+ @property
+ def io(self):
+ if self._io is None:
+ self._io = io
+ return self._io
+
+ @io.setter
+ def io(self, io):
+ self._io = io
+
+ @property
+ def run(self):
+ return self['run']
+
+ @property
+ def tstart(self):
+ return self.run.get('tstart', 0.0)
+
+ @property
+ def tstop(self):
+ return self.run['tstop']
+
+ @property
+ def dt(self):
+ return self.run.get('dt', 0.1)
+
+ @property
+ def spike_threshold(self):
+ return self.run.get('spike_threshold', -15.0)
+
+ @property
+ def dL(self):
+ return self.run.get('dL', 20.0)
+
+ @property
+ def gid_mappings(self):
+ return self.get('gid_mapping_file', None)
+
+ @property
+ def block_step(self):
+ return self.run.get('nsteps_block', 5000)
+
+ @property
+ def calc_ecp(self):
+ return self.run.get('calc_ecp', False)
+
+ @property
+ def conditions(self):
+ return self['conditions']
+
+ @property
+ def celsius(self):
+ return self.conditions['celsius']
+
+ @property
+ def v_init(self):
+ return self.conditions['v_init']
+
+ @property
+ def path(self):
+ return self['config_path']
+
+ @property
+ def output(self):
+ return self['output']
+
+ @property
+ def output_dir(self):
+ return self.output['output_dir']
+
+ @property
+ def overwrite_output(self):
+ return self.output.get('overwrite_output_dir', False)
+
+ @property
+ def log_file(self):
+ return self.output['log_file']
+
+ @property
+ def components(self):
+ return self.get('components', {})
+
+ @property
+ def morphologies_dir(self):
+ return self.components['morphologies_dir']
+
+ @property
+ def synaptic_models_dir(self):
+ return self.components['synaptic_models_dir']
+
+ @property
+ def point_neuron_models_dir(self):
+ return self.components['point_neuron_models_dir']
+
+ @property
+ def mechanisms_dir(self):
+ return self.components['mechanisms_dir']
+
+ @property
+ def biophysical_neuron_models_dir(self):
+ return self.components['biophysical_neuron_models_dir']
+
+ @property
+ def templates_dir(self):
+ return self.components.get('templates_dir', None)
+
+ @property
+ def with_networks(self):
+ return 'networks' in self and len(self.nodes) > 0
+
+ @property
+ def networks(self):
+ return self['networks']
+
+ @property
+ def nodes(self):
+ return self.networks.get('nodes', [])
+
+ @property
+ def edges(self):
+ return self.networks.get('edges', [])
+
+ @property
+ def reports(self):
+ return self.get('reports', {})
+
+ @property
+ def inputs(self):
+ return self.get('inputs', {})
+
+ @property
+ def node_sets(self):
+ return self._node_set
+
+ @property
+ def spikes_file(self):
+ return os.path.join(self.output_dir, self.output['spikes_file'])
+
+ def _load_node_set(self):
+ if 'node_sets_file' in self.keys():
+ node_set_val = self['node_sets_file']
+ elif 'node_sets' in self.keys():
+ node_set_val = self['node_sets']
+ else:
+ self._node_set = {}
+ return
+
+ if isinstance(node_set_val, dict):
+ self._node_set = node_set_val
+ else:
+ try:
+ self._node_set = json.load(open(node_set_val, 'r'))
+ except Exception as e:
+ io.log_exception('Unable to load node_sets_file {}'.format(node_set_val))
+
+ def copy_to_output(self):
+ copy_config(self)
+
+ def get_modules(self, module_name):
+ return [report for report in self.reports.values() if report['module'] == module_name]
+
+ def _set_logging(self):
+ """Check if log-level and/or log-format string is being changed through the config"""
+ output_sec = self.output
+ if 'log_format' in output_sec:
+ self._io.set_log_format(output_sec['log_format'])
+
+ if 'log_level' in output_sec:
+ self._io.set_log_level(output_sec['log_level'])
+
+ if 'log_to_console' in output_sec:
+ self._io.log_to_console = output_sec['log_to_console']
+
+ if 'quiet_simulator' in output_sec and output_sec['quiet_simulator']:
+ self._io.quiet_simulator()
+
+ def build_env(self):
+ if self._env_built:
+ return
+
+ self._set_logging()
+ self.io.setup_output_dir(self.output_dir, self.log_file, self.overwrite_output)
+ self.copy_to_output()
+ self._env_built = True
+
+ @staticmethod
+ def get_validator():
+ raise NotImplementedError
+
+ @classmethod
+ def from_json(cls, config_file, validate=False):
+ validator = cls.get_validator() if validate else None
+ return cls(from_json(config_file, validator))
+
+ @classmethod
+ def from_dict(cls, config_dict, validate=False):
+ validator = cls.get_validator() if validate else None
+ return cls(from_dict(config_dict, validator))
+
+ @classmethod
+ def from_yaml(cls, config_file, validate=False):
+ raise NotImplementedError
+
+ @classmethod
+ def load(cls, config_file, validate=False):
+ # Implement factory method that can resolve the format/type of input configuration.
+ if isinstance(config_file, dict):
+ return cls.from_dict(config_file, validate)
+ elif isinstance(config_file, string_types):
+ if config_file.endswith('yml') or config_file.endswith('yaml'):
+ return cls.from_yaml(config_file, validate)
+ else:
+ return cls.from_json(config_file, validate)
+ else:
+ raise Exception('Unable to load configuration of type {}'.format(type(config_file)))
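+
+# Example usage (illustrative): load a config, then build the output environment before simulating:
+#
+#   conf = ConfigDict.load('config.json')   # also accepts a plain dict
+#   conf.build_env()                        # sets up logging, creates output_dir, copies the config
+#   print(conf.tstop, conf.dt, conf.spikes_file)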
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/graph.py b/bmtk-vb/build/lib/bmtk/simulator/utils/graph.py
new file mode 100644
index 0000000..629ea1d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/graph.py
@@ -0,0 +1,408 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+import ast
+import numpy as np
+
+from six import string_types
+
+from . import config as cfg
+from .property_maps import NodePropertyMap, EdgePropertyMap
+from bmtk.utils import sonata
+
+
+"""Creates a graph of nodes and edges from multiple network files for all simulators.
+
+Consists of edges and nodes. All classes are abstract and should be reimplemented by a specific simulator. Also
+contains base factor methods for building a network from a config file (or other).
+"""
+
+
+class SimEdge(object):
+ def __init__(self, original_params, dynamics_params):
+ self._orig_params = original_params
+ self._dynamics_params = dynamics_params
+ self._updated_params = {'dynamics_params': self._dynamics_params}
+
+ @property
+ def edge_type_id(self):
+ return self._orig_params['edge_type_id']
+
+ def __getitem__(self, item):
+ if item in self._updated_params:
+ return self._updated_params[item]
+ else:
+ return self._orig_params[item]
+
+
+class SimNode(object):
+ def __init__(self, node_id, graph, network, params):
+ self._node_id = node_id
+ self._graph = graph
+ self._graph_params = params
+ self._node_type_id = params['node_type_id']
+ self._network = network
+ self._updated_params = {}
+
+ self._model_params = {}
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def node_type_id(self):
+ return self._node_type_id
+
+ @property
+ def network(self):
+ """Name of network node belongs too."""
+ return self._network
+
+ @property
+ def model_params(self):
+ """Parameters (json file, nml, dictionary) that describe a specific node"""
+ return self._model_params
+
+ @model_params.setter
+ def model_params(self, value):
+ self._model_params = value
+
+ def __contains__(self, item):
+ return item in self._updated_params or item in self._graph_params
+
+ def __getitem__(self, item):
+ if item in self._updated_params:
+ return self._updated_params[item]
+ else:
+ return self._graph_params[item]
+
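+# Example (illustrative; values are made up): parameter lookups on a node check simulator-assigned overrides
+# before falling back to the values loaded from the network files:
+#
+#   node = SimNode(0, graph, 'cortex', {'node_type_id': 101, 'rotation': 0.0})
+#   node._updated_params['rotation'] = 1.57
+#   node['rotation']       # -> 1.57 (override wins)
+#   node['node_type_id']   # -> 101  (falls back to the network params)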
+
+class SimGraph(object):
+ model_type_col = 'model_type'
+
+ def __init__(self):
+ self._components = {} # components table, i.e. paths to model files.
+ self._io = None # TODO: create default io module (without mpi)
+
+ self._node_property_maps = {}
+ self._edge_property_maps = {}
+
+ self._node_populations = {}
+ self._internal_populations_map = {}
+ self._virtual_populations_map = {}
+
+ self._virtual_cells_nid = {}
+
+ self._recurrent_edges = {}
+ self._external_edges = {}
+
+ @property
+ def io(self):
+ return self._io
+
+ @property
+ def internal_pop_names(self):
+ return list(self._internal_populations_map.keys())
+
+ @property
+ def node_populations(self):
+ return list(self._node_populations.keys())
+
+ def get_component(self, key):
+ """Get the value of item in the components dictionary.
+
+ :param key: name of component
+ :return: value assigned to component, or None if it is not set
+ """
+ return self._components.get(key, None)
+
+ def add_component(self, key, value):
+ """Add a component key-value pair
+
+ :param key: name of component
+ :param value: value
+ """
+ self._components[key] = value
+
+ def _from_json(self, file_name):
+ return cfg.from_json(file_name)
+
+ def _validate_components(self):
+ """Make sure various components (i.e. paths) exists before attempting to build the graph."""
+ return True
+
+ def _create_nodes_prop_map(self, grp):
+ return NodePropertyMap()
+
+ def _create_edges_prop_map(self, grp):
+ return EdgePropertyMap()
+
+ def __avail_model_types(self, population):
+ model_types = set()
+ for grp in population.groups:
+ if self.model_type_col not in grp.all_columns:
+ self.io.log_exception('model_type is missing from nodes.')
+
+ model_types.update(set(np.unique(grp.get_values(self.model_type_col))))
+ return model_types
+
+ def _preprocess_node_types(self, node_population):
+ # TODO: The following figures out which node-type-ids are actually used. For memory and speed it may be
+ # better to just process them all
+ node_type_ids = node_population.type_ids
+ # TODO: Verify all the node_type_ids are in the table
+ node_types_table = node_population.types_table
+
+ # TODO: Convert model_type to an enum
+ morph_dir = self.get_component('morphologies_dir')
+ if morph_dir is not None and 'morphology' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ if node_type['morphology'] is None:
+ continue
+ # TODO: Check that the file exists
+ # TODO: See if absolute path is stored in csv
+ node_type['morphology'] = os.path.join(morph_dir, node_type['morphology'])
+
+ if 'dynamics_params' in node_types_table.columns and 'model_type' in node_types_table.columns:
+ for nt_id in node_type_ids:
+ node_type = node_types_table[nt_id]
+ dynamics_params = node_type['dynamics_params']
+ if isinstance(dynamics_params, dict):
+ continue
+
+ model_type = node_type['model_type']
+ if model_type == 'biophysical':
+ params_dir = self.get_component('biophysical_neuron_models_dir')
+ elif model_type == 'point_process':
+ params_dir = self.get_component('point_neuron_models_dir')
+ elif model_type == 'point_soma':
+ params_dir = self.get_component('point_neuron_models_dir')
+ else:
+ # Not sure what to do in this case, throw Exception?
+ params_dir = self.get_component('custom_neuron_models')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ node_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ self.io.log_exception('Could not find node dynamics_params file {}.'.format(params_path))
+
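+ # Example (illustrative; the file name is hypothetical): with components
+ # {'biophysical_neuron_models_dir': '/models/biophys'} and a node type with model_type='biophysical' and
+ # dynamics_params='472363762_fit.json', the step above replaces the file name with the parsed contents
+ # of /models/biophys/472363762_fit.json.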
+ def _preprocess_edge_types(self, edge_pop):
+ edge_types_table = edge_pop.types_table
+ edge_type_ids = np.unique(edge_pop.type_ids)
+
+ for et_id in edge_type_ids:
+ if 'dynamics_params' in edge_types_table.columns:
+ edge_type = edge_types_table[et_id]
+ dynamics_params = edge_type['dynamics_params']
+ params_dir = self.get_component('synaptic_models_dir')
+
+ params_path = os.path.join(params_dir, dynamics_params)
+
+ # see if we can load the dynamics_params as a dictionary. Otherwise just save the file path and let the
+ # cell_model loader function handle the extension.
+ try:
+ params_val = json.load(open(params_path, 'r'))
+ edge_type['dynamics_params'] = params_val
+ except Exception:
+ # TODO: Check dynamics_params before
+ self.io.log_exception('Could not find edge dynamics_params file {}.'.format(params_path))
+
+ # Split target_sections
+ if 'target_sections' in edge_type:
+ trg_sec = edge_type['target_sections']
+ if trg_sec is not None:
+ try:
+ edge_type['target_sections'] = ast.literal_eval(trg_sec)
+ except Exception as exc:
+ self.io.log_warning('Unable to split target_sections list {}'.format(trg_sec))
+ edge_type['target_sections'] = None
+
+ # Split target distances
+ if 'distance_range' in edge_type:
+ dist_range = edge_type['distance_range']
+ if dist_range is not None:
+ try:
+ # TODO: Make sure the distance_range has at most two values
+ edge_type['distance_range'] = json.loads(dist_range)
+ except Exception as e:
+ try:
+ edge_type['distance_range'] = [0.0, float(dist_range)]
+ except Exception as e:
+ self.io.log_warning('Unable to parse distance_range {}'.format(dist_range))
+ edge_type['distance_range'] = None
+
+ def external_edge_populations(self, src_pop, trg_pop):
+ return self._external_edges.get((src_pop, trg_pop), [])
+
+ def add_nodes(self, sonata_file, populations=None):
+ """Add nodes from a network to the graph.
+
+ :param sonata_file: A NodesFormat type object containing list of nodes.
+ :param populations: name/identifier of network. If none will attempt to retrieve from nodes object
+ """
+ nodes = sonata_file.nodes
+
+ selected_populations = nodes.population_names if populations is None else populations
+ for pop_name in selected_populations:
+ if pop_name not in nodes:
+ # when the user wants to simulate only a few of the populations in the file
+ continue
+
+ if pop_name in self.node_populations:
+ # Make sure there aren't any collisions
+ self.io.log_exception('There are multiple node populations with name {}.'.format(pop_name))
+
+ node_pop = nodes[pop_name]
+ self._preprocess_node_types(node_pop)
+ self._node_populations[pop_name] = node_pop
+
+ # Segregate into virtual populations and non-virtual populations
+ model_types = self.__avail_model_types(node_pop)
+ if 'virtual' in model_types:
+ self._virtual_populations_map[pop_name] = node_pop
+ self._virtual_cells_nid[pop_name] = {}
+ model_types -= set(['virtual'])
+ if model_types:
+ # We'll allow a population to have virtual and non-virtual nodes but it is not ideal
+ self.io.log_warning(('Node population {} contains both virtual and non-virtual nodes which can ' +
+ 'cause memory and build-time inefficiency. Consider separating virtual nodes ' +
+ 'into their own population').format(pop_name))
+
+ if model_types:
+ self._internal_populations_map[pop_name] = node_pop
+
+ self._node_property_maps[pop_name] = {grp.group_id: self._create_nodes_prop_map(grp)
+ for grp in node_pop.groups}
+
+ def build_nodes(self):
+ raise NotImplementedError
+
+ def build_recurrent_edges(self):
+ raise NotImplementedError
+
+ def add_edges(self, sonata_file, populations=None, source_pop=None, target_pop=None):
+ """
+
+ :param sonata_file:
+ :param populations:
+ :param source_pop:
+ :param target_pop:
+ :return:
+ """
+ edges = sonata_file.edges
+ selected_populations = edges.population_names if populations is None else populations
+
+ for pop_name in selected_populations:
+ if pop_name not in edges:
+ continue
+
+ edge_pop = edges[pop_name]
+ self._preprocess_edge_types(edge_pop)
+
+ # Check the source nodes exists
+ src_pop = source_pop if source_pop is not None else edge_pop.source_population
+ is_internal_src = src_pop in self._internal_populations_map.keys()
+ is_external_src = src_pop in self._virtual_populations_map.keys()
+
+ trg_pop = target_pop if target_pop is not None else edge_pop.target_population
+ is_internal_trg = trg_pop in self._internal_populations_map.keys()
+
+ if not is_internal_trg:
+ self.io.log_exception(('Node population {} does not exist (or consists of only virtual nodes). ' +
+ '{} edges cannot create connections.').format(trg_pop, pop_name))
+
+ if not (is_internal_src or is_external_src):
+ self.io.log_exception('Source node population {} not found. Please update {} edges'.format(src_pop,
+ pop_name))
+ if is_internal_src:
+ if trg_pop not in self._recurrent_edges:
+ self._recurrent_edges[trg_pop] = []
+ self._recurrent_edges[trg_pop].append(edge_pop)
+
+ if is_external_src:
+ if (src_pop, trg_pop) not in self._external_edges:
+ self._external_edges[(src_pop, trg_pop)] = []
+ self._external_edges[(src_pop, trg_pop)].append(edge_pop)
+
+ self._edge_property_maps[pop_name] = {grp.group_id: self._create_edges_prop_map(grp)
+ for grp in edge_pop.groups}
+
+ @classmethod
+ def from_config(cls, conf, **properties):
+ """Generates a graph structure from a json config file or dictionary.
+
+ :param conf: name of json config file, or a dictionary with config parameters
+ :param properties: optional properties.
+ :return: A graph object of type cls
+ """
+ graph = cls(**properties)
+ if isinstance(conf, string_types):
+ config = graph._from_json(conf)
+ elif isinstance(conf, dict):
+ config = conf
+ else:
+ graph.io.log_exception('Could not convert {} (type "{}") to json.'.format(conf, type(conf)))
+
+ run_dict = config['run']
+ if 'spike_threshold' in run_dict:
+ # TODO: FIX, spike-thresholds should be set by simulation code, allow for diff. values based on node-group
+ graph.spike_threshold = run_dict['spike_threshold']
+ if 'dL' in run_dict:
+ graph.dL = run_dict['dL']
+
+ if not config.with_networks:
+ graph.io.log_exception('Could not find any network files. Unable to build network.')
+
+ # load components
+ for name, value in config.components.items():
+ graph.add_component(name, value)
+ graph._validate_components()
+
+ # load nodes
+ for node_dict in config.nodes:
+ nodes_net = sonata.File(data_files=node_dict['nodes_file'], data_type_files=node_dict['node_types_file'])
+ graph.add_nodes(nodes_net)
+
+ # load edges
+ for edge_dict in config.edges:
+ target_network = edge_dict['target'] if 'target' in edge_dict else None
+ source_network = edge_dict['source'] if 'source' in edge_dict else None
+ edge_net = sonata.File(data_files=edge_dict['edges_file'], data_type_files=edge_dict['edge_types_file'])
+ graph.add_edges(edge_net, source_pop=source_network, target_pop=target_network)
+
+ '''
+ graph.io.log_info('Building cells.')
+ graph.build_nodes()
+
+ graph.io.log_info('Building recurrent connections')
+ graph.build_recurrent_edges()
+ '''
+
+ return graph
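+
+# Example usage (illustrative): build a graph from a config file. A concrete simulator would subclass
+# SimGraph and implement build_nodes()/build_recurrent_edges(); 'MySimGraph' is hypothetical:
+#
+#   graph = MySimGraph.from_config('config.json')
+#   graph.node_populations   # -> e.g. ['cortex', 'thalamus']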
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/io.py b/bmtk-vb/build/lib/bmtk/simulator/utils/io.py
new file mode 100644
index 0000000..b6e5e5c
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/io.py
@@ -0,0 +1,54 @@
+import os
+import shutil
+import logging
+
+
+class IOUtils(object):
+ def __init__(self):
+ self.mpi_rank = 0
+ self.mpi_size = 1
+
+ self._log_format = '%(asctime)s [%(levelname)s] %(message)s'
+ self._logger = logging.getLogger()
+ self.set_console_logging()
+
+ @property
+ def logger(self):
+ return self._logger
+
+ def set_console_logging(self):
+ pass
+
+ def barrier(self):
+ pass
+
+ def quit(self):
+ exit(1)
+
+ def setup_output_dir(self, config_dir, log_file, overwrite=True):
+ if self.mpi_rank == 0:
+ # Create output directory
+ if os.path.exists(config_dir):
+ if overwrite:
+ shutil.rmtree(config_dir)
+ else:
+ self.log_exception('ERROR: Directory already exists (remove or set to overwrite).')
+ os.makedirs(config_dir)
+
+ # Create log file
+ if log_file is not None:
+ file_logger = logging.FileHandler(log_file)
+ file_logger.setFormatter(logging.Formatter(self._log_format))
+ self.logger.addHandler(file_logger)
+ self.log_info('Created log file')
+
+ self.barrier()
+
+ def log_info(self, message, all_ranks=False):
+ print(message)
+
+ def log_warning(self, message, all_ranks=False):
+ print(message)
+
+ def log_exception(self, message):
+ raise Exception(message)
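+
+# A minimal sketch (an assumption, not part of this module) of an MPI-aware subclass: overriding the
+# rank/size fields means setup_output_dir() only touches the filesystem on rank 0:
+#
+#   from mpi4py import MPI
+#
+#   class MPIIOUtils(IOUtils):
+#       def __init__(self):
+#           super(MPIIOUtils, self).__init__()
+#           self.mpi_rank = MPI.COMM_WORLD.Get_rank()
+#           self.mpi_size = MPI.COMM_WORLD.Get_size()
+#
+#       def barrier(self):
+#           MPI.COMM_WORLD.Barrier()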
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/load_spikes.py b/bmtk-vb/build/lib/bmtk/simulator/utils/load_spikes.py
new file mode 100644
index 0000000..8c16caf
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/load_spikes.py
@@ -0,0 +1,91 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import h5py
+import numpy as np
+import os
+import datetime
+
+
+def load_spikes_ascii(file_name):
+ '''
+ Load spikes from an ascii (two-column: time, gid) spike file
+ '''
+ t = os.path.getmtime(file_name)
+ print(file_name, "modified on:", datetime.datetime.fromtimestamp(t))
+ spk_ts, spk_gids = np.loadtxt(file_name, dtype='float32,int', unpack=True)
+
+ spk_ts = spk_ts*1E-3  # convert times (assumed ms) to seconds
+
+ print('loaded spikes from ascii')
+
+ return [spk_ts, spk_gids]
+
+
+def load_spikes_h5(file_name):
+ '''
+ Load spikes from an hdf5 spike file
+ '''
+ t = os.path.getmtime(file_name)
+ print(file_name, "modified on:", datetime.datetime.fromtimestamp(t))
+
+ with h5py.File(file_name, 'r') as h5:
+ spk_ts = h5["time"][...]*1E-3  # convert times (assumed ms) to seconds
+ spk_gids = h5["gid"][...]
+
+ print('loaded spikes from hdf5')
+
+ return [spk_ts, spk_gids]
+
+
+def load_spikes_nwb(file_name, trial_name):
+ '''
+ Load spikes from the nwb file
+
+ Returns:
+ -------
+ spike_times: list
+ spike_gids: list
+ '''
+ with h5py.File(file_name, 'r') as f5:
+ spike_trains_handle = f5['processing/%s/spike_train' % trial_name]
+
+ spike_times = []
+ spike_gids = []
+
+ for gid in spike_trains_handle.keys():
+ times_gid = spike_trains_handle['%d/data' % int(gid)][:]
+ spike_times.extend(times_gid)
+ spike_gids.extend([int(gid)]*len(times_gid))
+
+ return [np.array(spike_times)*1E-3, np.array(spike_gids)]
+
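+# Example usage (illustrative; paths and the trial name are hypothetical):
+#
+#   spk_ts, spk_gids = load_spikes_h5('output/spikes.h5')
+#   spk_ts, spk_gids = load_spikes_nwb('output/results.nwb', 'trial_0')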
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/nwb.py b/bmtk-vb/build/lib/bmtk/simulator/utils/nwb.py
new file mode 100644
index 0000000..4d18d16
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/nwb.py
@@ -0,0 +1,530 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import copy
+import numpy as np
+import os
+import h5py
+import time
+import uuid
+import tempfile
+from bmtk.analyzer.visualization.widgets import PlotWidget, MovieWidget
+
+__version__ = '0.1.0'
+
+allowed_dimensions = {'firing_rate': ('hertz',),
+ 'time': ('second', 'millisecond'),
+ 'brightness': ('intensity',),
+ 'distance': ('pixel',),
+ 'index': ('gid',),
+ 'intensity': ('bit',None),
+ 'voltage': ('volt',),
+ 'current': ('ampere',),
+ None: (None,),
+ 'dev': ('dev',)}
+
+allowed_groups = {'firing_rate': ('firing_rate',),
+ 'spike_train': ('index', 'time'),
+ 'grayscale_movie': ('intensity',),
+ 'time_series': ('voltage', 'current'),
+ 'dev': ('dev',)}
+
+top_level_data = ['file_create_date',
+ 'stimulus',
+ 'acquisition',
+ 'analysis',
+ 'processing',
+ 'epochs',
+ 'general',
+ 'session_description',
+ 'nwb_version',
+ 'identifier']
+
+
+def open_file(file_name):
+ return h5py.File(file_name, 'a')  # explicit mode; newer h5py versions no longer default to append
+
+
+class Scale(object):
+ def __init__(self, scale_range, dimension, unit):
+ assert dimension in allowed_dimensions
+ assert unit in allowed_dimensions[dimension]
+
+ self.scale_range = scale_range
+ self.dimension = dimension
+ self.unit = unit
+ self._hdf5_location = None
+
+ def __eq__(self, other):
+ d = self.dimension == other.dimension
+ u = self.unit == other.unit
+ s = np.allclose(self.scale_range, other.scale_range)
+ return d and u and s
+
+ @property
+ def data(self):
+ return self.scale_range
+
+
+class DtScale(object):
+ def __init__(self, dt, dimension, unit):
+ assert dimension in allowed_dimensions
+ assert unit in allowed_dimensions[dimension]
+
+ self.dt = dt
+ self.dimension = dimension
+ self.unit = unit
+ self._hdf5_location = None
+
+ def __eq__(self, other):
+ d = self.dimension == other.dimension
+ u = self.unit == other.unit
+ s = np.allclose(self.dt, other.dt)
+ return d and u and s
+
+ @property
+ def data(self):
+ return self.dt
+
+
+class NullScale(object):
+
+ def __init__(self):
+ self._hdf5_location = None
+ self.data = None
+ self.dimension = None
+ self.unit = None
+
+
+class Data(object):
+ def __init__(self, data, dimension, unit, scales, metadata):
+ assert dimension in allowed_dimensions
+ assert unit in allowed_dimensions[dimension]
+ if isinstance(scales, (Scale, DtScale)):
+ assert len(data.shape) == 1
+ scales = (scales,)
+
+ for key in metadata:
+ assert isinstance(key, str)
+ for ii, scale in enumerate(scales):
+ if isinstance(scale, Scale):
+ assert len(scale.scale_range) == data.shape[ii]
+ elif isinstance(scale, DtScale):
+ assert isinstance(scale.dt, (float, np.floating)) and scale.dt > 0
+ else:
+ raise Exception
+
+ if len(scales) == 0:
+ scales = [NullScale()]
+
+ metadata = copy.copy(metadata)
+ self.data = data
+ self.scales = scales
+ self.dimension = dimension
+ self.unit = unit
+ self.metadata = metadata
+ self._hdf5_location = None
+
+ def __eq__(self, other):
+ da = np.allclose(self.data, other.data)
+ d = self.dimension == other.dimension
+ u = self.unit == other.unit
+ s = [s1 == s2 for s1, s2 in zip(self.scales, other.scales)].count(True) == len(self.scales)
+ if len(self.metadata) != len(other.metadata):
+ m = False
+ else:
+ m = all(key in other.metadata and other.metadata[key] == self.metadata[key]
+ for key in self.metadata)
+ return da and d and u and s and m
+
+ @staticmethod
+ def _get_from_group(object_class, parent_group, group_name, ii=0):
+
+ data_group = parent_group['%s/%s' % (group_name, ii)]
+ data, scales, dimension, unit, metadata = _get_data(data_group)
+
+ assert dimension in allowed_groups[object_class.group]
+
+ if unit == "None":
+ unit = None
+ scale_list = []
+ for scale in scales:
+ if scale.attrs['type'] == 'Scale':
+ curr_scale = Scale(scale, scale.attrs['dimension'], scale.attrs['unit'])
+ elif scale.attrs['type'] == 'DtScale':
+ curr_scale = DtScale(float(scale[()]), scale.attrs['dimension'], scale.attrs['unit'])
+ elif scale.attrs['type'] == 'NullScale':
+ curr_scale = None
+ else:
+ raise Exception
+ if curr_scale is not None:
+ scale_list.append(curr_scale)
+
+ if len(scale_list) == 1:
+ scale_list = scale_list[0]
+
+ return object_class(data, dimension=dimension, unit=unit, scale=scale_list, metadata=metadata)
+
+ def add_to_stimulus(self, f, compression='gzip', compression_opts=4):
+ self._add_to_group(f, 'stimulus', self.__class__.group, compression=compression,
+ compression_opts=compression_opts)
+
+ @classmethod
+ def get_stimulus(cls, f, ii=None):
+ if ii is None:
+ return_data = [cls.get_stimulus(f, ii) for ii in range(len(f['stimulus/%s' % cls.group]))]
+ if len(return_data) == 1:
+ return_data = return_data[0]
+ return return_data
+ else:
+ return Data._get_from_group(cls, f['stimulus'], cls.group, ii=ii)
+
+ def add_to_acquisition(self, f, compression='gzip', compression_opts=4):
+ self._add_to_group(f, 'acquisition', self.__class__.group, compression=compression,
+ compression_opts=compression_opts)
+
+ @classmethod
+ def get_acquisition(cls, f, ii=None):
+ if ii is None:
+ return_data = [cls.get_acquisition(f, ii) for ii in range(len(f['acquisition/%s' % cls.group]))]
+ if len(return_data) == 1:
+ return_data = return_data[0]
+ return return_data
+
+ else:
+ return Data._get_from_group(cls, f['acquisition'], cls.group, ii=ii)
+
+ def add_to_processing(self, f, processing_submodule_name):
+ if processing_submodule_name not in f['processing']:
+ f['processing'].create_group(processing_submodule_name)
+ return self._add_to_group(f, 'processing/%s' % processing_submodule_name, self.__class__.group)
+
+ @classmethod
+ def get_processing(cls, f, subgroup_name, ii=None):
+ if ii is None:
+ return_data = {}
+ for ii in range(len(f['processing/%s/%s' % (subgroup_name, cls.group)])):
+ return_data[ii] = cls.get_processing(f, subgroup_name, ii)
+ return return_data
+
+ else:
+ return Data._get_from_group(cls, f['processing/%s' % subgroup_name], cls.group, ii=ii)
+
+ def add_to_analysis(self, f, analysis_submodule_name):
+ if analysis_submodule_name not in f['analysis']:
+ f['analysis'].create_group(analysis_submodule_name)
+ return self._add_to_group(f, 'analysis/%s' % analysis_submodule_name, self.__class__.group)
+
+ @classmethod
+ def get_analysis(cls, f, subgroup_name, ii=None):
+ if ii is None:
+ return [cls.get_analysis(f, subgroup_name, ii)
+ for ii in range(len(f['analysis/%s/%s' % (subgroup_name, cls.group)]))]
+ else:
+ return Data._get_from_group(cls, f['analysis/%s' % subgroup_name], cls.group, ii=ii)
+
+ def _add_to_group(self, f, parent_name, group_name, compression='gzip', compression_opts=4):
+ assert group_name in allowed_groups
+ assert self.dimension in allowed_groups[group_name]
+ try:
+ parent_group = f[parent_name]
+ except ValueError:
+ try:
+ file_name = f.filename
+ raise Exception('Parent group: %s not found in file %s' % (parent_name, file_name))
+ except ValueError:
+ raise Exception('File not valid: %s' % f)
+
+ if self.__class__.group in parent_group:
+ subgroup = parent_group[self.__class__.group]
+ int_group_name = str(len(subgroup))
+ else:
+ subgroup = parent_group.create_group(self.__class__.group)
+ int_group_name = '0'
+
+ # Create external link:
+ if isinstance(self.data, h5py.Dataset):
+ if subgroup.file == self.data.file:
+ raise NotImplementedError
+ else:
+ return _set_data_external_link(subgroup, int_group_name, self.data.parent)
+ else:
+ dataset_group = subgroup.create_group(int_group_name)
+
+ # All this allows for shared scale management:
+ scale_group = None
+ scale_list = []
+ for ii, scale in enumerate(self.scales):
+ if isinstance(scale, (Scale, DtScale, NullScale)):
+ if scale._hdf5_location is None:
+ # Lazily create the shared 'scale' group, then store the scale and remember its location
+ if scale_group is None:
+ scale_group = dataset_group.create_group('scale')
+ curr_scale = _set_scale(scale_group, 'dimension_%s' % ii, scale.data, scale.dimension,
+ scale.unit, scale.__class__.__name__)
+ scale._hdf5_location = curr_scale
+ else:
+ curr_scale = scale._hdf5_location
+ elif isinstance(scale, h5py.Dataset):
+ curr_scale = scale
+ else:
+ raise Exception('Unrecognized scale type: %s' % type(scale))
+
+ scale_list.append(curr_scale)
+
+ _set_data(subgroup, dataset_group.name, self.data, scale_list, self.dimension, self.unit,
+ metadata=self.metadata, compression=compression, compression_opts=compression_opts)
+
+
+class FiringRate(Data):
+ group = 'firing_rate'
+
+ def __init__(self, data, **kwargs):
+ dimension = 'firing_rate'
+ unit = 'hertz'
+ scale = kwargs.get('scale')
+ metadata = kwargs.get('metadata', {})
+ assert isinstance(scale, (Scale, DtScale))
+ super(FiringRate, self).__init__(data, dimension, unit, scale, metadata)
+
+ def get_widget(self, **kwargs):
+ rate_data = self.data[:]
+ t_range = self.scales[0].data[:]
+ return PlotWidget(t_range, rate_data, metadata=self.metadata, **kwargs)
+
+
+class Dev(Data):
+ group = 'dev'
+
+ def __init__(self, data, **kwargs):
+ dimension = kwargs.get('dimension')
+ unit = kwargs.get('unit')
+ scale = kwargs.get('scale')
+ metadata = kwargs.get('metadata', {})
+
+ super(Dev, self).__init__(data, dimension, unit, scale, metadata)
+
+
+class TimeSeries(Data):
+ group = 'time_series'
+
+ def __init__(self, data, **kwargs):
+ dimension = kwargs.get('dimension')
+ unit = kwargs.get('unit')
+ scale = kwargs.get('scale')
+ metadata = kwargs.get('metadata', {})
+
+ assert isinstance(scale, (Scale, DtScale))
+ assert scale.dimension == 'time'
+ super(TimeSeries, self).__init__(data, dimension, unit, scale, metadata)
+
+
+class SpikeTrain(Data):
+ group = 'spike_train'
+
+ def __init__(self, data, **kwargs):
+ scales = kwargs.get('scale', [])
+ unit = kwargs.get('unit', 'gid')
+ metadata = kwargs.get('metadata', {})
+
+ if isinstance(scales, Scale):
+ super(SpikeTrain, self).__init__(data, 'index', unit, scales, metadata)
+ elif len(scales) == 0:
+ assert unit in allowed_dimensions['time']
+ scales = []
+ super(SpikeTrain, self).__init__(data, 'time', unit, scales, metadata)
+ else:
+ assert len(scales) == 1 and isinstance(scales[0], Scale)
+ super(SpikeTrain, self).__init__(data, 'index', unit, scales, metadata)
+
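+# Example (illustrative; times are made up): store a flat spike-time train, in seconds, into the
+# 'processing' section of an open nwb handle f (e.g. one returned by create_blank_file() below):
+#
+#   st = SpikeTrain(np.array([0.01, 0.5, 1.2]), unit='second')
+#   st.add_to_processing(f, 'trial_0')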
+
+class GrayScaleMovie(Data):
+ group = 'grayscale_movie'
+
+ def __init__(self, data, **kwargs):
+ dimension = 'intensity'
+ unit = kwargs.get('unit', None)
+ scale = kwargs.get('scale')
+ metadata = kwargs.get('metadata', {})
+
+ super(GrayScaleMovie, self).__init__(data, dimension, unit, scale, metadata)
+
+ def get_widget(self, ax=None):
+ data = self.data[:]
+ t_range = self.scales[0].data[:]
+ return MovieWidget(t_range=t_range, data=data, ax=ax, metadata=self.metadata)
+
+
+def get_temp_file_name():
+ f = tempfile.NamedTemporaryFile(delete=False)
+ temp_file_name = f.name
+ f.close()
+ os.remove(f.name)
+ return temp_file_name
+
+
+def create_blank_file(save_file_name=None, force=False, session_description='', close=False):
+
+ if save_file_name is None:
+ save_file_name = get_temp_file_name()
+
+ if not force:
+ f = h5py.File(save_file_name, 'w-')
+ else:
+ if os.path.exists(save_file_name):
+ os.remove(save_file_name)
+ f = h5py.File(save_file_name, 'w')
+
+ f.create_group('acquisition')
+ f.create_group('analysis')
+ f.create_group('epochs')
+ f.create_group('general')
+ f.create_group('processing')
+ f.create_group('stimulus')
+
+ f.create_dataset("file_create_date", data=np.string_(time.ctime()))
+ f.create_dataset("session_description", data=session_description)
+ f.create_dataset("nwb_version", data='iSee_%s' % __version__)
+ f.create_dataset("identifier", data=str(uuid.uuid4()))
+
+ if close:
+ f.close()
+ else:
+ return f
+
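+# Example usage (illustrative): create a blank nwb file and inspect its top-level structure:
+#
+#   f = create_blank_file(session_description='demo session')
+#   sorted(f.keys())   # -> ['acquisition', 'analysis', 'epochs', 'file_create_date', ...]
+#   f.close()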
+
+def assert_subgroup_exists(child_name, parent):
+ if child_name not in parent:
+ raise RuntimeError('Group: %s has no subgroup %s' % (parent.name, child_name))
+
+
+def _set_data_external_link(parent_group, dataset_name, data):
+ parent_group[dataset_name] = h5py.ExternalLink(data.file.filename, data.name)
+
+
+def _set_scale_external_link(parent_group, name, scale):
+ parent_group[name] = h5py.ExternalLink(scale.file.filename, scale.name)
+ return parent_group[name]
+
+
+def _set_data(parent_group, dataset_name, data, scales, dimension, unit, force=False, metadata=None,
+ compression='gzip', compression_opts=4):
+ metadata = metadata if metadata is not None else {}  # avoid the shared mutable default argument
+ # Check inputs:
+ if isinstance(scales, h5py.Dataset):
+ scales = (scales,)
+ else:
+ assert isinstance(scales, (list, tuple))
+
+ assert data.ndim == len(scales)
+ assert dimension in allowed_dimensions
+ assert unit in allowed_dimensions[dimension]
+ for ii, scale in enumerate(scales):
+ assert len(scale.shape) in (0, 1)
+ check_dimension = str(scale.attrs['dimension'])
+ if check_dimension == 'None':
+ check_dimension = None
+ check_unit = scale.attrs['unit']
+ if check_unit == 'None':
+ check_unit = None
+ assert check_dimension in allowed_dimensions
+ assert check_unit in allowed_dimensions[check_dimension]
+ if len(scale.shape) == 1:
+ assert len(scale) == data.shape[ii] or len(scale) == 0
+
+ if dataset_name not in parent_group:
+ dataset_group = parent_group.create_group(dataset_name)
+ else:
+ dataset_group = parent_group[dataset_name]
+
+ for key, val in metadata.items():
+ assert key not in dataset_group.attrs
+ dataset_group.attrs[key] = val
+
+ if 'data' in dataset_group:
+ if not force:
+ raise IOError('Field "stimulus" of %s is not empty; override with force=True' % parent_group.name)
+ else:
+ del dataset_group['data']
+
+ dataset = dataset_group.create_dataset(name='data', data=data, compression=compression,
+ compression_opts=compression_opts)
+
+ for ii, scale in enumerate(scales):
+ dataset.dims[ii].label = scale.attrs['dimension']
+ dataset.dims[ii].attach_scale(scale)
+
+ dataset.attrs.create('dimension', str(dimension))
+ dataset.attrs.create('unit', str(unit))
+
+ return dataset
+
+
+def _set_scale(parent_group, name, scale, dimension, unit, scale_class_name):
+ assert dimension in allowed_dimensions
+ assert unit in allowed_dimensions[dimension]
+
+ if scale is None:
+ scale = parent_group.create_dataset(name=name, shape=(0,))
+ else:
+ scale = np.array(scale)
+ assert scale.ndim in (0, 1)
+ scale = parent_group.create_dataset(name=name, data=scale)
+ scale.attrs['dimension'] = str(dimension)
+ scale.attrs['unit'] = str(unit)
+ scale.attrs['type'] = scale_class_name
+
+ return scale
+
+
+def _get_data(dataset_group):
+ data = dataset_group['data']
+ dimension = dataset_group['data'].attrs['dimension']
+ unit = dataset_group['data'].attrs['unit']
+ scales = tuple([dim[0] for dim in dataset_group['data'].dims])
+ metadata = dict(dataset_group.attrs)
+
+ return data, scales, dimension, unit, metadata
+
+
+def get_stimulus(f):
+ category = 'stimulus'
+ for parent_group in f[category]:
+ for data_group in f[category][parent_group]:
+ print(f[category][parent_group][data_group])
+
+
+def add_external_links(parent_group, external_file_name, external_group_name_list=top_level_data):
+ for subgroup in external_group_name_list:
+ parent_group[subgroup] = h5py.ExternalLink(external_file_name, subgroup)
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/property_maps.py b/bmtk-vb/build/lib/bmtk/simulator/utils/property_maps.py
new file mode 100644
index 0000000..9a22515
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/property_maps.py
@@ -0,0 +1,7 @@
+class NodePropertyMap(object):
+ pass
+
+
+class EdgePropertyMap(object):
+ pass
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/sim_validator.py b/bmtk-vb/build/lib/bmtk/simulator/utils/sim_validator.py
new file mode 100644
index 0000000..447dda1
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/sim_validator.py
@@ -0,0 +1,126 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+from jsonschema import Draft4Validator
+from jsonschema.exceptions import ValidationError
+import pandas as pd
+
+
+class SimConfigValidator(Draft4Validator):
+ """
+ A JSON Schema validator class that will store a schema (passed into the constructor) and validate a json file.
+ It has all the functionality of the JSONSchema format, plus includes special types and parameters like making
+ sure a value is a file or directory type, checking csv files, etc.
+
+ To Use:
+ validator = SimConfigValidator(json_schema.json)
+ validator.validate(file.json)
+ """
+
+ def __init__(self, schema, types=(), resolver=None, format_checker=None, file_formats=()):
+ super(SimConfigValidator, self).__init__(schema, types, resolver, format_checker)
+
+ # custom parameter
+ self.VALIDATORS["exists"] = self._check_path
+
+ self._file_formats = {} # the "file_format" property the validity of a (non-json) file.
+ for (name, schema) in file_formats:
+ self._file_formats[name] = self._parse_file_formats(schema)
+ self.VALIDATORS["file_format"] = self._validate_file
+
+ def is_type(self, instance, dtype):
+ # override type since checking for file and directory type is potentially more complicated.
+ if dtype == "directory":
+ return self._is_directory_type(instance)
+
+ elif dtype == "file":
+ return self._is_file_type(instance)
+
+ else:
+ return super(SimConfigValidator, self).is_type(instance, dtype)
+
+ def _is_directory_type(self, instance):
+ """Check if instance value is a valid directory file path name
+
+ :param instance: string that represents a directory path
+ :return: True if instance is a valid dir path (even if it doesn't exist).
+ """
+ # Always return true for now; rely on the "exists" property (_check_path) to actually determine if the file exists.
+ # TODO: check that instance string is a valid path string, even if path doesn't yet exists.
+ return True
+
+ def _is_file_type(self, instance):
+ """Check if instance value is a valid file path.
+
+ :param instance: string of file path
+ :return: True if instance is a valid file path (even if it doesn't exist), false otherwise.
+ """
+ # Same issue as with _is_directory_type
+ return True
+
+ def _parse_file_formats(self, schema_file):
+ # Open the schema file and based on "file_type" property create a Format validator
+ schema = json.load(open(schema_file, 'r'))
+ if schema['file_type'] == 'csv':
+ return self._CSVFormat(schema)
+ else:
+ raise Exception("No format found")
+
+ @staticmethod
+ def _check_path(validator, schema_bool, path, schema):
+ """Makes sure a file/directory exists or doesn't based on the "exists" property in the schema
+
+ :param validator:
+ :param schema_bool: True means file must exists, False means file should not exists
+ :param path: path of the file
+ :param schema:
+ :return: True if schema is satisfied.
+ """
+ assert(schema['type'] == 'directory' or schema['type'] == 'file')
+ path_exists = os.path.exists(path)
+ if path_exists != schema_bool:
+ raise ValidationError("{} {} exists.".format(path, "already" if path_exists else "does not"))
+
+ def _validate_file(self, validator, file_format, file_path, schema):
+ file_validator = self._file_formats.get(file_format, None)
+ if file_validator is None:
+ raise ValidationError("Could not find file validator {}".format(file_format))
+
+ if not file_validator.check(file_path):
+ raise ValidationError("File {} could not be validated against {}.".format(file_path, file_format))
+
+ # A series of validators for individual types of files. All of them should have a check(file) function that returns
+ # true only when the file is formatted correctly.
+ class _CSVFormat(object):
+ def __init__(self, schema):
+ self._properties = schema['file_properties']
+ self._required_columns = [header for header, props in schema['columns'].items() if props['required']]
+
+ def check(self, file_name):
+ csv_headers = set(pd.read_csv(file_name, nrows=0, **self._properties).columns)
+ for col in self._required_columns:
+ if col not in csv_headers:
+ return False
+
+ return True
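+
+# Example (illustrative): a hypothetical csv file-format schema of the kind consumed by _parse_file_formats,
+# requiring a 'node_id' column in any space-separated csv it checks:
+#
+#   {"file_type": "csv",
+#    "file_properties": {"sep": " "},
+#    "columns": {"node_id": {"required": true}, "x": {"required": false}}}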
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/simulation_inputs.py b/bmtk-vb/build/lib/bmtk/simulator/utils/simulation_inputs.py
new file mode 100644
index 0000000..bdd1588
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/simulation_inputs.py
@@ -0,0 +1,77 @@
+
+class SimInput(object):
+ registry = {} # For factory function
+
+ def __init__(self, input_name, input_type, module, params):
+ self.name = input_name
+ self.input_type = input_type
+ self.module = module
+ self.params = params.copy()
+
+ # Remove the 'module' and 'input_type' from the params since user should access it through the variable
+ for param_key in ['module', 'input_type']:
+ if param_key in self.params:
+ del self.params[param_key]
+
+ # Special variable, not a part of the standard but still wanted for ease of testing
+ if 'enabled' in self.params:
+ self.enabled = self.params['enabled']
+ del self.params['enabled']  # remove from the copy so it isn't treated as a module parameter
+ else:
+ self.enabled = True
+
+ # Fill in missing values with default (as specified by the subclass)
+ for var_name, default_val in self._get_defaults():
+ if var_name not in self.params:
+ self.params[var_name] = default_val
+
+ # Check there are no missing parameters
+
+ @property
+ def node_set(self):
+ return self.params.get('node_set', None)
+
+ def _get_defaults(self):
+ return []
+
+ @classmethod
+ def build(cls, input_name, params):
+ params = params.copy()
+ if 'module' not in params:
+ raise Exception('inputs setting {} does not specify the "module".'.format(input_name))
+
+ if 'input_type' not in params:
+ raise Exception('inputs setting {} does not specify the "input_type".'.format(input_name))
+
+ module_name = params['module']
+ input_type = params['input_type']
+ module_cls = SimInput.registry.get(module_name, SimInput)
+
+ return module_cls(input_name, input_type, module_name, params)
+
+ @classmethod
+ def register_module(cls, subclass):
+ # For factory, register subclass based on the module name(s)
+ assert(issubclass(subclass, cls))
+ mod_registry = cls.registry
+ mod_list = subclass.avail_modules()
+ modules = mod_list if isinstance(mod_list, list) else [mod_list]
+ for mod_name in modules:
+ if mod_name in mod_registry:
+ raise Exception('Multiple modules named {}'.format(mod_name))
+ mod_registry[mod_name] = subclass
+
+ return subclass
+
+
+def from_config(cfg):
+ inputs_list = []
+ for input_name, input_params in cfg.inputs.items():
+ input_setting = SimInput.build(input_name, input_params)
+ if input_setting.enabled:
+ inputs_list.append(input_setting)
+
+ return inputs_list
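+
+# Example (illustrative; module and file names are hypothetical): an 'inputs' config section that
+# from_config() turns into a single enabled SimInput object:
+#
+#   "inputs": {
+#       "thalamus_spikes": {"input_type": "spikes", "module": "h5",
+#                           "input_file": "thalamus_spikes.h5", "node_set": "thalamus"}
+#   }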
+
+
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/simulation_reports.py b/bmtk-vb/build/lib/bmtk/simulator/utils/simulation_reports.py
new file mode 100644
index 0000000..a7bffd9
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/simulation_reports.py
@@ -0,0 +1,284 @@
+import os
+
+
+class SimReport(object):
+ default_dir = '.'
+ registry = {} # Used by factory to keep track of subclasses
+
+ def __init__(self, name, module, params):
+ self.report_name = name
+ self.module = module
+ self.params = params
+
+ # Not part of standard, just want a quick way to turn off modules
+ if 'enabled' in params:
+ self.enabled = params['enabled']
+ del params['enabled']
+ else:
+ self.enabled = True
+
+ # Set default parameter values (when not explicitly stated). Should occur on a module-by-module basis
+ self._set_defaults()
+
+ @property
+ def node_set(self):
+ return self.params.get('cells', 'all')
+
+ def _set_defaults(self):
+ for var_name, default_val in self._get_defaults():
+ if var_name not in self.params:
+ self.params[var_name] = default_val
+
+ def _get_defaults(self):
+ """Should be overwritten by subclass with list of (var_name, default_val) tuples."""
+ return []
+
+ @staticmethod
+ def avail_modules():
+ # Return a string (or list of strings) to identify module name for each subclass
+ raise NotImplementedError
+
+ @classmethod
+ def build(cls, report_name, params):
+ """Factory method to get the module subclass, using the params (particularlly the 'module' value, which is
+ required). If there is no registered subclass a generic SimReport object will be returned
+
+ :param report_name: name of report
+ :param params: parameters of report
+ :return: A SimReport (or subclass) object with report parameters parsed out.
+ """
+ params = params.copy()
+ if 'module' not in params:
+ raise Exception('report {} does not specify the module.'.format(report_name))
+
+ module_name = params['module']
+ del params['module']
+ module_cls = SimReport.registry.get(module_name, SimReport)
+ return module_cls(report_name, module_name, params)
+
+ @classmethod
+ def register_module(cls, subclass):
+ # For factory, register subclass based on the module name(s)
+ assert(issubclass(subclass, cls))
+ mod_registry = cls.registry
+ mod_list = subclass.avail_modules()
+ modules = mod_list if isinstance(mod_list, list) else [mod_list]
+ for mod_name in modules:
+ if mod_name in mod_registry:
+ raise Exception('Multiple modules named {}'.format(mod_name))
+ mod_registry[mod_name] = subclass
+
+ return subclass
+
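+# Example (illustrative; the class and module name are hypothetical): registering a custom report so that
+# SimReport.build() dispatches to it whenever a report block sets "module": "my_report":
+#
+#   @SimReport.register_module
+#   class MyReport(SimReport):
+#       @staticmethod
+#       def avail_modules():
+#           return 'my_report'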
+
+@SimReport.register_module
+class MembraneReport(SimReport, object):
+ def __init__(self, report_name, module, params):
+ super(MembraneReport, self).__init__(report_name, module, params)
+ # Want the variable_name option to accept either a single param or a list of params
+ variables = params['variable_name']
+ if isinstance(variables, list):
+ self.params['variable_name'] = variables
+ else:
+ self.params['variable_name'] = [variables]
+ self.variables = self.params['variable_name']
+
+ self.params['buffer_data'] = self.params.pop('buffer')
+
+ if self.params['transform'] and not isinstance(self.params['transform'], dict):
+ self.params['transform'] = {var_name: self.params['transform'] for var_name in self.variables}
+
+ def _get_defaults(self):
+ # directory for saving temporary files created during simulation
+ tmp_dir = self.default_dir
+
+ # Find the report file name. Either look for "file_name" parameter, or else it is .h5
+ if 'file_name' in self.params:
+ file_name = self.params['file_name']
+ elif self.report_name.endswith('.h5') or self.report_name.endswith('.hdf') \
+ or self.report_name.endswith('.hdf5'):
+ file_name = self.report_name  # guard against producing report.h5.h5
+ else:
+ file_name = '{}.h5'.format(self.report_name)
+
+ return [('cells', 'biophysical'), ('sections', 'all'), ('tmp_dir', tmp_dir), ('file_name', file_name),
+ ('buffer', True), ('transform', {})]
+
+ def add_variables(self, var_name, transform):
+ self.params['variable_name'].extend(var_name)
+ self.params['transform'].update(transform)
+
+ def can_combine(self, other):
+ def param_eq(key):
+ return self.params.get(key, None) == other.params.get(key, None)
+
+ return param_eq('cells') and param_eq('sections') and param_eq('file_name') and param_eq('buffer')
+
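+ # Example (illustrative; names are hypothetical): reports that differ only in variable_name can be merged:
+ #
+ #   r1 = SimReport.build('v', {'module': 'membrane_report', 'variable_name': 'v', 'file_name': 'vars.h5'})
+ #   r2 = SimReport.build('ca', {'module': 'membrane_report', 'variable_name': 'cai', 'file_name': 'vars.h5'})
+ #   r1.can_combine(r2)   # -> True: everything but variable_name matches, so from_config() merges them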
+ @staticmethod
+ def avail_modules():
+ return 'membrane_report'
+
+ @classmethod
+ def build(cls, name, params):
+ report = cls(name, 'membrane_report', params.copy())  # cls(name) alone would raise a TypeError
+ report.cells = params.get('cells', 'biophysical')
+ report.sections = params.get('sections', 'all')
+
+ if 'file_name' in params:
+ report.file_name = params['file_name']
+ report.tmp_dir = os.path.dirname(os.path.realpath(report.file_name))
+ else:
+ report.file_name = os.path.join(cls.default_dir, 'cell_vars.h5')
+ report.tmp_dir = cls.default_dir
+
+ variables = params['variable_name']
+ if isinstance(variables, list):
+ report.variables = variables
+ else:
+ report.variables = [variables]
+
+ return report
+
+
+@SimReport.register_module
+class SpikesReport(SimReport):
+ def __init__(self, report_name, module, params):
+ super(SpikesReport, self).__init__(report_name, module, params)
+
+ @classmethod
+ def build(cls, name, params):
+ return None
+
+ @staticmethod
+ def avail_modules():
+ return 'spikes_report'
+
+ @classmethod
+ def from_output_dict(cls, output_dict):
+ params = {
+ 'spikes_file': output_dict.get('spikes_file', None),
+ 'spikes_file_csv': output_dict.get('spikes_file_csv', None),
+ 'spikes_file_nwb': output_dict.get('spikes_file_nwb', None),
+ 'spikes_sort_order': output_dict.get('spikes_sort_order', None),
+ 'tmp_dir': output_dict.get('output_dir', cls.default_dir)
+ }
+ if not (params['spikes_file'] or params['spikes_file_csv'] or params['spikes_file_nwb']):
+ # User hasn't specified any spikes file
+ params['enabled'] = False
+
+ return cls('spikes_report', 'spikes_report', params)
+
+
+@SimReport.register_module
+class SEClampReport(SimReport):
+ def __init__(self, report_name, module, params):
+ super(SEClampReport, self).__init__(report_name, module, params)
+
+ @staticmethod
+ def avail_modules():
+ return 'SEClamp'
+
+
+@SimReport.register_module
+class ECPReport(SimReport):
+ def __init__(self, report_name, module, params):
+ super(ECPReport, self).__init__(report_name, module, params)
+ self.tmp_dir = self.default_dir
+ self.positions_file = None
+ self.file_name = None
+
+ @staticmethod
+ def avail_modules():
+ return 'extracellular'
+
+ def _get_defaults(self):
+ if 'file_name' in self.params:
+ file_name = self.params['file_name']
+ elif self.report_name.endswith('.h5') or self.report_name.endswith('.hdf') \
+ or self.report_name.endswith('.hdf5'):
+ file_name = self.report_name # name already carries an hdf5 extension; avoid report.h5.h5
+ else:
+ file_name = '{}.h5'.format(self.report_name)
+
+ return [('tmp_dir', self.default_dir), ('file_name', file_name),
+ ('contributions_dir', os.path.join(self.default_dir, 'ecp_contributions'))]
+
+ @classmethod
+ def build(cls, name, params):
+ report = cls(name)
+
+ if 'file_name' in params:
+ report.file_name = params['file_name']
+ report.tmp_dir = os.path.dirname(os.path.realpath(report.file_name))
+ else:
+ report.file_name = os.path.join(cls.default_dir, 'ecp.h5')
+ report.tmp_dir = cls.default_dir
+
+ report.contributions_dir = params.get('contributions_dir', cls.default_dir)
+ report.positions_file = params['electrode_positions']
+ return report
+
+
+@SimReport.register_module
+class SaveSynapses(SimReport):
+ def __init__(self, report_name, module, params):
+ super(SaveSynapses, self).__init__(report_name, module, params)
+
+ @staticmethod
+ def avail_modules():
+ return 'SaveSynapses'
+
+
+@SimReport.register_module
+class MultimeterReport(MembraneReport):
+
+ @staticmethod
+ def avail_modules():
+ return ['multimeter', 'multimeter_report']
+
+
+@SimReport.register_module
+class SectionReport(MembraneReport):
+
+ @staticmethod
+ def avail_modules():
+ return ['section_report']
+
+
+def from_config(cfg):
+ SimReport.default_dir = cfg.output_dir
+
+ reports_list = []
+ membrane_reports = []
+ has_spikes_report = False
+ for report_name, report_params in cfg.reports.items():
+ # Get the Report class from the module_name parameter
+ if not report_params.get('enabled', True):
+ # 'enabled' is not part of the SONATA standard, but lets users skip individual reports
+ continue
+
+ report = SimReport.build(report_name, report_params)
+
+ if isinstance(report, SpikesReport):
+ # a spikes report was configured explicitly, so don't add a default one from cfg.output below
+ has_spikes_report = True
+
+ if isinstance(report, MembraneReport):
+ # When possible, combine multiple membrane reports into a single module if all
+ # their parameters except the variable name match.
+ for existing_report in membrane_reports:
+ if existing_report.can_combine(report):
+ existing_report.add_variables(report.variables, report.params['transform'])
+ break
+ else:
+ reports_list.append(report)
+ membrane_reports.append(report)
+
+ else:
+ reports_list.append(report)
+
+ if not has_spikes_report:
+ report = SpikesReport.from_output_dict(cfg.output)
+ if report is None:
+ # TODO: Log exception or possibly warning
+ pass
+ else:
+ reports_list.append(report)
+
+ return reports_list
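+
+# Illustrative example (not in the original source): a config whose "reports"
+# section defines two membrane reports differing only in variable_name yields a
+# single combined MembraneReport here, plus a SpikesReport built from cfg.output
+# when no explicit spikes report was configured.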
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/stimulus/LocallySparseNoise.py b/bmtk-vb/build/lib/bmtk/simulator/utils/stimulus/LocallySparseNoise.py
new file mode 100644
index 0000000..ee43e9f
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/stimulus/LocallySparseNoise.py
@@ -0,0 +1,137 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+from scipy.misc import imresize
+import os
+import pandas as pd
+
+stimulus_folder = os.path.dirname(os.path.abspath(__file__))
+bob_stimulus = os.path.join(stimulus_folder, 'lsn.npy')
+
+class LocallySparseNoise (object):
+
+ def __init__(self,stim_template=None, stim_table=None):
+
+ if stim_template is None or stim_table is None:
+ raise Exception("stim_template or stim_table not provided. Please provide them or call the class methods .with_new_stimulus or .with_bob_stimulus.")
+ else:
+ self.stim_template = stim_template
+ self.stim_table = stim_table
+
+ T,y,x = stim_template.shape
+
+ self.T = T
+ self.y = y
+ self.x = x
+
+
+ def get_image_input(self, new_size=None, add_channels=False):
+
+ if new_size is not None:
+ y,x = new_size
+ data_new_size = np.empty((self.T,y,x),dtype=np.float32)
+
+ for t in range(self.stim_template.shape[0]):
+ data_new_size[t] = imresize(self.stim_template[t].astype(np.float32),new_size,interp='nearest')
+
+ if add_channels:
+ return data_new_size[:,:,:,np.newaxis]
+ else:
+ return data_new_size
+
+ @staticmethod
+ def exclude(av,y_x,exclusion=0):
+ y, x = y_x
+ X,Y = np.meshgrid(np.arange(av.shape[1]), np.arange(av.shape[0]))
+
+ mask = ((X-x)**2 + (Y-y)**2) <= exclusion**2
+ av[mask] = False
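+ # e.g. exclude(av, (3, 4), exclusion=2) clears every entry of av within a
+ # euclidean radius of 2 pixels of (row 3, column 4) -- hypothetical values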
+
+ @classmethod
+ def create_sparse_noise_matrix(cls,Y=16,X=28,exclusion=5,T=9000, buffer_x=6, buffer_y=6):
+
+ Xp = X+2*buffer_x
+ Yp = Y+2*buffer_y
+
+ # 127 is mean luminance value
+ sn = 127*np.ones([T,Yp,Xp],dtype=np.uint8)
+
+ for t in range(T):
+ available = np.ones([Yp, Xp], dtype=bool)
+
+ while np.any(available):
+ y_available, x_available = np.where(available)
+
+ pairs = list(zip(y_available, x_available)) # materialize: zip() is a one-shot iterator in Python 3
+ pair_index = np.random.choice(len(pairs))
+ y, x = pairs[pair_index]
+
+ p = np.random.random()
+ if p < 0.5:
+ sn[t,y,x] = 255
+ else:
+ sn[t,y,x] = 0
+
+ cls.exclude(available,(y,x),exclusion=exclusion)
+
+ return sn[:,buffer_y:(Y+buffer_y), buffer_x:(X+buffer_x)]
+
+ def save_to_hdf(self):
+
+ pass
+
+ @staticmethod
+ def generate_stim_table(T,start_time=0,trial_length=250):
+ '''trial_length is in milliseconds'''
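+ # Worked example: T=2, start_time=0, trial_length=250 gives rows
+ # (frame, start, end) = (0, 0, 249) and (1, 250, 499)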
+
+ start_time_array = trial_length*np.arange(T) + start_time
+ column_list = [np.arange(T),start_time_array, start_time_array+trial_length-1] # -1 is because the tables in BOb use inclusive intervals, so we'll stick to that convention
+ cols = np.vstack(column_list).T
+ stim_table = pd.DataFrame(cols,columns=['frame','start','end'])
+
+ return stim_table
+
+
+ @classmethod
+ def with_new_stimulus(cls,Y=16,X=28,exclusion=5,T=9000, buffer_x=6, buffer_y=6):
+
+ stim_template = cls.create_sparse_noise_matrix(Y=Y,X=X,exclusion=exclusion,T=T, buffer_x=buffer_x, buffer_y=buffer_y)
+ T,y,x = stim_template.shape
+
+ stim_table = cls.generate_stim_table(T)
+
+ new_locally_sparse_noise = cls(stim_template=stim_template, stim_table=stim_table)
+
+ return new_locally_sparse_noise
+
+ @classmethod
+ def with_brain_observatory_stimulus(cls):
+
+ stim_template = np.load(bob_stimulus)
+ T,y,x = stim_template.shape
+
+ stim_table = cls.generate_stim_table(T)
+
+ new_locally_sparse_noise = cls(stim_template=stim_template, stim_table=stim_table)
+
+ return new_locally_sparse_noise
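+
+# Illustrative usage (not in the original source):
+# lsn = LocallySparseNoise.with_new_stimulus(T=100)
+# frames = lsn.get_image_input(new_size=(32, 56), add_channels=True)
+# # frames.shape == (100, 32, 56, 1); lsn.stim_table holds frame/start/end rows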
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/stimulus/NaturalScenes.py b/bmtk-vb/build/lib/bmtk/simulator/utils/stimulus/NaturalScenes.py
new file mode 100644
index 0000000..b04056b
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/stimulus/NaturalScenes.py
@@ -0,0 +1,337 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import os
+from PIL import Image
+import pandas as pd
+
+class NaturalScenes (object):
+ def __init__(self, new_size=(64,112), mode='L', dtype=np.float32, start_time=0, trial_length=250, add_channels=False):
+
+ self.new_size = new_size
+ self.mode = mode
+ self.dtype = dtype
+ self.start_time = start_time
+ self.trial_length = trial_length
+ self.add_channels = add_channels
+
+ def random_sample(self, n):
+ sample_indices = np.random.randint(0, self.num_images, n)
+ return self.stim_template[sample_indices]
+
+ # also a method random_sample_with_labels ?
+ def random_sample_with_labels(self, n):
+ pass
+
+ def get_image_input(self,**kwargs):
+ return self.stim_template
+
+ def add_gray_screen(self):
+
+ gray_screen = np.ones(self.new_size,dtype=self.dtype)*127 # using 127 as "gray" value
+ if self.add_channels:
+ gray_screen = gray_screen[:,:,np.newaxis]
+ self.stim_template = np.vstack([self.stim_template, gray_screen[np.newaxis,:,:]])
+
+ start = int(self.stim_table.tail(1)['end']) + 1
+ end = start+self.trial_length-1 #make trial_length an argument of this function?
+ frame = int(self.stim_table.tail(1)['frame']) + 1
+
+ self.stim_table = self.stim_table.append(pd.DataFrame([[frame,start,end]],columns=['frame','start','end']),ignore_index=True)
+
+ self.label_dataframe = self.label_dataframe.append(pd.DataFrame([['gray_screen']],columns=['image_name']),ignore_index=True)
+ self.num_images = self.num_images + 1
+
+ @classmethod
+ def with_brain_observatory_stimulus(cls, new_size=(64,112), mode='L', dtype=np.float32, start_time=0, trial_length=250, add_channels=False):
+
+ from sys import platform
+
+ if platform.startswith('linux'): # 'linux2' on Python 2, 'linux' on Python 3
+ image_dir = '/data/mat/iSee_temp_shared/CAM_Images.icns'
+ elif platform=='darwin':
+
+ image_dir = '/Users/michaelbu/Data/Images/CAM_Images.icns'
+ if not os.path.exists(image_dir):
+ print("Detected platform: OS X. I'm assuming you've mounted \\\\aibsdata\\mat at /Volumes/mat/")
+ image_dir = '/Volumes/mat/iSee_temp_shared/CAM_Images.icns'
+
+
+ elif platform=='win32':
+ image_dir = r'\\aibsdata\mat\iSee_temp_shared\CAM_Images.icns'
+
+ #image_dir = '/Users/michaelbu/Data/Images/CAM_Images' # change this to temp directory on aibsdata
+ new_ns = cls.with_new_stimulus_from_dataframe(image_dir=image_dir, new_size=new_size, mode=mode, dtype=dtype, start_time=start_time, trial_length=trial_length, add_channels=add_channels)
+
+ new_ns.add_gray_screen()
+
+ return new_ns
+
+ @staticmethod
+ def generate_stim_table(T,start_time=0,trial_length=250):
+ '''trial_length is in milliseconds'''
+
+ start_time_array = trial_length*np.arange(T) + start_time
+ column_list = [np.arange(T),start_time_array, start_time_array+trial_length-1] # -1 is because the tables in BOb use inclusive intervals, so we'll stick to that convention
+ cols = np.vstack(column_list).T
+ stim_table = pd.DataFrame(cols,columns=['frame','start','end'])
+
+ return stim_table
+
+ def to_h5(self,sample_indices=None):
+ pass
+
+ @classmethod
+ def with_new_stimulus_from_folder(cls, image_dir, new_size=(64,112), mode='L', dtype=np.float32, start_time=0, trial_length=250, add_channels=False):
+
+ new_ns = cls(new_size=new_size, mode=mode, dtype=dtype, start_time=start_time, trial_length=trial_length, add_channels=add_channels)
+
+ new_ns.im_list = os.listdir(image_dir)
+ new_ns.image_dir = image_dir
+
+ stim_list = []
+ for im in list(new_ns.im_list): # iterate over a copy so unreadable files can be removed safely
+ try:
+ im_data = Image.open(os.path.join(new_ns.image_dir,im))
+ except IOError:
+ print("Skipping file: ", im)
+ new_ns.im_list.remove(im)
+ continue
+
+ im_data = im_data.convert(new_ns.mode)
+ if new_size is not None:
+ im_data = im_data.resize((new_ns.new_size[1], new_ns.new_size[0]))
+ im_data = np.array(im_data,dtype=new_ns.dtype)
+ if add_channels:
+ im_data = im_data[:,:,np.newaxis]
+ stim_list.append(im_data)
+
+ new_ns.stim_template = np.stack(stim_list)
+ new_ns.num_images = new_ns.stim_template.shape[0]
+
+ if add_channels:
+ t,y,x,_ = new_ns.stim_template.shape
+ else:
+ t,y,x = new_ns.stim_template.shape
+ new_ns.new_size = (y,x)
+
+ new_ns.trial_length = trial_length
+ new_ns.start_time = start_time
+ new_ns.stim_table = new_ns.generate_stim_table(new_ns.num_images,start_time=new_ns.start_time,trial_length=new_ns.trial_length)
+
+ new_ns.label_dataframe = pd.DataFrame(columns=['image_name'])
+ new_ns.label_dataframe['image_name'] = new_ns.im_list
+
+ return new_ns
+
+ @classmethod
+ def with_new_stimulus_from_dataframe(cls, image_dir, new_size=(64,112), mode='L', dtype=np.float32, start_time=0, trial_length=250, add_channels=False):
+ '''image_dir should contain a folder of images called 'images' and an hdf5 file with a
+ dataframe called 'label_dataframe.h5' with the frame stored in the key 'labels'.
+ dataframe should have columns ['relative_image_path','label_1', 'label_2', ...]'''
+
+ new_ns = cls(new_size=new_size, mode=mode, dtype=dtype, start_time=start_time, trial_length=trial_length, add_channels=add_channels)
+
+ image_path = os.path.join(image_dir,'images')
+ label_dataframe = pd.read_hdf(os.path.join(image_dir,'label_dataframe.h5'),'labels')
+ new_ns.label_dataframe = label_dataframe
+
+ new_ns.image_dir = image_path
+ new_ns.im_list = list(label_dataframe.image_name)
+
+ stim_list = []
+ for im in list(new_ns.im_list): # iterate over a copy so unreadable files can be removed safely
+ try:
+ im_data = Image.open(os.path.join(image_path,im))
+ except IOError:
+ print("Skipping file: ", im)
+ new_ns.im_list.remove(im)
+ continue
+
+ im_data = im_data.convert(new_ns.mode)
+ if new_size is not None:
+ im_data = im_data.resize((new_ns.new_size[1], new_ns.new_size[0]))
+ im_data = np.array(im_data,dtype=new_ns.dtype)
+ if add_channels:
+ im_data = im_data[:,:,np.newaxis]
+ stim_list.append(im_data)
+
+ new_ns.stim_template = np.stack(stim_list)
+ new_ns.num_images = new_ns.stim_template.shape[0]
+
+ if add_channels:
+ t,y,x,_ = new_ns.stim_template.shape
+ else:
+ t,y,x = new_ns.stim_template.shape
+ new_ns.new_size = (y,x)
+
+ new_ns.trial_length = trial_length
+ new_ns.start_time = start_time
+ new_ns.stim_table = new_ns.generate_stim_table(new_ns.num_images,start_time=new_ns.start_time,trial_length=new_ns.trial_length)
+
+ return new_ns
+
+ @staticmethod
+ def create_image_dir_from_hierarchy(folder, new_path, label_names=None):
+
+ import shutil
+
+ image_dataframe = pd.DataFrame(columns=["image_name"])
+
+ if os.path.exists(new_path):
+ raise Exception("path "+new_path+" already exists!")
+
+ os.mkdir(new_path)
+ os.mkdir(os.path.join(new_path,'images'))
+ for path, sub_folders, file_list in os.walk(folder):
+
+ for f in file_list:
+ try:
+ im_data = Image.open(os.path.join(path,f))
+ except IOError:
+ print("Skipping file: ", f)
+ im_data = None
+
+ if im_data is not None:
+ shutil.copy(os.path.join(path,f), os.path.join(new_path,'images',f))
+ image_name = f
+ label_vals = os.path.split(os.path.relpath(path,folder))
+ if label_names is not None:
+ current_label_names = label_names[:]
+ else:
+ current_label_names = []
+
+ if len(label_vals) > len(current_label_names):
+ labels_to_add = ["label_"+str(i) for i in range(len(current_label_names), len(label_vals))]
+ current_label_names += labels_to_add
+ elif len(label_vals) < len(current_label_names):
+ current_label_names = current_label_names[:len(label_vals)]
+
+ vals = [f] + list(label_vals)
+ cols = ['image_name']+current_label_names
+ new_frame = pd.DataFrame([vals],columns=cols)
+
+ image_dataframe = image_dataframe.append(new_frame,ignore_index=True)
+
+ image_dataframe.to_hdf(os.path.join(new_path,'label_dataframe.h5'),'labels')
+
+ # @staticmethod
+ # def add_object_to_image(image, object_image):
+ #
+ # new_image = image.copy()
+ # new_image[np.isfinite(object_image)] = object_image[np.isfinite(object_image)]
+ # return new_image
+
+ @staticmethod
+ def add_object_to_template(template, object_image):
+
+ if template.ndim==3:
+ T,y,x = template.shape
+ elif template.ndim==4:
+ T,y,x,K = template.shape
+ else:
+ raise Exception("template.ndim must be 3 or 4")
+
+ if object_image.ndim < template.ndim-1:
+ object_image=object_image[:,:,np.newaxis]
+
+ new_template = template.copy()
+ new_template[:,np.isfinite(object_image)] = object_image[np.isfinite(object_image)]
+
+ return new_template
+
+ def add_objects_to_foreground(self, object_dict):
+
+ template_list = []
+
+ if self.label_dataframe is None:
+ self.label_dataframe = pd.DataFrame(columns=['object'])
+
+ new_label_dataframe_list = []
+
+ for obj in object_dict:
+ template_list.append(self.add_object_to_template(self.stim_template,object_dict[obj]))
+ obj_dataframe = self.label_dataframe.copy()
+ obj_dataframe['object'] = [ obj for i in range(self.num_images) ]
+ new_label_dataframe_list.append(obj_dataframe)
+
+ self.stim_template = np.vstack(template_list)
+ self.label_dataframe = pd.concat(new_label_dataframe_list,ignore_index=True)
+
+ self.num_images = self.stim_template.shape[0]
+
+ self.stim_table = self.generate_stim_table(self.num_images,start_time=self.start_time,trial_length=self.trial_length)
+
+
+ @staticmethod
+ def create_object_dict(folder, background_shape=(64,112), dtype=np.float32, rotations=False):
+
+ from scipy.misc import imresize
+
+ # resize function to preserve the nans in the background
+ def resize_im(im,new_shape):
+ def mask_for_nans():
+ mask = np.ones(im.shape)
+ mask[np.isfinite(im)] = 0
+ mask = imresize(mask,new_shape,interp='nearest')
+
+ return mask.astype(np.bool)
+
+ new_im = im.copy()
+ new_im = new_im.astype(dtype)
+ new_im[np.isnan(new_im)] = -1.
+ new_im = imresize(new_im,new_shape,interp='nearest')
+
+ new_im = new_im.astype(dtype)
+ new_im[mask_for_nans()] = np.nan
+
+ return new_im
+
+ def im_on_background(im, shift=None):
+ bg = np.empty(background_shape)
+ bg[:] = np.nan
+
+ buffer_x = (background_shape[1] - im.shape[1])//2 # integer division: these are used as array indices
+ buffer_y = (background_shape[0] - im.shape[0])//2
+
+ bg[buffer_y:im.shape[0]+buffer_y, buffer_x:im.shape[1]+buffer_x] = im
+
+ return bg
+
+ im_list = os.listdir(folder)
+
+ obj_dict = {}
+
+ for im_file in im_list:
+ try:
+ im = np.load(os.path.join(folder,im_file))
+ except IOError:
+ print("skipping file: ", im_file)
+ im = None
+
+ if im is not None:
+ new_shape = (np.min(background_shape), np.min(background_shape))
+ im = resize_im(im,new_shape)
+ obj_dict[im_file[:-4]] = im_on_background(im)
+ if rotations:
+ im_rot=im.copy()
+ for i in range(3):
+ im_rot = np.rot90(im_rot)
+ obj_dict[im_file[:-4]+'_'+str(90*(i+1))] = im_on_background(im_rot)
+
+ return obj_dict
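+
+# Illustrative usage (not in the original source; paths are hypothetical):
+# ns = NaturalScenes.with_new_stimulus_from_folder('/path/to/images')
+# objs = NaturalScenes.create_object_dict('/path/to/object_npys')
+# ns.add_objects_to_foreground(objs)
+# # stim_template now holds num_images * len(objs) frames, one copy of the scene
+# # set per overlaid object, with label_dataframe tracking the object names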
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/stimulus/StaticGratings.py b/bmtk-vb/build/lib/bmtk/simulator/utils/stimulus/StaticGratings.py
new file mode 100644
index 0000000..c7bf9cb
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/stimulus/StaticGratings.py
@@ -0,0 +1,100 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import pandas as pd
+
+
+class StaticGratings (object):
+
+ def __init__(self,orientations=30.0*np.arange(6),spatial_frequencies=0.01*(2.0**np.arange(1,6)),phases=0.25*np.arange(4),num_trials=50, start_time=0, trial_length=250):
+
+ self.orientations = orientations
+ self.spatial_frequencies = spatial_frequencies
+ self.phases = phases
+ self.num_trials = num_trials
+ self.start_time = start_time
+ self.trial_length = trial_length
+
+ trial_stims = np.array([ [orientation, spat_freq, phase] for orientation in self.orientations for spat_freq in self.spatial_frequencies for phase in self.phases ])
+
+ trial_stims = np.tile(trial_stims,(num_trials,1))
+
+ indices = np.random.permutation(trial_stims.shape[0])
+ trial_stims = trial_stims[indices]
+
+ self.stim_table = pd.DataFrame(trial_stims,columns=['orientation','spatial_frequency','phase'])
+
+ T = self.stim_table.shape[0]
+ self.T = T
+ start_time_array = trial_length*np.arange(self.T) + start_time
+ end_time_array = start_time_array + trial_length
+
+ self.stim_table['start'] = start_time_array
+ self.stim_table['end'] = end_time_array
+
+ def get_image_input(self,new_size=(64,112),pix_per_degree=1.0, dtype=np.float32, add_channels=False):
+
+ y, x = new_size
+ stim_template = np.empty([self.T, y, x],dtype=dtype)
+
+ for t, row in self.stim_table.iterrows():
+ ori, sf, ph = row[0], row[1], row[2]
+
+ theta = ori*np.pi/180.0 #convert to radians
+
+ k = (sf/pix_per_degree) # cycles per pixel; converted to radians in the cos() below
+ ph = ph*np.pi*2.0
+
+ X,Y = np.meshgrid(np.arange(x),np.arange(y))
+ X = X - x/2
+ Y = Y - y/2
+ Xp, Yp = self.rotate(X,Y,theta)
+
+ stim_template[t] = np.cos(2.0*np.pi*Xp*k + ph)
+
+ self.stim_template = stim_template
+
+ if add_channels:
+ return stim_template[:,:,:,np.newaxis]
+ else:
+ return stim_template
+
+ @staticmethod
+ def rotate(X,Y, theta):
+
+ Xp = X*np.cos(theta) - Y*np.sin(theta)
+ Yp = X*np.sin(theta) + Y*np.cos(theta)
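+ # e.g. rotate(1.0, 0.0, np.pi/2) ~ (0.0, 1.0): counter-clockwise rotation of the grid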
+
+ return Xp, Yp
+
+ @classmethod
+ def with_brain_observatory_stimulus(cls, num_trials=50):
+
+ orientations = 30.0*np.arange(6)
+ spatial_frequencies = 0.01*(2.0**np.arange(1,6))
+ phases = 0.25*np.arange(4)
+
+ start_time = 0
+ trial_length = 250
+
+ return cls(orientations=orientations,spatial_frequencies=spatial_frequencies,phases=phases,num_trials=num_trials,start_time=start_time,trial_length=trial_length)
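+
+# Illustrative usage (not in the original source):
+# sg = StaticGratings.with_brain_observatory_stimulus(num_trials=2)
+# frames = sg.get_image_input(new_size=(64, 112), pix_per_degree=1.0)
+# # frames.shape == (sg.T, 64, 112); sg.stim_table lists orientation/sf/phase per trial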
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/stimulus/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/utils/stimulus/__init__.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/stimulus/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/tools/__init__.py b/bmtk-vb/build/lib/bmtk/simulator/utils/tools/__init__.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/tools/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/tools/process_spikes.py b/bmtk-vb/build/lib/bmtk/simulator/utils/tools/process_spikes.py
new file mode 100644
index 0000000..0f5519a
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/tools/process_spikes.py
@@ -0,0 +1,207 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import h5py
+import numpy as np
+import pandas as pd
+import os
+
+
+def read_spk_txt(f_name):
+
+ '''
+
+ Parameters
+ ----------
+ f_name: string
+ Full path to a file containing cell IDs and spike times.
+
+ Returns
+ -------
+ A dataframe containing two columns: spike times and cell IDs.
+
+ Usage:
+ x = read_spk_txt('output/spk.dat')
+
+ '''
+
+ df = pd.read_csv(f_name, header=None, sep=' ')
+ df.columns = ['t', 'gid']
+
+ return df
+
+
+def read_spk_h5(f_name):
+
+ '''
+
+ Parameters
+ ----------
+ f_name: string
+ Full path to a file containing cell IDs and spike times.
+
+ Returns
+ -------
+ A dataframe containing two columns: spike times and cell IDs.
+
+ Usage:
+ x = read_spk_h5('output/spk.h5')
+
+ '''
+
+ f = h5py.File(f_name, 'r' , libver='latest')
+ spikes = {}
+
+ t = np.array([])
+ gids = np.array([])
+ for i, gid in enumerate(f.keys()): # save spikes of all gids
+ if (i % 1000 == 0):
+ print(i)
+ spike_times = f[gid][...]
+ t = np.append(t, spike_times)
+ gids = np.append(gids, np.ones(spike_times.size)*int(gid))
+
+ f.close()
+
+ df = pd.DataFrame(columns=['t', 'gid'])
+ df['t'] = t
+ df['gid'] = gids
+
+ return df
+
+
+def spikes_to_mean_f_rate(cells_f, spk_f, t_window, **kwargs):
+
+ '''
+
+ Parameters
+ ----------
+ cells_f: string
+ Full path to a file containing information about all cells (in particular, all cell IDs,
+ and not just those that fired spikes in a simulation).
+ spk_f: string
+ Full path to a file containing cell IDs and spike times.
+ t_window: a tuple of two floats
+ Start and stop time for the window within which the firing rate is computed.
+ **kwargs
+ spk_f_type: string with accepted values 'txt' or 'h5'
+ Type of the file from which spike times should be extracted.
+
+
+ Assumptions
+ -----------
+ It is assumed here that TIME IS in ms and the RATES ARE RETURNED in Hz.
+
+
+ Returns
+ -------
+ A dataframe containing a column of cell IDs and a column of corresponding
+ average firing rates.
+
+ Usage:
+ x = spikes_to_mean_f_rate('../network_model/cells.csv', 'output/spk.dat', (500.0, 3000.0))
+
+ '''
+
+ # Make sure the time window's start and stop times are reasonable.
+ t_start = t_window[0]
+ t_stop = t_window[1]
+ delta_t = t_stop - t_start
+ if (delta_t <= 0.0):
+ print('spikes_to_mean_f_rate: stop time %f is <= start time %f; exiting.' % (t_stop, t_start))
+ quit()
+
+ # Read information about all cells.
+ cells_df = pd.read_csv(cells_f, sep=' ')
+ gids = cells_df['id'].values
+
+ # By default, the spk file type is "None", in which case it should be chosen
+ # based on the extension of the supplied spk file name.
+ spk_f_type = kwargs.get('spk_f_type', None)
+ if spk_f_type is None:
+ spk_f_ext = spk_f.split('.')[-1]
+ if (spk_f_ext in ['txt', 'dat']):
+ spk_f_type = 'txt' # Assume this is an ASCII file.
+ elif (spk_f_ext in ['h5']):
+ spk_f_type = 'h5' # Assume this is an HDF5 file.
+ else:
+ print('spikes_to_mean_f_rate: unrecognized file extension. Use the flag spk_f_type=\'txt\' or \'h5\' to override this message. Exiting.')
+ quit()
+
+ # In case the spk_f_type was provided directly, check that the value is among those the code recognizes.
+ if (spk_f_type not in ['txt', 'h5']):
+ print('spikes_to_mean_f_rate: unrecognized value of spk_f_type. The recognized values are \'txt\' or \'h5\'. Exiting.')
+ quit()
+
+ # Read spikes.
+ # If the spike file has zero size, create a dataframe with all rates equal to zero.
+ # Otherwise, use spike times from the file to fill the dataframe.
+ if (os.stat(spk_f).st_size == 0):
+ f_rate_df = pd.DataFrame(columns=['gid', 'f_rate'])
+ f_rate_df['gid'] = gids
+ f_rate_df['f_rate'] = np.zeros(gids.size)
+ else:
+ # Use the appropriate function to read the spikes.
+ if (spk_f_type == 'txt'):
+ df = read_spk_txt(spk_f)
+ elif(spk_f_type == 'h5'):
+ df = read_spk_h5(spk_f)
+
+ # Keep only those entries that have spike times within the time window.
+ df = df[(df['t'] >= t_start) & (df['t'] <= t_stop)]
+
+ # Compute rates.
+ f_rate_df = df.groupby('gid').count() * 1000.0 / delta_t # Time is in ms and rate is in Hz.
+ f_rate_df.columns = ['f_rate']
+ # The 'gid' label is now used as index (after the groupby operation).
+ # Convert it to a column; then change the index name to none, as in default.
+ f_rate_df['gid'] = f_rate_df.index
+ f_rate_df.index.names = ['']
+
+ # Find cell IDs from the spk file that are not in the cell file.
+ # Remove them from the dataframe with rates.
+ gids_not_in_cells_f = f_rate_df['gid'].values[~np.in1d(f_rate_df['gid'].values, gids)]
+ f_rate_df = f_rate_df[~f_rate_df['gid'].isin(gids_not_in_cells_f)]
+
+ # Find cell IDs from the cell file that do not have counterparts in the spk file
+ # (for example, because those cells did not fire).
+ # Add these cell IDs to the dataframe; fill rates with zeros.
+ gids_not_in_spk = gids[~np.in1d(gids, f_rate_df['gid'].values)]
+ f_rate_df = f_rate_df.append(pd.DataFrame(np.array([gids_not_in_spk, np.zeros(gids_not_in_spk.size)]).T, columns=['gid', 'f_rate']))
+
+ # Sort the rows according to the cell IDs.
+ f_rate_df = f_rate_df.sort_values('gid', ascending=True) # DataFrame.sort() was removed in pandas 0.20
+
+ return f_rate_df
+
+
+# Tests.
+
+#x = spikes_to_mean_f_rate('/data/mat/yazan/corticalCol/ice/sims/column/build/net_structure/cells.csv', '/data/mat/yazan/corticalCol/ice/sims/column/full_preliminary_runs/output008/spikes.txt', (500.0, 2500.0))
+#print x
+
+#x = spikes_to_mean_f_rate('/data/mat/yazan/corticalCol/ice/sims/column/build/net_structure/cells.csv', '/data/mat/yazan/corticalCol/ice/sims/column/full_preliminary_runs/output008/spikes.h5', (500.0, 2500.0))
+#print x
+
+#x = spikes_to_mean_f_rate('/data/mat/yazan/corticalCol/ice/sims/column/build/net_structure/cells.csv', '/data/mat/yazan/corticalCol/ice/sims/column/full_preliminary_runs/output008/spikes.txt', (500.0, 2500.0), spk_f_type='txt')
+#print x
+
diff --git a/bmtk-vb/build/lib/bmtk/simulator/utils/tools/spatial.py b/bmtk-vb/build/lib/bmtk/simulator/utils/tools/spatial.py
new file mode 100644
index 0000000..9b331d0
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/simulator/utils/tools/spatial.py
@@ -0,0 +1,26 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+
+def example():
+ print('OK')
diff --git a/bmtk-vb/build/lib/bmtk/utils/__init__.py b/bmtk-vb/build/lib/bmtk/utils/__init__.py
new file mode 100644
index 0000000..1c9c088
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/__init__.py
@@ -0,0 +1,24 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import logging
+
+
diff --git a/bmtk-vb/build/lib/bmtk/utils/cell_vars/__init__.py b/bmtk-vb/build/lib/bmtk/utils/cell_vars/__init__.py
new file mode 100644
index 0000000..021345f
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/cell_vars/__init__.py
@@ -0,0 +1,6 @@
+from .var_reader import CellVarsFile
+
+
+
+
+
diff --git a/bmtk-vb/build/lib/bmtk/utils/cell_vars/var_reader.py b/bmtk-vb/build/lib/bmtk/utils/cell_vars/var_reader.py
new file mode 100644
index 0000000..21da36d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/cell_vars/var_reader.py
@@ -0,0 +1,134 @@
+import h5py
+import numpy as np
+
+
+class CellVarsFile(object):
+ VAR_UNKNOWN = 'Unknown'
+ UNITS_UNKNOWN = 'NA'
+
+ def __init__(self, filename, mode='r', **params):
+ self._h5_handle = h5py.File(filename, 'r')
+ self._h5_root = self._h5_handle[params['h5_root']] if 'h5_root' in params else self._h5_handle['/']
+ self._var_data = {}
+ self._var_units = {}
+
+ self._mapping = None
+
+ # Look for variable and mapping groups
+ for var_name in self._h5_root.keys():
+ hf_grp = self._h5_root[var_name]
+
+ if var_name == 'data':
+ # According to the sonata format the /data table should be located at the root
+ var_name = self._h5_root['data'].attrs.get('variable_name', CellVarsFile.VAR_UNKNOWN)
+ self._var_data[var_name] = self._h5_root['data']
+ self._var_units[var_name] = self._find_units(self._h5_root['data'])
+
+ if not isinstance(hf_grp, h5py.Group):
+ continue
+
+ if var_name == 'mapping':
+ # Check for /mapping group
+ self._mapping = hf_grp
+ else:
+ # bmtk supports multiple variables in the same file (not SONATA compliant, but it should be),
+ # with each variable's table stored in its own group as /<var_name>/data
+ if 'data' not in hf_grp:
+ print('Warning: could not find "data" dataset in {}. Skipping!'.format(var_name))
+ else:
+ self._var_data[var_name] = hf_grp['data']
+ self._var_units[var_name] = self._find_units(hf_grp['data'])
+
+ # create map between gids and tables
+ self._gid2data_table = {}
+ if self._mapping is None:
+ raise Exception('could not find /mapping group')
+ else:
+ gids_ds = self._mapping['gids']
+ index_pointer_ds = self._mapping['index_pointer']
+ for indx, gid in enumerate(gids_ds):
+ self._gid2data_table[gid] = (index_pointer_ds[indx], index_pointer_ds[indx+1]) # slice(index_pointer_ds[indx], index_pointer_ds[indx+1])
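+ # e.g. gids [10, 20] with index_pointer [0, 3, 7] (hypothetical values) means
+ # rows 0:3 of each /data table belong to gid 10 and rows 3:7 to gid 20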
+
+ time_ds = self._mapping['time']
+ self._t_start = time_ds[0]
+ self._t_stop = time_ds[1]
+ self._dt = time_ds[2]
+ self._n_steps = int((self._t_stop - self._t_start) / self._dt)
+
+ @property
+ def variables(self):
+ return list(self._var_data.keys())
+
+ @property
+ def gids(self):
+ return list(self._gid2data_table.keys())
+
+ @property
+ def t_start(self):
+ return self._t_start
+
+ @property
+ def t_stop(self):
+ return self._t_stop
+
+ @property
+ def dt(self):
+ return self._dt
+
+ @property
+ def time_trace(self):
+ return np.linspace(self.t_start, self.t_stop, num=self._n_steps, endpoint=True)
+
+ @property
+ def h5(self):
+ return self._h5_root
+
+ def _find_units(self, data_set):
+ return data_set.attrs.get('units', CellVarsFile.UNITS_UNKNOWN)
+
+ def units(self, var_name=VAR_UNKNOWN):
+ return self._var_units[var_name]
+
+ def n_compartments(self, gid):
+ bounds = self._gid2data_table[gid]
+ return bounds[1] - bounds[0]
+
+ def compartment_ids(self, gid):
+ bounds = self._gid2data_table[gid]
+ return self._mapping['element_id'][bounds[0]:bounds[1]]
+
+ def compartment_positions(self, gid):
+ bounds = self._gid2data_table[gid]
+ return self._mapping['element_pos'][bounds[0]:bounds[1]]
+
+ def data(self, gid, var_name=VAR_UNKNOWN,time_window=None, compartments='origin'):
+ if var_name not in self.variables:
+ raise Exception('Unknown variable {}'.format(var_name))
+
+ if time_window is None:
+ time_slice = slice(0, self._n_steps)
+ else:
+ if len(time_window) != 2:
+ raise Exception('Invalid time_window, expecting tuple (begin, end).')
+
+ window_beg = max(int((time_window[0] - self.t_start)/self.dt), 0)
+ window_end = min(int((time_window[1] - self.t_start)/self.dt), self._n_steps) # clamp to the number of recorded steps
+ time_slice = slice(window_beg, window_end)
+
+ multi_compartments = True
+ if compartments == 'origin' or self.n_compartments(gid) == 1:
+ # Return the first (and possibly only) compartment for said gid
+ gid_slice = self._gid2data_table[gid][0]
+ multi_compartments = False
+ elif compartments == 'all':
+ # Return all compartments
+ gid_slice = slice(self._gid2data_table[gid][0], self._gid2data_table[gid][1])
+ else:
+ # return all compartments with corresponding element id
+ compartment_list = [compartments] if isinstance(compartments, int) else list(compartments) # 'long' existed only in Python 2; wrap a single id in a list
+ begin = self._gid2data_table[gid][0]
+ end = self._gid2data_table[gid][1]
+ gid_slice = [i for i in range(begin, end) if self._mapping['element_id'][i] in compartment_list]
+
+ data = np.array(self._var_data[var_name][time_slice, gid_slice])
+ return data.T if multi_compartments else data
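+
+# Illustrative usage (not in the original source; the file name is hypothetical):
+# cvf = CellVarsFile('output/cell_vars.h5')
+# v = cvf.data(gid=0, var_name='v', time_window=(500.0, 1000.0))
+# # v is a 1D trace for the soma, or (n_compartments, n_steps) with compartments='all'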
diff --git a/bmtk-vb/build/lib/bmtk/utils/converters/__init__.py b/bmtk-vb/build/lib/bmtk/utils/converters/__init__.py
new file mode 100644
index 0000000..04c8f88
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/converters/__init__.py
@@ -0,0 +1,22 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
diff --git a/bmtk-vb/build/lib/bmtk/utils/converters/hoc_converter.py b/bmtk-vb/build/lib/bmtk/utils/converters/hoc_converter.py
new file mode 100644
index 0000000..c500945
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/converters/hoc_converter.py
@@ -0,0 +1,299 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import json
+import os.path
+import re
+from collections import defaultdict
+from itertools import groupby
+from lxml import etree
+import bluepyopt.ephys as ephys
+from tqdm import tqdm
+import utils
+
+XML_NS = '{http://www.neuroml.org/schema/neuroml2}'
+MECHANISMS = [
+ 'channelDensity', 'channelDensityNernst', 'specificCapacitance', 'species',
+ 'resistivity', 'concentrationModel'
+]
+
+LOCATION_MAP = {
+ 'apic': 'apical',
+ 'soma': 'somatic',
+ 'dend': 'basal',
+ 'axon': 'axonal',
+ 'all': 'all'
+}
+
+
+def map_location_name(name):
+ return LOCATION_MAP[name]
+
+
+def load_json(json_path):
+ with open(json_path) as json_file:
+ params = json.load(json_file)
+
+ scalar = ephys.parameterscalers.NrnSegmentLinearScaler()
+ mechanisms = {}
+ sections_lookup = {'soma': 'somatic', 'dend': 'basal', 'axon': 'axonal', 'apic': 'apical'}
+ def getNrnSeclist(loc_name):
+ return ephys.locations.NrnSeclistLocation(loc_name, seclist_name=loc_name)
+
+ parameters = []
+ for d in params['genome']:
+ section = sections_lookup[d['section']]
+ value = d['value']
+ name = d['name']
+ mech = 'pas' if name == 'g_pass' else d['mechanism']
+ mech_name = 'CaDynamics' if mech == 'CaDynamics' else '{}.{}'.format(name, d['section'])
+ p_name = '{}_{}'.format(name, section) if name == 'g_pass' else name
+
+ if mech_name not in mechanisms:
+ nrn_mech = ephys.mechanisms.NrnMODMechanism(name=mech_name, mod_path=None, suffix=mech,
+ locations=[getNrnSeclist(section)])
+ mechanisms[mech_name] = nrn_mech
+
+ parameters.append(ephys.parameters.NrnSectionParameter(name=p_name, param_name=name, value_scaler=scalar,
+ value=value, locations=[getNrnSeclist(section)]))
+
+ parameters.append(ephys.parameters.NrnSectionParameter(name='erev_na', param_name='ena', value_scaler=scalar,
+ value=params['conditions'][0]['erev'][0]['ena'],
+ locations=[getNrnSeclist('somatic')]))
+ parameters.append(ephys.parameters.NrnSectionParameter(name='erev_k', param_name='ek', value_scaler=scalar,
+ value=params['conditions'][0]['erev'][0]['ek'],
+ locations=[getNrnSeclist('somatic')]))
+ parameters.append(ephys.parameters.NrnSectionParameter(name='erev_pas', param_name='e_pas', value_scaler=scalar,
+ value=params['conditions'][0]['v_init'],
+ locations=[getNrnSeclist('somatic'), getNrnSeclist('axonal'),
+ getNrnSeclist('basal'), getNrnSeclist('apical')]))
+
+ parameters.append(ephys.parameters.NrnSectionParameter(name='erev_Ih', param_name='ehcn', value_scaler=scalar,
+ value=-45.0,
+ locations=[getNrnSeclist('somatic')]))
+
+ parameters.append(ephys.parameters.NrnSectionParameter(name='res_all', param_name='Ra', value_scaler=scalar,
+ value=params['passive'][0]['ra'],
+ locations=[getNrnSeclist('somatic')]))
+ for sec in params['passive'][0]['cm']:
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(name='{}_cap'.format(sec['section']), param_name='cm',
+ value_scaler=scalar,
+ value=sec['cm'],
+ locations=[getNrnSeclist(sec['section'])]))
+
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(name='ca', param_name='depth_CaDynamics', value_scaler=scalar,
+ value=0.1, locations=[getNrnSeclist('somatic')]))
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(name='ca', param_name='minCai_CaDynamics', value_scaler=scalar,
+ value=0.0001, locations=[getNrnSeclist('somatic')]))
+
+ return mechanisms.values(), parameters
+
+
+def load_neuroml(neuroml_path):
+ root = etree.parse(neuroml_path).getroot()
+ biophysics = defaultdict(list)
+ for mechanism in MECHANISMS:
+ xml_mechanisms = root.findall('.//' + XML_NS + mechanism)
+ for xml_mechanism in xml_mechanisms:
+ biophysics[mechanism].append(xml_mechanism.attrib)
+
+ return biophysics
+
+
+def define_mechanisms(biophysics):
+ def keyfn(x):
+ return x['segmentGroup']
+
+ channels = biophysics['channelDensity'] + biophysics[
+ 'channelDensityNernst']
+ segment_groups = [(k, list(g))
+ for k, g in groupby(
+ sorted(
+ channels, key=keyfn), keyfn)]
+ mechanisms = []
+ for sectionlist, channels in segment_groups:
+ loc_name = map_location_name(sectionlist)
+ seclist_loc = ephys.locations.NrnSeclistLocation(
+ loc_name, seclist_name=loc_name)
+ for channel in channels:
+ # print 'mechanisms.append(ephys.mechanisms.NrnMODMechanism(name={}.{}, mod_path=None, suffix={}, locations=[{}]))'.format(channel['ionChannel'], loc_name, channel['ionChannel'], seclist_loc)
+ mechanisms.append(
+ ephys.mechanisms.NrnMODMechanism(
+ name='%s.%s' % (channel['ionChannel'], loc_name),
+ mod_path=None,
+ suffix=channel['ionChannel'],
+ locations=[seclist_loc], ))
+ for elem in biophysics['species']:
+ section = map_location_name(elem['segmentGroup'])
+ section_loc = ephys.locations.NrnSeclistLocation(
+ section, seclist_name=section)
+ # print 'mechanisms.append(ephys.mechanisms.NrnMODMechanism(name={}, mod_path=None, suffix={}, location=[{}]))'.format(elem['concentrationModel'], elem['concentrationModel'], section_loc)
+ mechanisms.append(
+ ephys.mechanisms.NrnMODMechanism(
+ name=elem['concentrationModel'],
+ mod_path=None,
+ suffix=elem['concentrationModel'],
+ locations=[section_loc]))
+
+ return mechanisms
+
+
+def define_parameters(biophysics):
+ ''' for the time being all AIBS distribution are uniform '''
+ parameters = []
+
+ def keyfn(x):
+ return x['ionChannel']
+
+ NUMERIC_CONST_PATTERN = r'''[-+]? (?: (?: \d* \. \d+ ) | (?: \d+ \.? ) )(?: [Ee] [+-]? \d+ ) ?'''
+ rx = re.compile(NUMERIC_CONST_PATTERN, re.VERBOSE)
+
+ def get_cond_density(density_string):
+ m = re.match(rx, density_string)
+ return float(m.group())
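+ # e.g. get_cond_density('0.00071 S_per_cm2') -> 0.00071 (hypothetical unit suffix)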
+
+ scaler = ephys.parameterscalers.NrnSegmentLinearScaler()
+ MAP_EREV = {
+ 'Im': 'ek',
+ 'Ih': 'ehcn', # I am not sure of that one
+ 'Nap': 'ena',
+ 'K_P': 'ek',
+ 'K_T': 'ek',
+ 'SK': 'ek',
+ 'SKv3_1': 'ek',
+ 'NaTs': 'ena',
+ 'Kv3_1': 'ek',
+ 'NaV': 'ena',
+ 'Kd': 'ek',
+ 'Kv2like': 'ek',
+ 'Im_v2': 'ek',
+ 'pas': 'e_pas'
+ }
+ for mech_type in ['channelDensity', 'channelDensityNernst']:
+ mechanisms = biophysics[mech_type]
+ for mech in mechanisms:
+ section_list = map_location_name(mech['segmentGroup'])
+ seclist_loc = ephys.locations.NrnSeclistLocation(
+ section_list, seclist_name=section_list)
+
+ def map_name(name):
+ ''' this name has to match the name in the mod file '''
+ reg_name = re.compile(r'gbar_(?P<channel>[\w]+)')
+ m = re.match(reg_name, name)
+ if m:
+ channel = m.group('channel')
+ return 'gbar' + '_' + channel
+ if name[:len('g_pas')] == 'g_pas':
+ ''' special case '''
+ return 'g_pas'
+ assert False, "name %s" % name
+
+ param_name = map_name(mech['id'])
+ # print 'parameters.append(ephys.parameters.NrnSectionParameter(name={}, param_name={}, value_scalar={}, value={}, locations=[{}]))'.format(mech['id'], param_name, scaler, get_cond_density(mech['condDensity']), seclist_loc)
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(
+ name=mech['id'],
+ param_name=param_name,
+ value_scaler=scaler,
+ value=get_cond_density(mech['condDensity']),
+ locations=[seclist_loc]))
+ if mech_type != 'channelDensityNernst':
+ # print 'parameters.append(ephys.parameters.NrnSectionParameter(name={}, param_name={}, value_scalar={}, value={}, locations=[{}]))'.format('erev' + mech['id'], MAP_EREV[mech['ionChannel']], scaler, get_cond_density(mech['erev']), seclist_loc)
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(
+ name='erev' + mech['id'],
+ param_name=MAP_EREV[mech['ionChannel']],
+ value_scaler=scaler,
+ value=get_cond_density(mech['erev']),
+ locations=[seclist_loc]))
+
+ # print ''
+ PARAM_NAME = {'specificCapacitance': 'cm', 'resistivity': 'Ra'}
+ for b_type in ['specificCapacitance', 'resistivity']:
+ for elem in biophysics[b_type]:
+ section = map_location_name(elem['segmentGroup'])
+ section_loc = ephys.locations.NrnSeclistLocation(
+ section, seclist_name=section)
+
+ # print 'parameters.append(ephys.parameters.NrnSectionParameter(name={}, param_name={}, value_scalar={}, value={}, locations=[{}]))'.format(elem['id'], PARAM_NAME[b_type], scaler, get_cond_density(elem['value']), seclist_loc)
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(
+ name=elem['id'],
+ param_name=PARAM_NAME[b_type],
+ value_scaler=scaler,
+ value=get_cond_density(elem['value']),
+ locations=[section_loc]))
+ concentrationModel = biophysics['concentrationModel'][0]
+
+ # print ''
+ for elem in biophysics['species']:
+ section = map_location_name(elem['segmentGroup'])
+ section_loc = ephys.locations.NrnSeclistLocation(
+ section, seclist_name=section)
+ for attribute in ['gamma', 'decay', 'depth', 'minCai']:
+ parameters.append(
+ ephys.parameters.NrnSectionParameter(
+ name=elem['id'],
+ param_name=attribute + '_' + elem['concentrationModel'],
+ value_scaler=scaler,
+ value=get_cond_density(concentrationModel[attribute]),
+ locations=[section_loc]))
+
+ return parameters
+
+
+def create_hoc(neuroml_path, neuroml, morphologies, incr, output_dir):
+ if neuroml_path.endswith('json'):
+ mechanisms, parameters = load_json(neuroml_path)
+
+ else:
+ biophysics = load_neuroml(neuroml_path)
+ mechanisms = define_mechanisms(biophysics)
+ parameters = define_parameters(biophysics)
+
+ for morphology in morphologies:
+ ccell_name = utils.name_ccell(neuroml, morphology)
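+        # note: the jinja2 template is resolved relative to the current working
+        # directory (template_dir='.')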
+ hoc = ephys.create_hoc.create_hoc(
+ mechs=mechanisms,
+ parameters=parameters,
+ template_name='ccell' + str(incr),
+ template_filename='cell_template_compatible.jinja2',
+ template_dir='.',
+            morphology=morphology + '.swc')
+ with open(os.path.join(output_dir, ccell_name + '.hoc'), 'w') as f:
+ f.write(hoc)
+
+
+def convert_to_hoc(config, cells, output_dir):
+ to_convert = cells[['dynamics_params', 'morphology', 'neuroml']]
+ to_convert = to_convert.drop_duplicates()
+ neuroml_config_path = config['components']['biophysical_neuron_models_dir']
+ incr = 0
+ for name, g in tqdm(to_convert.groupby('dynamics_params'), 'creating hoc files'):
+ neuroml_path = os.path.join(neuroml_config_path, name)
+ create_hoc(neuroml_path,
+ list(g['neuroml'])[0],
+ set(g['morphology']), incr, output_dir)
+ incr += 1
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/utils/converters/sonata/__init__.py b/bmtk-vb/build/lib/bmtk/utils/converters/sonata/__init__.py
new file mode 100644
index 0000000..c473e5d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/converters/sonata/__init__.py
@@ -0,0 +1,2 @@
+from .edge_converters import convert_edges
+from .node_converters import convert_nodes
diff --git a/bmtk-vb/build/lib/bmtk/utils/converters/sonata/edge_converters.py b/bmtk-vb/build/lib/bmtk/utils/converters/sonata/edge_converters.py
new file mode 100644
index 0000000..335d4f5
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/converters/sonata/edge_converters.py
@@ -0,0 +1,278 @@
+import os
+from functools import partial
+
+import numpy as np
+import pandas as pd
+import h5py
+
+column_renames = {
+ 'params_file': 'dynamics_params',
+ 'level_of_detail': 'model_type',
+ 'morphology': 'morphology',
+ 'x_soma': 'x',
+ 'y_soma': 'y',
+ 'z_soma': 'z',
+ 'weight_max': 'syn_weight',
+ 'set_params_function': 'model_template'
+}
+
+
+def convert_edges(edges_file, edge_types_file, **params):
+    """Dispatch to the appropriate converter based on the format of the edges file."""
+    is_flat_h5 = False
+    is_new_h5 = False
+    try:
+        h5file = h5py.File(edges_file, 'r')
+        if 'edges' in h5file:
+            is_new_h5 = True
+        elif 'num_syns' in h5file:
+            is_flat_h5 = True
+        h5file.close()
+    except Exception:
+        # not a readable hdf5 file; assume the oldest csv-based (isee engine) format
+        pass
+
+    if is_flat_h5:
+        update_aibs_edges(edges_file, edge_types_file, **params)
+    elif is_new_h5:
+        update_h5_edges(edges_file, edge_types_file, **params)
+    else:
+        # edge_types_file is passed explicitly to match edges_csv2h5's signature
+        edges_csv2h5(edges_file, edge_types_file, **params)
+
+
+def update_edge_types_file(edge_types_file, src_network=None, trg_network=None, output_dir='network'):
+ edge_types_csv = pd.read_csv(edge_types_file, sep=' ')
+
+ # rename required columns
+ edge_types_csv = edge_types_csv.rename(index=str, columns=column_renames)
+
+ edge_types_output_fn = os.path.join(output_dir, '{}_{}_edge_types.csv'.format(src_network, trg_network))
+ edge_types_csv.to_csv(edge_types_output_fn, sep=' ', index=False, na_rep='NONE')
+
+
+def update_h5_edges(edges_file, edge_types_file, src_network=None, population_name=None, trg_network=None,
+ output_dir='network'):
+ population_name = population_name if population_name is not None else '{}_to_{}'.format(src_network, trg_network)
+ input_h5 = h5py.File(edges_file, 'r')
+
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
+
+ edges_output_fn = os.path.join(output_dir, '{}_{}_edges.h5'.format(src_network, trg_network))
+ with h5py.File(edges_output_fn, 'w') as h5:
+ edges_path = '/edges/{}'.format(population_name)
+ h5.copy(input_h5['/edges'], edges_path)
+ grp = h5[edges_path]
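+        # rename datasets and attributes to their SONATA equivalents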
+ grp.move('source_gid', 'source_node_id')
+ grp.move('target_gid', 'target_node_id')
+ grp.move('edge_group', 'edge_group_id')
+
+ if 'network' in grp['source_node_id'].attrs:
+ del grp['source_node_id'].attrs['network']
+ grp['source_node_id'].attrs['node_population'] = src_network
+
+ if 'network' in grp['target_node_id'].attrs:
+ del grp['target_node_id'].attrs['network']
+ grp['target_node_id'].attrs['node_population'] = trg_network
+
+ create_index(input_h5['edges/target_gid'], grp, index_type=INDEX_TARGET)
+ create_index(input_h5['edges/source_gid'], grp, index_type=INDEX_SOURCE)
+
+ update_edge_types_file(edge_types_file, src_network, trg_network, output_dir)
+
+
+def update_aibs_edges(edges_file, edge_types_file, trg_network, src_network, population_name=None, output_dir='output'):
+ population_name = population_name if population_name is not None else '{}_to_{}'.format(src_network, trg_network)
+
+ edges_h5 = h5py.File(edges_file, 'r')
+ src_gids = edges_h5['/src_gids']
+ n_edges = len(src_gids)
+ trg_gids = np.zeros(n_edges, dtype=np.uint64)
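+    # /edge_ptr is a CSR-style index: edge_ptr[i]:edge_ptr[i+1] spans the edges of
+    # target gid i; expand it into an explicit target gid for every edge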
+ start = edges_h5['/edge_ptr'][0]
+ for trg_gid, end in enumerate(edges_h5['/edge_ptr'][1:]):
+ trg_gids[start:end] = [trg_gid]*(end-start)
+ start = end
+
+ edges_output_fn = os.path.join(output_dir, '{}_{}_edges.h5'.format(src_network, trg_network))
+ if not os.path.exists(output_dir):
+ os.mkdir(output_dir)
+
+ with h5py.File(edges_output_fn, 'w') as hf:
+ grp = hf.create_group('/edges/{}'.format(population_name))
+
+ grp.create_dataset('target_node_id', data=trg_gids, dtype='uint64')
+ grp['target_node_id'].attrs['node_population'] = trg_network
+ grp.create_dataset('source_node_id', data=edges_h5['src_gids'], dtype='uint64')
+ grp['source_node_id'].attrs['node_population'] = src_network
+ grp.create_dataset('edge_group_id', data=np.zeros(n_edges), dtype='uint32')
+ grp.create_dataset('edge_group_index', data=np.arange(0, n_edges))
+ grp.create_dataset('edge_type_id', data=edges_h5['edge_types'])
+ grp.create_dataset('0/nsyns', data=edges_h5['num_syns'], dtype='uint32')
+ grp.create_group('0/dynamics_params')
+
+ create_index(trg_gids, grp, index_type=INDEX_TARGET)
+ create_index(src_gids, grp, index_type=INDEX_SOURCE)
+
+ update_edge_types_file(edge_types_file, src_network, trg_network, output_dir)
+
+
+def edges_csv2h5(edges_file, edge_types_file, src_network, src_nodes, src_node_types, trg_network, trg_nodes,
+ trg_node_types, output_dir='network', src_label='location', trg_label='pop_name'):
+ """Used to convert oldest (isee engine) edges files
+
+ :param edges_file:
+ :param edge_types_file:
+ :param src_network:
+ :param src_nodes:
+ :param src_node_types:
+ :param trg_network:
+ :param trg_nodes:
+ :param trg_node_types:
+ :param output_dir:
+ :param src_label:
+ :param trg_label:
+ """
+ column_renames = {
+ 'target_model_id': 'node_type_id',
+ 'weight': 'weight_max',
+ 'weight_function': 'weight_func',
+ }
+
+ edges_h5 = h5py.File(edges_file, 'r')
+ edge_types_df = pd.read_csv(edge_types_file, sep=' ')
+ n_edges = len(edges_h5['src_gids'])
+ n_targets = len(edges_h5['indptr']) - 1
+
+ # rename specified columns in edge-types
+ edge_types_df = edge_types_df.rename(columns=column_renames)
+
+    # Add 'target_query' and 'source_query' columns derived from target_label and source_label
+ def query_col(row, labels, search_col):
+ return '&'.join("{}=='{}'".format(l, row[search_col]) for l in labels)
+ trg_query_fnc = partial(query_col, labels=['node_type_id', trg_label], search_col='target_label')
+ src_query_fnc = partial(query_col, labels=[src_label], search_col='source_label')
+
+ edge_types_df['target_query'] = edge_types_df.apply(trg_query_fnc, axis=1)
+ edge_types_df['source_query'] = edge_types_df.apply(src_query_fnc, axis=1)
+
+ # Add an edge_type_id column
+ edge_types_df['edge_type_id'] = np.arange(100, 100 + len(edge_types_df.index), dtype='uint32')
+
+ nodes_tmp = pd.read_csv(src_nodes, sep=' ', index_col=['id'])
+ node_types_tmp = pd.read_csv(src_node_types, sep=' ')
+ src_nodes_df = pd.merge(nodes_tmp, node_types_tmp, on='model_id')
+
+ nodes_tmp = pd.read_csv(trg_nodes, sep=' ', index_col=['id'])
+ node_types_tmp = pd.read_csv(trg_node_types, sep=' ')
+ trg_nodes_df = pd.merge(nodes_tmp, node_types_tmp, on='model_id')
+
+ # For assigning edge types to each edge. For a given src --> trg pair we need to lookup source_label and
+ # target_label values of the nodes, then use it to find the corresponding edge_types row.
+ print('Processing edge_type_id dataset')
+ edge_types_ids = np.zeros(n_edges, dtype='uint32')
+ edge_types_df = edge_types_df.set_index(['node_type_id', 'target_label', 'source_label'])
+    ten_percent = max(int(n_targets*.1), 1)  # progress-report interval (guard against n_targets < 10)
+ index = 0 # keeping track of row index
+ for trg_gid in xrange(n_targets):
+ # for the target find value node_type_id and target_label
+ nodes_row = trg_nodes_df.loc[trg_gid]
+ model_id = nodes_row['model_id']
+ trg_label_val = nodes_row[trg_label]
+
+ # iterate through all the sources
+ idx_begin = edges_h5['indptr'][trg_gid]
+ idx_end = edges_h5['indptr'][trg_gid+1]
+ for src_gid in edges_h5['src_gids'][idx_begin:idx_end]:
+ # find each source_label, use value to find edge_type_id
+ # TODO: may be faster to filter by model_id, trg_label_val before iterating through the sources
+ src_label_val = src_nodes_df.loc[src_gid][src_label]
+ edge_type_id = edge_types_df.loc[model_id, trg_label_val, src_label_val]['edge_type_id']
+ edge_types_ids[index] = edge_type_id
+ index += 1
+
+ if trg_gid % ten_percent == 0 and trg_gid != 0:
+ print(' processed {} out of {} targets'.format(trg_gid, n_targets))
+
+ # Create the target_gid table
+ print('Creating target_gid dataset')
+ trg_gids = np.zeros(n_edges)
+ for trg_gid in xrange(n_targets):
+ idx_begin = edges_h5['indptr'][trg_gid]
+ idx_end = edges_h5['indptr'][trg_gid+1]
+ trg_gids[idx_begin:idx_end] = [trg_gid]*(idx_end - idx_begin)
+
+ # Save edges.h5
+ edges_output_fn = '{}/{}_{}_edges.h5'.format(output_dir, src_network, trg_network)
+ print('Saving edges to {}.'.format(edges_output_fn))
+ with h5py.File(edges_output_fn, 'w') as hf:
+ hf.create_dataset('edges/target_gid', data=trg_gids, dtype='uint64')
+ hf['edges/target_gid'].attrs['node_population'] = trg_network
+ hf.create_dataset('edges/source_gid', data=edges_h5['src_gids'], dtype='uint64')
+        hf['edges/source_gid'].attrs['node_population'] = src_network
+ hf.create_dataset('edges/index_pointer', data=edges_h5['indptr'])
+ hf.create_dataset('edges/edge_group', data=np.zeros(n_edges), dtype='uint32')
+ hf.create_dataset('edges/edge_group_index', data=np.arange(0, n_edges))
+ hf.create_dataset('edges/edge_type_id', data=edge_types_ids)
+
+ hf.create_dataset('edges/0/nsyns', data=edges_h5['nsyns'], dtype='uint32')
+
+ # Save edge_types.csv
+ update_edge_types_file(edge_types_file, src_network, trg_network, output_dir)
+
+
+INDEX_TARGET = 0
+INDEX_SOURCE = 1
+
+
+def create_index(node_ids_ds, output_grp, index_type=INDEX_TARGET):
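+    """Build a two-level index over a node-id dataset, mirroring the SONATA layout.
+
+    node_id_to_range maps each node id to a row range in range_to_edge_id, and each
+    of those rows holds a contiguous [begin, end) span of edge indices for that node.
+    """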
+ if index_type == INDEX_TARGET:
+ edge_nodes = np.array(node_ids_ds, dtype=np.int64)
+ output_grp = output_grp.create_group('indicies/target_to_source')
+ elif index_type == INDEX_SOURCE:
+ edge_nodes = np.array(node_ids_ds, dtype=np.int64)
+ output_grp = output_grp.create_group('indicies/source_to_target')
+
+ edge_nodes = np.append(edge_nodes, [-1])
+ n_targets = np.max(edge_nodes)
+ ranges_list = [[] for _ in xrange(n_targets + 1)]
+
+ n_ranges = 0
+ begin_index = 0
+ cur_trg = edge_nodes[begin_index]
+ for end_index, trg_gid in enumerate(edge_nodes):
+ if cur_trg != trg_gid:
+ ranges_list[cur_trg].append((begin_index, end_index))
+ cur_trg = int(trg_gid)
+ begin_index = end_index
+ n_ranges += 1
+
+ node_id_to_range = np.zeros((n_targets+1, 2))
+ range_to_edge_id = np.zeros((n_ranges, 2))
+ range_index = 0
+ for node_index, trg_ranges in enumerate(ranges_list):
+ if len(trg_ranges) > 0:
+ node_id_to_range[node_index, 0] = range_index
+ for r in trg_ranges:
+ range_to_edge_id[range_index, :] = r
+ range_index += 1
+ node_id_to_range[node_index, 1] = range_index
+
+ output_grp.create_dataset('range_to_edge_id', data=range_to_edge_id, dtype='uint64')
+ output_grp.create_dataset('node_id_to_range', data=node_id_to_range, dtype='uint64')
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/utils/converters/sonata/node_converters.py b/bmtk-vb/build/lib/bmtk/utils/converters/sonata/node_converters.py
new file mode 100644
index 0000000..befab51
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/converters/sonata/node_converters.py
@@ -0,0 +1,399 @@
+import os
+
+import h5py
+import pandas as pd
+import numpy as np
+
+
+def convert_nodes(nodes_file, node_types_file, **params):
+    is_h5 = False
+    try:
+        h5file = h5py.File(nodes_file, 'r')
+        h5file.close()
+        is_h5 = True
+    except Exception:
+        # not a readable hdf5 file; assume csv
+        pass
+
+    if is_h5:
+        update_h5_nodes(nodes_file, node_types_file, **params)
+        return
+
+    update_csv_nodes(nodes_file, node_types_file, **params)
+
+
+# columns which need to be renamed, key is original name and value is the updated name
+column_renames = {
+ 'id': 'node_id',
+ 'model_id': 'node_type_id',
+ 'electrophysiology': 'dynamics_params',
+ 'level_of_detail': 'model_type',
+ 'morphology': 'morphology',
+ 'params_file': 'dynamics_params',
+ 'x_soma': 'x',
+ 'y_soma': 'y',
+ 'z_soma': 'z'
+}
+
+
+def update_h5_nodes(nodes_file, node_types_file, network_name, output_dir='output',
+ column_order=('node_type_id', 'model_type', 'model_template', 'model_processing', 'dynamics_params',
+ 'morphology')):
+ # open nodes and node-types into a single table
+ input_h5 = h5py.File(nodes_file, 'r')
+
+ output_name = '{}_nodes.h5'.format(network_name)
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir)
+ nodes_output_fn = os.path.join(output_dir, output_name)
+
+ # save nodes hdf5
+ with h5py.File(nodes_output_fn, 'w') as h5:
+ nodes_path = '/nodes/{}'.format(network_name)
+ h5.copy(input_h5['/nodes/'], nodes_path)
+ grp = h5[nodes_path]
+ grp.move('node_gid', 'node_id')
+ grp.move('node_group', 'node_group_id')
+
+ node_types_csv = pd.read_csv(node_types_file, sep=' ')
+
+ node_types_csv = node_types_csv.rename(index=str, columns=column_renames)
+
+ # Change values for model type
+ model_type_map = {
+ 'biophysical': 'biophysical',
+ 'point_IntFire1': 'point_process',
+ 'intfire': 'point_process',
+ 'virtual': 'virtual',
+ 'iaf_psc_alpha': 'nest:iaf_psc_alpha',
+ 'filter': 'virtual'
+ }
+ node_types_csv['model_type'] = node_types_csv.apply(lambda row: model_type_map[row['model_type']], axis=1)
+
+ # Add model_template column
+ def model_template(row):
+ model_type = row['model_type']
+ if model_type == 'biophysical':
+ return 'ctdb:Biophys1.hoc'
+ elif model_type == 'point_process':
+ return 'nrn:IntFire1'
+ else:
+ return 'NONE'
+ node_types_csv['model_template'] = node_types_csv.apply(model_template, axis=1)
+
+ # Add model_processing column
+ def model_processing(row):
+ model_type = row['model_type']
+ if model_type == 'biophysical':
+ return 'aibs_perisomatic'
+ else:
+ return 'NONE'
+ node_types_csv['model_processing'] = node_types_csv.apply(model_processing, axis=1)
+
+ # Reorder columns
+ orig_columns = node_types_csv.columns
+ col_order = [cn for cn in column_order if cn in orig_columns]
+ col_order += [cn for cn in node_types_csv.columns if cn not in column_order]
+ node_types_csv = node_types_csv[col_order]
+
+ # Save node-types csv
+ node_types_output_fn = os.path.join(output_dir, '{}_node_types.csv'.format(network_name))
+ node_types_csv.to_csv(node_types_output_fn, sep=' ', index=False, na_rep='NONE')
+
+
+def update_csv_nodes(nodes_file, node_types_file, network_name, output_dir='network',
+ column_order=('node_type_id', 'model_type', 'model_template', 'model_processing',
+ 'dynamics_params', 'morphology')):
+ # open nodes and node-types into a single table
+ print('loading csv files')
+ nodes_tmp = pd.read_csv(nodes_file, sep=' ')
+ node_types_tmp = pd.read_csv(node_types_file, sep=' ')
+ if 'model_id' in nodes_tmp:
+ nodes_df = pd.merge(nodes_tmp, node_types_tmp, on='model_id')
+ elif 'node_type_id' in nodes_tmp:
+ nodes_df = pd.merge(nodes_tmp, node_types_tmp, on='node_type_id')
+ else:
+ raise Exception('Could not find column to merge nodes and node_types')
+
+ n_nodes = len(nodes_df.index)
+
+ # rename required columns
+ nodes_df = nodes_df.rename(index=str, columns=column_renames)
+
+ # Old versions of node_type_id may be set to strings/floats, convert to integers
+ dtype_ntid = nodes_df['node_type_id'].dtype
+ if dtype_ntid == 'object':
+ # if string, move model_id to pop_name and create an integer node_type_id column
+ if 'pop_name' in nodes_df:
+ nodes_df = nodes_df.drop('pop_name', axis=1)
+
+ nodes_df = nodes_df.rename(index=str, columns={'node_type_id': 'pop_name'})
+
+ ntid_map = {pop_name: indx for indx, pop_name in enumerate(nodes_df['pop_name'].unique())}
+ nodes_df['node_type_id'] = nodes_df.apply(lambda row: ntid_map[row['pop_name']], axis=1)
+
+ elif dtype_ntid == 'float64':
+ nodes_df['node_type_id'] = nodes_df['node_type_id'].astype('uint64')
+
+    # Divide the columns between the nodes h5 and the node-types csv, and for the h5 determine which columns are
+    # valid for each node-type. The rules are:
+    # 1. If all values are the same for a node-type-id, the column belongs in the node_types csv. If there's any
+    #    intra-node-type heterogeneity then the column belongs in the nodes h5.
+    # 2. For nodes h5 columns, a column belongs to a node-type-id if it contains at least one non-null value
+ print('parsing input')
+ opt_columns = [n for n in nodes_df.columns if n not in ['node_id', 'node_type_id']]
+ heterogeneous_cols = {cn: False for cn in opt_columns}
+ nonnull_cols = {} # for each node-type, a list of columns that contains at least one non-null value
+ for node_type_id, nt_group in nodes_df.groupby(['node_type_id']):
+ nonnull_cols[node_type_id] = set(nt_group.columns[nt_group.isnull().any() == False].tolist())
+ for col_name in opt_columns:
+ heterogeneous_cols[col_name] |= len(nt_group[col_name].unique()) > 1
+
+ nodes_columns = set(cn for cn, val in heterogeneous_cols.items() if val)
+ nodes_types_columns = [cn for cn, val in heterogeneous_cols.items() if not val]
+
+ # Check for nodes columns that has non-numeric values, these will require some special processing to save to hdf5
+ string_nodes_columns = set()
+ for col_name in nodes_columns:
+ if nodes_df[col_name].dtype == 'object':
+ string_nodes_columns.add(col_name)
+ if len(string_nodes_columns) > 0:
+ print('Warning: column(s) {} have non-numeric values that vary within a node-type and will be stored in h5 format'.format(list(string_nodes_columns)))
+
+    # Divide the nodes columns into groups and create the necessary lookup tables. If two node-types share the same
+    # non-null columns then they belong to the same group
+ grp_idx2cols = {} # group-id --> group-columns
+ grp_cols2idx = {} # group-columns --> group-id
+ grp_id2idx = {} # node-type-id --> group-id
+ group_index = -1
+ for nt_id, cols in nonnull_cols.items():
+ group_columns = sorted(list(nodes_columns & cols))
+ col_key = tuple(group_columns)
+ if col_key in grp_cols2idx:
+ grp_id2idx[nt_id] = grp_cols2idx[col_key]
+ else:
+ group_index += 1
+ grp_cols2idx[col_key] = group_index
+ grp_idx2cols[group_index] = group_columns
+ grp_id2idx[nt_id] = group_index
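+    # e.g. two node-types that populate exactly the same per-node columns share one
+    # group id, while a node-type with a different column set gets its own group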
+
+    # merge x,y,z columns, if they exist, into a 'positions' dataset
+ grp_pos_cols = {}
+ for grp_idx, cols in grp_idx2cols.items():
+ pos_list = []
+ for coord in ['x', 'y', 'z']:
+ if coord in cols:
+                pos_list.append(coord)
+ grp_idx2cols[grp_idx].remove(coord)
+ if len(pos_list) > 0:
+ grp_pos_cols[grp_idx] = pos_list
+
+ # Create the node_group and node_group_index columns
+ nodes_df['__bmtk_node_group'] = nodes_df.apply(lambda row: grp_id2idx[row['node_type_id']], axis=1)
+ nodes_df['__bmtk_node_group_index'] = [0]*n_nodes
+ for grpid in grp_idx2cols.keys():
+ group_size = len(nodes_df[nodes_df['__bmtk_node_group'] == grpid])
+ nodes_df.loc[nodes_df['__bmtk_node_group'] == grpid, '__bmtk_node_group_index'] = range(group_size)
+
+ # Save nodes.h5 file
+ nodes_output_fn = os.path.join(output_dir, '{}_nodes.h5'.format(network_name))
+ node_types_output_fn = os.path.join(output_dir, '{}_node_types.csv'.format(network_name))
+ if not os.path.exists(output_dir):
+ os.mkdir(output_dir)
+
+ print('Creating {}'.format(nodes_output_fn))
+ with h5py.File(nodes_output_fn, 'w') as hf:
+ grp = hf.create_group('/nodes/{}'.format(network_name))
+ grp.create_dataset('node_id', data=nodes_df['node_id'], dtype='uint64')
+ grp.create_dataset('node_type_id', data=nodes_df['node_type_id'], dtype='uint64')
+ grp.create_dataset('node_group_id', data=nodes_df['__bmtk_node_group'], dtype='uint32')
+ grp.create_dataset('node_group_index', data=nodes_df['__bmtk_node_group_index'], dtype='uint64')
+
+ for grpid, cols in grp_idx2cols.items():
+ group_slice = nodes_df[nodes_df['__bmtk_node_group'] == grpid]
+ for col_name in cols:
+ dataset_name = '{}/{}'.format(grpid, col_name)
+ if col_name in string_nodes_columns:
+ # for columns with non-numeric values
+ dt = h5py.special_dtype(vlen=bytes)
+ grp.create_dataset(dataset_name, data=group_slice[col_name], dtype=dt)
+ else:
+ grp.create_dataset(dataset_name, data=group_slice[col_name])
+
+ # special case for positions
+ if grpid in grp_pos_cols:
+                grp.create_dataset('{}/positions'.format(grpid),
+                                   data=group_slice[grp_pos_cols[grpid]].values)
+
+ # Create empty dynamics_params
+ grp.create_group('{}/dynamics_params'.format(grpid))
+
+ # Save the node_types.csv file
+ print('Creating {}'.format(node_types_output_fn))
+ node_types_table = nodes_df[['node_type_id'] + nodes_types_columns]
+ node_types_table = node_types_table.drop_duplicates()
+
+ # Change values for model type
+ model_type_map = {
+ 'biophysical': 'biophysical',
+ 'point_IntFire1': 'point_process',
+ 'virtual': 'virtual',
+ 'intfire': 'point_process',
+ 'filter': 'virtual'
+ }
+ node_types_table['model_type'] = node_types_table.apply(lambda row: model_type_map[row['model_type']], axis=1)
+ if 'set_params_function' in node_types_table:
+ node_types_table = node_types_table.drop('set_params_function', axis=1)
+
+ # Add model_template column
+ def model_template(row):
+ model_type = row['model_type']
+ if model_type == 'biophysical':
+ return 'ctdb:Biophys1.hoc'
+ elif model_type == 'point_process':
+ return 'nrn:IntFire1'
+ else:
+ return 'NONE'
+ node_types_table['model_template'] = node_types_table.apply(model_template, axis=1)
+
+ # Add model_processing column
+ def model_processing(row):
+ model_type = row['model_type']
+ if model_type == 'biophysical':
+ return 'aibs_perisomatic'
+ else:
+ return 'NONE'
+ node_types_table['model_processing'] = node_types_table.apply(model_processing, axis=1)
+
+ # Reorder columns
+ orig_columns = node_types_table.columns
+ col_order = [cn for cn in column_order if cn in orig_columns]
+ col_order += [cn for cn in node_types_table.columns if cn not in column_order]
+ node_types_table = node_types_table[col_order]
+
+ node_types_table.to_csv(node_types_output_fn, sep=' ', index=False, na_rep='NONE')
diff --git a/bmtk-vb/build/lib/bmtk/utils/io/__init__.py b/bmtk-vb/build/lib/bmtk/utils/io/__init__.py
new file mode 100644
index 0000000..aaccbcd
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/io/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+def log_warning(message):
+    """No-op placeholder; io utilities (e.g. CellVarRecorder) route warnings through this hook."""
+    pass
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/utils/io/cell_vars.py b/bmtk-vb/build/lib/bmtk/utils/io/cell_vars.py
new file mode 100644
index 0000000..a5646d7
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/io/cell_vars.py
@@ -0,0 +1,361 @@
+import os
+import h5py
+import numpy as np
+
+from bmtk.utils import io
+from bmtk.utils.sonata.utils import add_hdf5_magic, add_hdf5_version
+
+
+try:
+    from mpi4py import MPI
+    comm = MPI.COMM_WORLD
+    rank = comm.Get_rank()
+    nhosts = comm.Get_size()
+except ImportError:
+    # mpi4py is optional; fall back to single-process defaults so the serial
+    # recorder still works (CellVarRecorderParallel does require mpi4py)
+    comm = None
+    rank = 0
+    nhosts = 1
+
+
+class CellVarRecorder(object):
+ """Used to save cell membrane variables (V, Ca2+, etc) to the described hdf5 format.
+
+    For parallel simulations this class will write to a separate tmp file on each rank, then use the merge method to
+    combine the results. This is less efficient, but doesn't require the user to install mpi4py and build h5py in
+    parallel mode. For better performance use the CellVarRecorderParallel class instead.
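+
+    A minimal single-process sketch (values are illustrative):
+
+        recorder = CellVarRecorder('v_report.h5', tmp_dir='.', variables='v')
+        recorder.add_cell(gid=0, sec_list=[0, 0], seg_list=[0.25, 0.75])
+        recorder.initialize(n_steps=1000, buffer_size=1000)
+        for step in range(1000):
+            recorder.record_cell(0, 'v', seg_vals=[-70.0, -70.0], tstep=step)
+        recorder.flush()
+        recorder.close()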
+ """
+ _io = io
+
+ class DataTable(object):
+ """A small struct to keep track of different */data (and buffer) tables"""
+ def __init__(self, var_name):
+ self.var_name = var_name
+            # If buffering data, buffer_block is an in-memory array whose contents are written to data_block when
+            # filled. If not buffering, buffer_block is an hdf5 dataset and data_block is ignored
+ self.data_block = None
+ self.buffer_block = None
+
+ def __init__(self, file_name, tmp_dir, variables, buffer_data=True, mpi_rank=0, mpi_size=1):
+ self._file_name = file_name
+ self._h5_handle = None
+ self._tmp_dir = tmp_dir
+ self._variables = variables if isinstance(variables, list) else [variables]
+ self._n_vars = len(self._variables) # Used later to keep track if more than one var is saved to the same file.
+
+ self._mpi_rank = mpi_rank
+ self._mpi_size = mpi_size
+ self._tmp_files = []
+ self._saved_file = file_name
+
+ if mpi_size > 1 and not isinstance(self, CellVarRecorderParallel):
+ self._io.log_warning('Was unable to run h5py in parallel (mpi) mode.' +
+ ' Saving of membrane variable(s) may slow down.')
+ tmp_fname = os.path.basename(file_name) # make sure file names don't clash if there are multiple reports
+ self._tmp_files = [os.path.join(tmp_dir, '__bmtk_tmp_cellvars_{}_{}'.format(r, tmp_fname))
+ for r in range(self._mpi_size)]
+ self._file_name = self._tmp_files[self._mpi_rank]
+
+ self._mapping_gids = [] # list of gids in the order they appear in the data
+ self._gid_map = {} # table for looking up the gid offsets
+
+ self._mapping_element_ids = [] # sections
+ self._mapping_element_pos = [] # segments
+ self._mapping_index = [0] # index_pointer
+
+ self._buffer_data = buffer_data
+ self._data_blocks = {var_name: self.DataTable(var_name) for var_name in self._variables}
+ self._last_save_indx = 0 # for buffering, used to keep track of last timestep data was saved to disk
+
+ self._buffer_block_size = 0
+ self._total_steps = 0
+
+ # Keep track of gids across the different ranks
+ self._n_gids_all = 0
+ self._n_gids_local = 0
+ self._gids_beg = 0
+ self._gids_end = 0
+
+ # Keep track of segment counts across the different ranks
+ self._n_segments_all = 0
+ self._n_segments_local = 0
+ self._seg_offset_beg = 0
+ self._seg_offset_end = 0
+
+ self._tstart = 0.0
+ self._tstop = 0.0
+ self._dt = 0.01
+ self._is_initialized = False
+
+ @property
+ def tstart(self):
+ return self._tstart
+
+ @tstart.setter
+ def tstart(self, time_ms):
+ self._tstart = time_ms
+
+ @property
+ def tstop(self):
+ return self._tstop
+
+ @tstop.setter
+ def tstop(self, time_ms):
+ self._tstop = time_ms
+
+ @property
+ def dt(self):
+ return self._dt
+
+ @dt.setter
+ def dt(self, time_ms):
+ self._dt = time_ms
+
+ @property
+ def is_initialized(self):
+ return self._is_initialized
+
+ def _calc_offset(self):
+ self._n_segments_all = self._n_segments_local
+ self._seg_offset_beg = 0
+ self._seg_offset_end = self._n_segments_local
+
+ self._n_gids_all = self._n_gids_local
+ self._gids_beg = 0
+ self._gids_end = self._n_gids_local
+
+ def _create_h5_file(self):
+ self._h5_handle = h5py.File(self._file_name, 'w')
+ add_hdf5_version(self._h5_handle)
+ add_hdf5_magic(self._h5_handle)
+
+ def add_cell(self, gid, sec_list, seg_list):
+ assert(len(sec_list) == len(seg_list))
+ # TODO: Check the same gid isn't added twice
+ n_segs = len(seg_list)
+ self._gid_map[gid] = (self._n_segments_local, self._n_segments_local + n_segs)
+ self._mapping_gids.append(gid)
+ self._mapping_element_ids.extend(sec_list)
+ self._mapping_element_pos.extend(seg_list)
+ self._mapping_index.append(self._mapping_index[-1] + n_segs)
+ self._n_segments_local += n_segs
+ self._n_gids_local += 1
+
+    def _create_big_dataset(self, where, name, shape, dtype):
+        """
+        Create and return a dataset whose storage is not zero-filled at creation
+        time (FILL_TIME_NEVER), avoiding a full write of very large arrays up front.
+        """
+        spaceid = h5py.h5s.create_simple(shape)
+        plist = h5py.h5p.create(h5py.h5p.DATASET_CREATE)
+        plist.set_fill_time(h5py.h5d.FILL_TIME_NEVER)
+        if shape[0] < 500 or shape[1] < 512:
+            chunkshape = shape
+        else:
+            chunkshape = (shape[0] // 500, shape[1] // 512)  # TODO: don't use fixed values?
+        plist.set_chunk(chunkshape)
+        # the low-level h5py api expects a bytes dataset name
+        # (note: the dtype argument is currently unused; data is always NATIVE_FLOAT)
+        dset_name = name if isinstance(name, bytes) else name.encode('utf-8')
+        datasetid = h5py.h5d.create(where.id, dset_name, h5py.h5t.NATIVE_FLOAT, spaceid, plist)
+        return h5py.Dataset(datasetid)
+
+ def initialize(self, n_steps, buffer_size=0):
+ self._calc_offset()
+ self._create_h5_file()
+
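+        # /mapping describes how data columns map onto cells: gids plus index_pointer
+        # form a CSR-style lookup (columns index_pointer[i]:index_pointer[i+1] belong
+        # to gids[i]); element_id holds section ids, element_pos segment positions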
+ var_grp = self._h5_handle.create_group('/mapping')
+ var_grp.create_dataset('gids', shape=(self._n_gids_all,), dtype=np.uint)
+ var_grp.create_dataset('element_id', shape=(self._n_segments_all,), dtype=np.uint)
+ var_grp.create_dataset('element_pos', shape=(self._n_segments_all,), dtype=np.float)
+ var_grp.create_dataset('index_pointer', shape=(self._n_gids_all+1,), dtype=np.uint64)
+ var_grp.create_dataset('time', data=[self.tstart, self.tstop, self.dt])
+
+ var_grp['gids'][self._gids_beg:self._gids_end] = self._mapping_gids
+ var_grp['element_id'][self._seg_offset_beg:self._seg_offset_end] = self._mapping_element_ids
+ var_grp['element_pos'][self._seg_offset_beg:self._seg_offset_end] = self._mapping_element_pos
+ var_grp['index_pointer'][self._gids_beg:(self._gids_end+1)] = self._mapping_index
+
+ self._total_steps = n_steps
+ self._buffer_block_size = buffer_size
+ if not self._buffer_data:
+ # If data is not being buffered and instead written to the main block, we have to add a rank offset
+ # to the gid offset
+ for gid, gid_offset in self._gid_map.items():
+ self._gid_map[gid] = (gid_offset[0] + self._seg_offset_beg, gid_offset[1] + self._seg_offset_beg)
+
+ for var_name, data_tables in self._data_blocks.items():
+ # If users are trying to save multiple variables in the same file put data table in its own /{var} group
+ # (not sonata compliant). Otherwise the data table is located at the root
+ data_grp = self._h5_handle if self._n_vars == 1 else self._h5_handle.create_group('/{}'.format(var_name))
+ if self._buffer_data:
+ # Set up in-memory block to buffer recorded variables before writing to the dataset
+ data_tables.buffer_block = np.zeros((buffer_size, self._n_segments_local), dtype=np.float)
+ # data_tables.data_block = data_grp.create_dataset('data', shape=(n_steps, self._n_segments_all),
+ # dtype=np.float, chunks=True)
+ data_tables.data_block = self._create_big_dataset(data_grp, 'data', (n_steps, self._n_segments_all), np.float)
+ data_tables.data_block.attrs['variable_name'] = var_name
+ else:
+ # Since we are not buffering data, we just write directly to the on-disk dataset
+ data_tables.buffer_block = data_grp.create_dataset('data', shape=(n_steps, self._n_segments_all),
+ dtype=np.float, chunks=True)
+ data_tables.buffer_block.attrs['variable_name'] = var_name
+
+ self._is_initialized = True
+
+ def record_cell(self, gid, var_name, seg_vals, tstep):
+ """Record cell parameters.
+
+ :param gid: gid of cell.
+ :param var_name: name of variable being recorded.
+ :param seg_vals: list of all segment values
+ :param tstep: time step
+ """
+ gid_beg, gid_end = self._gid_map[gid]
+ buffer_block = self._data_blocks[var_name].buffer_block
+ update_index = (tstep - self._last_save_indx)
+ buffer_block[update_index, gid_beg:gid_end] = seg_vals
+
+ def record_cell_block(self, gid, var_name, seg_vals):
+ """Save cell parameters one block at a time
+
+ :param gid: gid of cell.
+ :param var_name: name of variable being recorded.
+ :param seg_vals: A vector/matrix of values being recorded
+ """
+ gid_beg, gid_end = self._gid_map[gid]
+ buffer_block = self._data_blocks[var_name].buffer_block
+ if gid_end - gid_beg == 1:
+ buffer_block[:, gid_beg] = seg_vals
+ else:
+ buffer_block[:, gid_beg:gid_end] = seg_vals
+
+ def flush(self):
+ """Move data from memory to dataset"""
+ if self._buffer_data:
+ blk_beg = self._last_save_indx
+ blk_end = blk_beg + self._buffer_block_size
+            if blk_end > self._total_steps:
+                # the simulation may not end exactly on a block boundary
+                blk_end = self._total_steps
+ seg_beg, seg_end = self._seg_offset_beg, self._seg_offset_end
+
+ block_size = blk_end - blk_beg
+ self._last_save_indx += block_size
+
+ for _, data_table in self._data_blocks.items():
+ dat, buf = data_table.data_block, data_table.buffer_block
+ dat[blk_beg:blk_end, seg_beg:seg_end] = buf[:block_size, :]
+
+ def close(self):
+ self._h5_handle.close()
+
+ def merge(self):
+ if self._mpi_size > 1 and self._mpi_rank == 0:
+ h5final = h5py.File(self._saved_file, 'w')
+ tmp_h5_handles = [h5py.File(name, 'r') for name in self._tmp_files]
+
+ # Find the gid and segment offsets for each temp h5 file
+ gid_ranges = [] # list of (gid-beg, gid-end)
+ gid_offset = 0
+ total_gid_count = 0 # total number of gids across all ranks
+
+ seg_ranges = []
+ seg_offset = 0
+ total_seg_count = 0 # total number of segments across all ranks
+ time_ds = None
+ for h5_tmp in tmp_h5_handles:
+ seg_count = len(h5_tmp['/mapping/element_pos'])
+ seg_ranges.append((seg_offset, seg_offset+seg_count))
+ seg_offset += seg_count
+ total_seg_count += seg_count
+
+ gid_count = len(h5_tmp['mapping/gids'])
+ gid_ranges.append((gid_offset, gid_offset+gid_count))
+ gid_offset += gid_count
+ total_gid_count += gid_count
+
+ time_ds = h5_tmp['mapping/time']
+
+ mapping_grp = h5final.create_group('mapping')
+            if time_ds is not None:
+ mapping_grp.create_dataset('time', data=time_ds)
+ element_id_ds = mapping_grp.create_dataset('element_id', shape=(total_seg_count,), dtype=np.uint)
+ el_pos_ds = mapping_grp.create_dataset('element_pos', shape=(total_seg_count,), dtype=np.float)
+ gids_ds = mapping_grp.create_dataset('gids', shape=(total_gid_count,), dtype=np.uint)
+ index_pointer_ds = mapping_grp.create_dataset('index_pointer', shape=(total_gid_count+1,), dtype=np.uint)
+
+ # combine the /mapping datasets
+ for i, h5_tmp in enumerate(tmp_h5_handles):
+ tmp_mapping_grp = h5_tmp['mapping']
+ beg, end = seg_ranges[i]
+ element_id_ds[beg:end] = tmp_mapping_grp['element_id']
+ el_pos_ds[beg:end] = tmp_mapping_grp['element_pos']
+
+ # shift the index pointer values
+ index_pointer = np.array(tmp_mapping_grp['index_pointer'])
+ update_index = beg + index_pointer
+
+ beg, end = gid_ranges[i]
+ gids_ds[beg:end] = tmp_mapping_grp['gids']
+ index_pointer_ds[beg:(end+1)] = update_index
+
+ # combine the /var/data datasets
+ for var_name in self._variables:
+ data_name = '/data' if self._n_vars == 1 else '/{}/data'.format(var_name)
+ # data_name = '/{}/data'.format(var_name)
+ var_data = h5final.create_dataset(data_name, shape=(self._total_steps, total_seg_count), dtype=np.float)
+ var_data.attrs['variable_name'] = var_name
+ for i, h5_tmp in enumerate(tmp_h5_handles):
+ beg, end = seg_ranges[i]
+ var_data[:, beg:end] = h5_tmp[data_name]
+
+ for tmp_file in self._tmp_files:
+ os.remove(tmp_file)
+
+
+class CellVarRecorderParallel(CellVarRecorder):
+ """
+ Unlike the parent, this take advantage of parallel h5py to writting to the results file across different ranks.
+
+ """
+ def __init__(self, file_name, tmp_dir, variables, buffer_data=True, mpi_rank=0, mpi_size=1):
+ super(CellVarRecorderParallel, self).__init__(
+ file_name, tmp_dir, variables, buffer_data=buffer_data,
+ mpi_rank=mpi_rank, mpi_size=mpi_size
+ )
+
+ def _calc_offset(self):
+        # iterate through the ranks in order, letting rank r derive its offsets from
+        # rank r-1 (a serialized prefix-sum over the local segment/gid counts)
+ for r in range(comm.Get_size()):
+ if rank == r:
+ if rank > 0:
+ # get num of segments and gids from prev. rank and calculate offsets
+ offsets = np.empty(2, dtype=np.uint)
+ comm.Recv([offsets, MPI.UNSIGNED_INT], source=(r-1))
+ self._seg_offset_beg = offsets[0]
+ self._gids_beg = offsets[1]
+
+ # for some reason, np.uint64 + int = np.float64, so need cast to int
+ self._seg_offset_end = int(self._seg_offset_beg) \
+ + int(self._n_segments_local)
+ self._gids_end = int(self._gids_beg) + int(self._n_gids_local)
+
+ if rank < (nhosts - 1):
+ # pass the next rank its offset
+ offsets = np.array([self._seg_offset_end, self._gids_end], dtype=np.uint)
+ comm.Send([offsets, MPI.UNSIGNED_INT], dest=(rank+1))
+
+ comm.Barrier()
+
+ # broadcast the total num of gids/segments from the final rank to all the others
+ if rank == (nhosts - 1):
+ total_counts = np.array([self._seg_offset_end, self._gids_end], dtype=np.uint)
+ else:
+ total_counts = np.empty(2, dtype=np.uint)
+
+ comm.Bcast(total_counts, root=(nhosts-1))
+ self._n_segments_all = total_counts[0]
+ self._n_gids_all = total_counts[1]
+
+ def _create_h5_file(self):
+ self._h5_handle = h5py.File(self._file_name, 'w', driver='mpio', comm=MPI.COMM_WORLD)
+ add_hdf5_version(self._h5_handle)
+ add_hdf5_magic(self._h5_handle)
+
+ def merge(self):
+ pass
diff --git a/bmtk-vb/build/lib/bmtk/utils/io/firing_rates.py b/bmtk-vb/build/lib/bmtk/utils/io/firing_rates.py
new file mode 100644
index 0000000..827cc21
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/io/firing_rates.py
@@ -0,0 +1,35 @@
+import pandas as pd
+import csv
+
+class RatesInput(object):
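+    """Reads a space-separated rates file with 'gid' and 'firing_rate' columns for a single node population."""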
+ def __init__(self, params):
+ self._rates_df = pd.read_csv(params['rates'], sep=' ')
+ self._node_population = params['node_set']
+ self._rates_dict = {int(row['gid']): row['firing_rate'] for _, row in self._rates_df.iterrows()}
+
+ @property
+ def populations(self):
+ return [self._node_population]
+
+ def get_rate(self, gid):
+ return self._rates_dict[gid]
+
+
+class RatesWriter(object):
+ def __init__(self, file_name):
+ self._file_name = file_name
+ self._fhandle = open(file_name, 'a')
+ self._csv_writer = csv.writer(self._fhandle, delimiter=' ')
+
+ def add_rates(self, gid, times, rates):
+ for t, r in zip(times, rates):
+ self._csv_writer.writerow([gid, t, r])
+ self._fhandle.flush()
+
+ def to_csv(self, file_name):
+ pass
+
+ def to_h5(self, file_name):
+ raise NotImplementedError
+
+
diff --git a/bmtk-vb/build/lib/bmtk/utils/io/spike_trains.py b/bmtk-vb/build/lib/bmtk/utils/io/spike_trains.py
new file mode 100644
index 0000000..73d9bfd
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/io/spike_trains.py
@@ -0,0 +1,312 @@
+import os
+import sys
+import csv
+
+import h5py
+import pandas as pd
+import numpy as np
+from bmtk.utils.sonata.utils import add_hdf5_magic, add_hdf5_version
+
+
+class SpikeTrainWriter(object):
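+    """Collects spikes as (time, gid) pairs in per-rank temp csv files, then merges
+    (and optionally sorts) them into a final csv or hdf5 file.
+
+    A minimal single-rank sketch (file names are illustrative):
+
+        writer = SpikeTrainWriter(tmp_dir='.')
+        writer.add_spike(time=10.5, gid=0)
+        writer.add_spike(time=12.0, gid=1)
+        writer.to_hdf5('spikes.h5', sort_order='time')
+        writer.close()
+    """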
+ class TmpFileMetadata(object):
+ def __init__(self, file_name, sort_order=None):
+ self.file_name = file_name
+ self.sort_order = sort_order
+
+ def __init__(self, tmp_dir, mpi_rank=0, mpi_size=1):
+        # For NEST/NEURON based simulations it is preferable not to use mpi4py, so let the parent simulator determine
+ # MPI rank and size
+ self._mpi_rank = mpi_rank
+ self._mpi_size = mpi_size
+
+        # used to temporarily save spike files, since for large simulations keeping all spikes in memory can crash
+        # the system. The user is required to create the directory
+ self._tmp_dir = tmp_dir
+ if self._tmp_dir is None or not os.path.exists(self._tmp_dir):
+            raise Exception('Directory path {} does not exist'.format(self._tmp_dir))
+ self._all_tmp_files = [self.TmpFileMetadata(self._get_tmp_filename(r)) for r in range(mpi_size)]
+ # TODO: Determine best buffer size.
+ self._tmp_file_handle = open(self._all_tmp_files[mpi_rank].file_name, 'w')
+
+        self._tmp_spikes_handles = []  # used when sorting/merging multiple files
+ self._spike_count = -1
+
+        # NEST gid files use tab separators and a different column order for tmp spike files.
+ self.delimiter = ' ' # delimiter for temporary file
+ self.time_col = 0
+ self.gid_col = 1
+
+ def _get_tmp_filename(self, rank):
+ return os.path.join(self._tmp_dir, '_bmtk_tmp_spikes_{}.csv'.format(rank))
+
+ def _count_spikes(self):
+ if self._mpi_rank == 0:
+ if self._spike_count > -1:
+ return self._spike_count
+
+ self._spike_count = 0
+ for tmp_file in self._all_tmp_files:
+ with open(tmp_file.file_name, 'r') as csvfile:
+ csv_reader = csv.reader(csvfile, delimiter=self.delimiter)
+ self._spike_count += sum(1 for _ in csv_reader)
+
+ def _sort_tmp_file(self, filedata, sort_order):
+ # For now load spikes into pandas, it's the fastest way but may be an issue with memory
+ if sort_order is None or filedata.sort_order == sort_order:
+ return
+
+ file_name = filedata.file_name
+ tmp_spikes_ds = pd.read_csv(file_name, sep=' ', names=['time', 'gid'])
+ tmp_spikes_ds = tmp_spikes_ds.sort_values(by=sort_order)
+ tmp_spikes_ds.to_csv(file_name, sep=' ', index=False, header=False)
+ filedata.sort_order = sort_order
+
+    def _next_spike(self, rank):
+        try:
+            val = next(self._tmp_spikes_handles[rank])
+            # csv.reader yields strings; convert so the merge below compares numerically
+            return float(val[0]), float(val[1]), rank
+        except StopIteration:
+            return None
+
+ def add_spike(self, time, gid):
+ self._tmp_file_handle.write('{:.6f} {}\n'.format(time, gid))
+
+ def add_spikes(self, times, gid):
+ for t in times:
+ self.add_spike(t, gid)
+
+ def add_spikes_file(self, file_name, sort_order=None):
+ self._all_tmp_files.append(self.TmpFileMetadata(file_name, sort_order))
+
+ def _sort_files(self, sort_order, sort_column, file_write_fnc):
+ self._tmp_spikes_handles = []
+ for fdata in self._all_tmp_files:
+ self._sort_tmp_file(fdata, sort_order)
+ self._tmp_spikes_handles.append(csv.reader(open(fdata.file_name, 'r'), delimiter=self.delimiter))
+
+ spikes = []
+        for rank in range(len(self._tmp_spikes_handles)):
+ spike = self._next_spike(rank)
+ if spike is not None:
+ spikes.append(spike)
+
+ # Iterate through all the ranks and find the first spike. Write that spike/gid to the output, then
+ # replace that data point with the next spike on the selected rank
+ indx = 0
+ while spikes:
+ # find which rank has the first spike
+ selected_index = 0
+ selected_val = spikes[0][sort_column]
+ for i, spike in enumerate(spikes[1:]):
+ if spike[sort_column] < selected_val:
+ selected_index = i + 1
+ selected_val = spike[sort_column]
+
+ # write the spike to the file
+ row = spikes.pop(selected_index)
+ file_write_fnc(float(row[self.time_col]), int(row[self.gid_col]), indx)
+ indx += 1
+
+ # get the next spike on that rank and replace in spikes table
+ another_spike = self._next_spike(row[2])
+ if another_spike is not None:
+ spikes.append(another_spike)
+
+ def _merge_files(self, file_write_fnc):
+ indx = 0
+ for fdata in self._all_tmp_files:
+ if not os.path.exists(fdata.file_name):
+ continue
+
+ with open(fdata.file_name, 'r') as csv_file:
+ csv_reader = csv.reader(csv_file, delimiter=self.delimiter)
+ for row in csv_reader:
+ file_write_fnc(float(row[self.time_col]), int(row[self.gid_col]), indx)
+ indx += 1
+
+ def _to_file(self, file_name, sort_order, file_write_fnc):
+ if sort_order is None:
+ sort_column = 0
+ elif sort_order == 'time':
+ sort_column = self.time_col
+ elif sort_order == 'gid':
+ sort_column = self.gid_col
+ else:
+ raise Exception('Unknown sort order {}'.format(sort_order))
+
+ # TODO: Need to make sure an MPI_Barrier is called beforehand
+ self._tmp_file_handle.close()
+ if self._mpi_rank == 0:
+ if sort_order is not None:
+ self._sort_files(sort_order, sort_column, file_write_fnc)
+ else:
+ self._merge_files(file_write_fnc)
+
+ def to_csv(self, csv_file, sort_order=None, gid_map=None):
+ # TODO: Need to call flush and then barrier
+ if self._mpi_rank == 0:
+ # For the single rank case don't just copy the tmp-csv to the new name. It will fail if user calls to_hdf5
+ # or to_nwb after calling to_csv.
+ self._count_spikes()
+ csv_handle = open(csv_file, 'w')
+ csv_writer = csv.writer(csv_handle, delimiter=' ')
+
+ def file_write_fnc_identity(time, gid, indx):
+ csv_writer.writerow([time, gid])
+
+ def file_write_fnc_transform(time, gid, indx):
+ # For the case when NEURON/NEST ids don't match with the user's gid table
+ csv_writer.writerow([time, gid_map[gid]])
+
+ file_write_fnc = file_write_fnc_identity if gid_map is None else file_write_fnc_transform
+ self._to_file(csv_file, sort_order, file_write_fnc)
+ csv_handle.close()
+
+ # TODO: Let user pass in in barrier and use it here
+
+ def to_nwb(self, nwb_file):
+ raise NotImplementedError
+
+ def to_hdf5(self, hdf5_file, sort_order=None, gid_map=None):
+ if self._mpi_rank == 0:
+ with h5py.File(hdf5_file, 'w') as h5:
+ add_hdf5_magic(h5)
+ add_hdf5_version(h5)
+
+ self._count_spikes()
+ spikes_grp = h5.create_group('/spikes')
+ spikes_grp.attrs['sorting'] = 'none' if sort_order is None else sort_order
+ time_ds = spikes_grp.create_dataset('timestamps', shape=(self._spike_count,), dtype=np.float)
+ gid_ds = spikes_grp.create_dataset('gids', shape=(self._spike_count,), dtype=np.uint64)
+
+ def file_write_fnc_identity(time, gid, indx):
+ time_ds[indx] = time
+ gid_ds[indx] = gid
+
+ def file_write_fnc_transform(time, gid, indx):
+ time_ds[indx] = time
+ gid_ds[indx] = gid_map[gid]
+
+ file_write_fnc = file_write_fnc_identity if gid_map is None else file_write_fnc_transform
+ self._to_file(hdf5_file, sort_order, file_write_fnc)
+
+ # TODO: Need to make sure a barrier is used here (before close is called)
+
+ def flush(self):
+ self._tmp_file_handle.flush()
+
+ def close(self):
+ if self._mpi_rank == 0:
+ for tmp_file in self._all_tmp_files:
+ if os.path.exists(tmp_file.file_name):
+ os.remove(tmp_file.file_name)
+
+
+class PoissonSpikesGenerator(object):
+ def __init__(self, gids, firing_rate, tstart=0.0, tstop=1000.0):
+ self._gids = gids
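+        # convert from Hz to spikes/ms, since spike times are generated in milliseconds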
+ self._firing_rate = firing_rate / 1000.0
+ self._tstart = tstart
+ self._tstop = tstop
+
+ def to_hdf5(self, file_name, sort_order='gid'):
+        if sort_order == 'gid':
+            gid_list = []
+            times_list = []
+ for gid in self._gids:
+ c_time = self._tstart
+ while c_time < self._tstop:
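+                    # inter-spike intervals of a homogeneous Poisson process are
+                    # exponentially distributed; sample one by inverse-transform of a
+                    # uniform variate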
+ interval = -np.log(1.0 - np.random.uniform()) / self._firing_rate
+ c_time += interval
+ gid_list.append(gid)
+ times_list.append(c_time)
+
+ with h5py.File(file_name, 'w') as h5:
+ h5.create_dataset('/spikes/gids', data=gid_list, dtype=np.uint)
+ h5.create_dataset('/spikes/timestamps', data=times_list, dtype=np.float)
+ h5['/spikes'].attrs['sorting'] = 'by_gid'
+
+ else:
+ raise NotImplementedError
+
+
+class SpikesInput(object):
+ def get_spikes(self, gid):
+ raise NotImplementedError()
+
+ @staticmethod
+ def load(name, module, input_type, params):
+ module_lc = module.lower()
+ if module_lc == 'nwb':
+ return SpikesInputNWBv1(name, module, input_type, params)
+ elif module_lc == 'h5' or module_lc == 'hdf5':
+ return SpikesInputH5(name, module, input_type, params)
+ elif module_lc == 'csv':
+ return SpikesInputCSV(name, module, input_type, params)
+ else:
+ raise Exception('Unable to load spikes for module type {}'.format(module))
+
+
+class SpikesInputNWBv1(SpikesInput):
+ def __init__(self, name, module, input_type, params):
+ self.input_file = params['input_file']
+ self._h5_handle = h5py.File(self.input_file, 'r')
+
+ if 'trial' in params:
+ self.trial = params['trial']
+ self._spike_trains_handles = {}
+ for node_id, h5grp in self._h5_handle['processing'][self.trial]['spike_train'].items():
+ self._spike_trains_handles[int(node_id)] = h5grp['data']
+
+ elif '/spikes' in self._h5_handle:
+            raise Exception("found '/spikes' in the nwb file but no 'trial' parameter was given; this layout is not supported")
+
+ def get_spikes(self, gid):
+ return np.array(self._spike_trains_handles[gid])
+
+
+class SpikesInputH5(SpikesInput):
+ def __init__(self, name, module, input_type, params):
+ self._input_file = params['input_file']
+ self._h5_handle = h5py.File(self._input_file, 'r')
+ self._sort_order = self._h5_handle['/spikes'].attrs.get('sorting', None)
+ if sys.version_info[0] >= 3 and isinstance(self._sort_order, bytes):
+ # h5py attributes return str in py 2, bytes in py 3
+ self._sort_order = self._sort_order.decode()
+
+ self._gid_ds = self._h5_handle['/spikes/gids']
+ self._timestamps_ds = self._h5_handle['/spikes/timestamps']
+
+ self._gid_indicies = {}
+ self._build_indicies()
+
+ def _build_indicies(self):
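+        # map each gid to the slice of rows holding its spikes; only possible when
+        # the file was written sorted by gid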
+ if self._sort_order == 'by_gid':
+ indx_beg = 0
+ c_gid = self._gid_ds[0]
+ for indx, gid in enumerate(self._gid_ds):
+ if gid != c_gid:
+ self._gid_indicies[c_gid] = slice(indx_beg, indx)
+ c_gid = gid
+ indx_beg = indx
+ self._gid_indicies[c_gid] = slice(indx_beg, indx+1)
+
+ else:
+ raise NotImplementedError
+
+ def get_spikes(self, gid):
+ if gid in self._gid_indicies:
+ return self._timestamps_ds[self._gid_indicies[gid]]
+ else:
+ return []
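+
+    # Expected file layout (as produced by PoissonSpikesGenerator.to_hdf5 above):
+    #     /spikes             group; attrs['sorting'] == 'by_gid' when sorted
+    #     /spikes/gids        (uint)  spike gids, in contiguous runs per gid
+    #     /spikes/timestamps  (float) spike times, row-aligned with gids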
+
+
+class SpikesInputCSV(SpikesInput):
+ def __init__(self, name, module, input_type, params):
+ self._spikes_df = pd.read_csv(params['input_file'], index_col='gid', sep=' ')
+
+ def get_spikes(self, gid):
+        spike_times_str = self._spikes_df.loc[gid]['spike-times']  # label-based lookup; the index column is 'gid'
+ return np.array(spike_times_str.split(','), dtype=float)
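+
+    # Expected file layout (space separated, inferred from the parsing above):
+    #     gid spike-times
+    #     0 12.5,40.1,77.3
+    #     1 5.0,88.2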
diff --git a/bmtk-vb/build/lib/bmtk/utils/io/tabular_network.py b/bmtk-vb/build/lib/bmtk/utils/io/tabular_network.py
new file mode 100644
index 0000000..9b594bd
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/io/tabular_network.py
@@ -0,0 +1,350 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import h5py
+
+
+"""
+An interface for reading network files.
+
+We are continuing to develop network file format this interface is a way to provide backward compatibility. This
+namespace should not be instantiated directly, and updates to the network standard should be given their own. The
+class TabularNetwork, NodeRow, NodesFile, EdgeRow and EdgesFile are abstract and should be overridden.
+
+In general the developed formats have all take schema:
+ * Networks are split between nodes (NodesFile) and edges (EdgesFile)
+ * Each type is made up of rows (NodeRow, EdgeRow)
+ * Each row has its own set column properties (ColumnProperty), depending on the file/group it belongs too.
+ * Each row also has properties from (edge/node)-type metadata.
+"""
+
+
+##########################################
+# Interface files
+##########################################
+class TabularNetwork(object):
+ """Factory for loading nodes and edges files."""
+ @staticmethod
+ def load_nodes(nodes_file, node_types_file):
+ raise NotImplementedError()
+
+ @staticmethod
+ def load_edges(edges_file, edge_types_files):
+ raise NotImplementedError()
+
+
+class NodeRow(object):
+ """Node file row.
+
+ Each row represents node/cell/population in a network and can include edge-type metadata and dynamics_params when
+ applicable. The only mandatory for a NodeRow is a unique gid (i.e cell_id, node_id). Properties can be accessed
+ like a dictionary.
+ """
+ def __init__(self, gid, node_props, types_props):
+ self._gid = gid
+ self._node_props = node_props # properties from the csv/hdf5 file
+ self._node_type_props = types_props # properties from the edge_types metadata file
+
+ @property
+ def gid(self):
+ return self._gid
+
+ @property
+ def with_dynamics_params(self):
+ """Set to true if dynamics_params subgroup attached to HDF5 properities"""
+ raise NotImplementedError()
+
+ @property
+ def dynamics_params(self):
+ raise NotImplementedError()
+
+ @property
+ def columns(self):
+        return list(self._node_props.keys()) + list(self._node_type_props.keys())
+
+ @property
+ def node_props(self):
+ return self._node_props
+
+ @property
+ def node_type_props(self):
+ return self._node_type_props
+
+ def get(self, prop_key, default=None):
+        # First see if the property exists in the node file, then check node-types
+ if prop_key in self._node_props:
+ return self._node_props[prop_key]
+ elif prop_key in self._node_type_props:
+ return self._node_type_props[prop_key]
+ else:
+ return default
+
+ def __contains__(self, prop_key):
+ return prop_key in self._node_props.keys() or prop_key in self._node_type_props.keys()
+
+ def __getitem__(self, prop_key):
+ val = self.get(prop_key)
+ if val is None:
+ raise Exception('Invalid property key {}.'.format(prop_key))
+ return val
+
+ def __repr__(self):
+ return build_row_repr(self)
+
+
+class EdgeRow(object):
+ """Representation of a edge.
+
+ Edges must include a source and target node gid. Other properties, from the edges or edge-types files, can be
+ directly accessed like a dictionary.
+ """
+    def __init__(self, trg_gid, src_gid, edge_props=None, edge_type_props=None):
+        self._trg_gid = trg_gid
+        self._src_gid = src_gid
+        # avoid mutable default arguments; a shared default dict would leak state across instances
+        self._edge_props = edge_props if edge_props is not None else {}
+        self._edge_type_props = edge_type_props if edge_type_props is not None else {}
+
+ @property
+ def target_gid(self):
+ return self._trg_gid
+
+ @property
+ def source_gid(self):
+ return self._src_gid
+
+ @property
+ def with_dynamics_params(self):
+ raise NotImplementedError()
+
+ @property
+ def dynamics_params(self):
+ raise NotImplementedError()
+
+ @property
+ def columns(self):
+        return list(self._edge_props.keys()) + list(self._edge_type_props.keys())
+
+ @property
+ def edge_props(self):
+ return self._edge_props
+
+ def __contains__(self, prop_key):
+ return prop_key in self._edge_props.keys() or prop_key in self._edge_type_props.keys()
+
+ def __getitem__(self, prop_key):
+ if prop_key in self._edge_props:
+ return self._edge_props[prop_key]
+ elif prop_key in self._edge_type_props:
+ return self._edge_type_props[prop_key]
+ else:
+ raise Exception('Invalid property name {}.'.format(prop_key))
+
+ def __repr__(self):
+ return build_row_repr(self)
+
+
+class NodesFile(object):
+ """Class for reading and iterating properties of each node in a nodes/node-types file.
+
+    Use the load method to load in the necessary node files. Nodes can be accessed using an iterator:
+        nodes = NodesFile()
+        nodes.load('nodes_file.h5', 'node_types.csv')
+        for node in nodes:
+            print(node['prop'])
+            ...
+    Or individually by gid:
+        node = nodes[101]
+        print(node['prop'])
+ """
+ def __init__(self):
+ self._network_name = None
+ self._version = None
+ self._iter_index = 0
+ self._nrows = 0
+ self._node_types_table = None
+
+ @property
+ def name(self):
+ """name of network containing these nodes"""
+ return self._network_name
+
+ @property
+ def version(self):
+ return self._version
+
+ @property
+ def gids(self):
+ raise NotImplementedError()
+
+ @property
+ def node_types_table(self):
+ return self._node_types_table
+
+ def load(self, nodes_file, node_types_file):
+ raise NotImplementedError()
+
+ def get_node(self, gid, cache=False):
+ raise NotImplementedError()
+
+ def __len__(self):
+ raise NotImplementedError()
+
+ def __iter__(self):
+ self._iter_index = 0
+ return self
+
+ def next(self):
+ raise NotImplementedError()
+
+ def __getitem__(self, gid):
+ return self.get_node(gid)
+
+
+class EdgesFile(object):
+ """Class for reading and iterating over edge files.
+
+    Use the load() method to instantiate from the files. The edges for any given target gid can be accessed
+    using the edges_itr() method:
+        edges = EdgesFile()
+        edges.load('edge_file.h5', 'edge_types.csv')
+        for edge_prop in edges.edges_itr(101):
+            assert(edge_prop.target_gid == 101)
+            source_node = nodes[edge_prop.source_gid]
+            print(edge_prop['prop_name'])
+ """
+ @property
+ def source_network(self):
+ """Name of network containing the source gids"""
+ raise NotImplementedError()
+
+ @property
+ def target_network(self):
+ """Name of network containing the target gids"""
+ raise NotImplementedError()
+
+ def load(self, edges_file, edge_types_file):
+ raise NotImplementedError()
+
+ def edges_itr(self, target_gid):
+ raise NotImplementedError()
+
+ def __len__(self):
+ raise NotImplementedError()
+
+
+##########################################
+# Helper functions
+##########################################
+class ColumnProperty(object):
+ """Representation of a column name and metadata from a hdf5 dataset, csv column, etc.
+
+ """
+    def __init__(self, name, dtype, dimension, attrs=None):
+        self._name = name
+        self._dtype = dtype
+        self._dim = dimension
+        self._attrs = attrs if attrs is not None else {}  # avoid a shared mutable default
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def dtype(self):
+ return self._dtype
+
+ @property
+ def dimension(self):
+ return self._dim
+
+ @property
+ def attributes(self):
+ return self._attrs
+
+ @classmethod
+ def from_h5(cls, hf_obj, name=None):
+ if isinstance(hf_obj, h5py.Dataset):
+ ds_name = name if name is not None else hf_obj.name.split('/')[-1]
+ ds_dtype = hf_obj.dtype
+
+ # If the dataset shape is in the form "(N, M)" then the dimension is M. If the shape is just "(N)" then the
+ # dimension is just 1
+ dim = 1 if len(hf_obj.shape) < 2 else hf_obj.shape[1]
+ return cls(ds_name, ds_dtype, dim, attrs=hf_obj.attrs)
+
+ elif isinstance(hf_obj, h5py.Group):
+ columns = []
+ for name, ds in hf_obj.items():
+ if isinstance(ds, h5py.Dataset):
+ columns.append(ColumnProperty.from_h5(ds, name))
+ return columns
+
+ else:
+ raise Exception('Unable to convert hdf5 object {} to a property or list of properties.'.format(hf_obj))
+
+ @classmethod
+ def from_csv(cls, pd_obj, name=None):
+ if isinstance(pd_obj, pd.Series):
+ c_name = name if name is not None else pd_obj.name
+ c_dtype = pd_obj.dtype
+ return cls(c_name, c_dtype, 1)
+
+ elif isinstance(pd_obj, pd.DataFrame):
+ return [cls(name, pd_obj[name].dtype, 1) for name in pd_obj.columns]
+
+ else:
+ raise Exception('Unable to convert pandas object {} to a property or list of properties.'.format(pd_obj))
+
+ def __hash__(self):
+ return hash(self._name)
+
+ def __repr__(self):
+ return '{} ({})'.format(self.name, self.dtype)
+
+
+class TypesTable(dict):
+    def __init__(self, types_file, index_column, separator=' ', comment='#'):
+        super(TypesTable, self).__init__()
+
+        types_df = pd.read_csv(types_file, sep=separator, comment=comment)
+ self._columns = ColumnProperty.from_csv(types_df)
+ for _, row in types_df.iterrows():
+ # TODO: iterrows does not preserve dtype and should be replaced with itertuples
+ type_id = row[index_column]
+ row = {col.name: row[col.name] for col in self._columns}
+ self.update({type_id: row})
+
+ @property
+ def columns(self):
+ return self._columns
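+
+    # Example types file this table can consume (space separated, '#' for comments;
+    # the column names and values are illustrative):
+    #     node_type_id model_type dynamics_params
+    #     100 biophysical fit_parameters.json
+    #     101 point_IntFire1 IntFire1_exc.json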
+
+
+def build_row_repr(row):
+ columns = row.columns
+    if len(columns) > 0:
+ rstr = "{"
+ for c in columns:
+ rstr += "'{}': {}, ".format(c, row[c])
+ return rstr[:-2] + "}"
+ else:
+ return "{}"
diff --git a/bmtk-vb/build/lib/bmtk/utils/io/tabular_network_v0.py b/bmtk-vb/build/lib/bmtk/utils/io/tabular_network_v0.py
new file mode 100644
index 0000000..711c177
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/io/tabular_network_v0.py
@@ -0,0 +1,160 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import h5py
+
+from . import tabular_network as tn  # explicit relative import (valid on both py2 and py3)
+
+"""
+This is for the original bionet network format developed at the AI in 2016-2017. nodes, node_types, and edge_types
+use csv format, while edges use an hdf5 format.
+
+"""
+class TabularNetwork(tn.TabularNetwork):
+ @staticmethod
+ def load_nodes(nodes_file, node_types_file):
+ nf = NodesFile()
+ nf.load(nodes_file, node_types_file)
+ return nf
+
+ @staticmethod
+ def load_edges(edges_file, edge_types_file):
+ ef = EdgesFile()
+ ef.load(edges_file, edge_types_file)
+ return ef
+
+
+class NodeRow(tn.NodeRow):
+ def __init__(self, gid, node_props, types_props, columns):
+ super(NodeRow, self).__init__(gid, node_props, types_props)
+ self._columns = columns
+
+ @property
+ def with_dynamics_params(self):
+ return False
+
+ @property
+ def dynamics_params(self):
+ return None
+
+
+class NodesFile(tn.NodesFile):
+ def __init__(self):
+ super(NodesFile, self).__init__()
+ self._network_name = 'NA'
+ self._version = 'v0.0'
+
+ self._nodes_df = None
+ self._nodes_columns = None
+ self._columns = None
+
+ @property
+ def gids(self):
+ return list(self._nodes_df.index)
+
+ def load(self, nodes_file, node_types_file):
+ self._nodes_df = pd.read_csv(nodes_file, sep=' ', index_col=['node_id'])
+ self._node_types_table = tn.TypesTable(node_types_file, 'node_type_id')
+
+ self._nrows = len(self._nodes_df.index)
+ self._nodes_columns = tn.ColumnProperty.from_csv(self._nodes_df)
+ self._columns = self._nodes_columns + self._node_types_table.columns
+
+ def get_node(self, gid, cache=False):
+ nodes_data = self._nodes_df.loc[gid]
+ node_type_data = self._node_types_table[nodes_data['node_type_id']]
+ return NodeRow(gid, nodes_data, node_type_data, self._columns)
+
+ def __len__(self):
+ return self._nrows
+
+    def next(self):
+        if self._iter_index >= len(self):
+            raise StopIteration
+        else:
+            gid = self._nodes_df.index[self._iter_index]  # gid label at this positional index
+            self._iter_index += 1
+            return self.get_node(gid)
+
+    __next__ = next  # Python 3 iterator protocol
+
+
+class EdgeRow(tn.EdgeRow):
+ def __init__(self, trg_gid, src_gid, nsyns, edge_type_props):
+ super(EdgeRow, self).__init__(trg_gid, src_gid, edge_type_props=edge_type_props)
+ self._edge_props['nsyns'] = nsyns
+
+ @property
+ def with_dynamics_params(self):
+ return False
+
+ @property
+ def dynamics_params(self):
+ return None
+
+
+class EdgesFile(tn.EdgesFile):
+ def __init__(self):
+ self._nrows = 0
+ self._index_len = 0
+
+ self._edge_ptr_ds = None
+ self._num_syns_ds = None
+ self._src_gids_ds = None
+ self._edge_types_ds = None
+ self._edge_types_table = {}
+
+ @property
+ def source_network(self):
+ return None
+
+ @property
+ def target_network(self):
+ return None
+
+ def load(self, edges_file, edge_types_file):
+ edges_hf = h5py.File(edges_file, 'r')
+ self._edge_ptr_ds = edges_hf['edge_ptr']
+ self._num_syns_ds = edges_hf['num_syns']
+ self._src_gids_ds = edges_hf['src_gids']
+
+ # TODO: validate edge_types dataset keys
+ self._edge_types_ds = edges_hf['edge_types']
+ self._edge_types_table = tn.TypesTable(edge_types_file, 'edge_type_id')
+ self._index_len = len(self._edge_ptr_ds)
+ self._nrows = len(self._src_gids_ds)
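+
+        # Expected datasets in the v0 edges file (1-D, row aligned), inferred from
+        # the reads above and edges_itr() below:
+        #     edge_ptr   per-target index: edges of target gid t occupy rows
+        #                edge_ptr[t]:edge_ptr[t+1]
+        #     src_gids   source gid of each edge
+        #     num_syns   synapse count of each edge
+        #     edge_types edge_type_id of each edge, resolved through the types table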
+
+ def edges_itr(self, target_gid):
+ assert(isinstance(target_gid, int))
+        if target_gid+1 >= self._index_len:
+            return  # gid not in the index; yield nothing (raising StopIteration in a generator is an error under PEP 479)
+
+        index_begin = self._edge_ptr_ds[target_gid]
+        index_end = self._edge_ptr_ds[target_gid+1]
+        for iloc in range(index_begin, index_end):
+ source_gid = self._src_gids_ds[iloc]
+ edge_type_id = self._edge_types_ds[iloc]
+ edge_type = self._edge_types_table[edge_type_id]
+ nsyns = self._num_syns_ds[iloc]
+ yield EdgeRow(target_gid, source_gid, nsyns, edge_type)
+
+ def __len__(self):
+ return self._nrows
diff --git a/bmtk-vb/build/lib/bmtk/utils/io/tabular_network_v1.py b/bmtk-vb/build/lib/bmtk/utils/io/tabular_network_v1.py
new file mode 100644
index 0000000..3506b81
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/io/tabular_network_v1.py
@@ -0,0 +1,256 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import h5py
+
+from . import tabular_network as tn  # explicit relative import (valid on both py2 and py3)
+
+
+"""
+For the initial draft of the network format developed jointly by AI and collaborators in Q2 of 2017.
+
+Edges and nodes files are stored in hdf5, while the edge-types and node-types are stored in csv. In the hd5f files
+optional properties are stored in groups assigned to each node/edge. Optionally each property group may include
+dynamics_params subgroup to describe the model of each node/row, or dynamics_params may be referenced in the types
+metadata file.
+
+"""
+
+class TabularNetwork(tn.TabularNetwork):
+ @staticmethod
+ def load_nodes(nodes_file, node_types_file):
+ nf = NodesFile()
+ nf.load(nodes_file, node_types_file)
+ return nf
+
+ @staticmethod
+ def load_edges(edges_file, edge_types_file):
+ ef = EdgesFile()
+ ef.load(edges_file, edge_types_file)
+ return ef
+
+
+class NodeRow(tn.NodeRow):
+ def __init__(self, gid, group, group_props, types_props):
+ super(NodeRow, self).__init__(gid, group_props, types_props)
+ # TODO: use group to determine if dynamics_params are included.
+
+ @property
+ def with_dynamics_params(self):
+ return False
+
+ @property
+ def dynamics_params(self):
+ return None
+
+
+class NodesFile(tn.NodesFile):
+ def __init__(self):
+ super(NodesFile, self).__init__()
+
+ self._nodes_hf = None
+ self._nodes_index = pd.DataFrame()
+ self._group_table = {}
+ self._nrows = 0
+
+ @property
+ def gids(self):
+ return list(self._nodes_index.index)
+
+ def load(self, nodes_file, node_types_file):
+ nodes_hf = h5py.File(nodes_file, 'r')
+ if 'nodes' not in nodes_hf.keys():
+ raise Exception('Could not find nodes in {}'.format(nodes_file))
+ nodes_group = nodes_hf['nodes']
+
+ self._network_name = nodes_group.attrs['network'] if 'network' in nodes_group.attrs.keys() else 'NA'
+ self._version = 'v0.1' # TODO: get the version number from the attributes
+
+ # Create Indices
+ self._nodes_index['node_gid'] = pd.Series(nodes_group['node_gid'], dtype=nodes_group['node_gid'].dtype)
+ self._nodes_index['node_type_id'] = pd.Series(nodes_group['node_type_id'],
+ dtype=nodes_group['node_type_id'].dtype)
+ self._nodes_index['node_group'] = pd.Series(nodes_group['node_group'],
+ dtype=nodes_group['node_group'].dtype)
+ self._nodes_index['node_group_index'] = pd.Series(nodes_group['node_group_index'],
+ dtype=nodes_group['node_group_index'].dtype)
+ self._nodes_index.set_index(['node_gid'], inplace=True)
+ self._nrows = len(self._nodes_index)
+
+ # Save the node-types
+ self._node_types_table = tn.TypesTable(node_types_file, 'node_type_id')
+
+ # save pointers to the groups table
+ self._group_table = {grp_id: Group(grp_id, grp_ptr, self._node_types_table)
+ for grp_id, grp_ptr in nodes_group.items() if isinstance(grp_ptr, h5py.Group)}
+
+ def get_node(self, gid, cache=False):
+ node_metadata = self._nodes_index.loc[gid]
+ ng = node_metadata['node_group']
+ ng_idx = node_metadata['node_group_index']
+
+ group_props = self._group_table[str(ng)][ng_idx]
+ types_props = self._node_types_table[node_metadata['node_type_id']]
+
+ return NodeRow(gid, self._group_table[str(ng)], group_props, types_props)
+
+ def __len__(self):
+ return self._nrows
+
+    def next(self):
+        if self._iter_index >= len(self):
+            raise StopIteration
+        else:
+            gid = self._nodes_index.index[self._iter_index]  # gid label at this positional index
+            self._iter_index += 1
+            return self.get_node(gid)
+
+    __next__ = next  # Python 3 iterator protocol
+
+
+class EdgeRow(tn.EdgeRow):
+ def __init__(self, trg_gid, src_gid, syn_group, edge_props={}, edge_type_props={}):
+ super(EdgeRow, self).__init__(trg_gid, src_gid, edge_props, edge_type_props)
+ # TODO: Look in syn_group to see if dynamics_params are included
+
+ @property
+ def with_dynamics_params(self):
+ return False
+
+ @property
+ def dynamics_params(self):
+ return None
+
+
+class EdgesFile(tn.EdgesFile):
+ def __init__(self):
+ super(EdgesFile, self).__init__()
+ self._nedges = 0
+ self._source_network = None
+ self._target_network = None
+
+ # We'll save the target-index dataset into memory
+ self._target_index = None
+ self._target_index_len = 0
+
+ # to save memory just keep pointers to datasets and access them as needed.
+ self._target_gid_ds = None
+ self._source_gid_ds = None
+ self._edge_type_ds = None
+ self._edge_group_ds = None
+ self._edge_group_index_ds = None
+ self._edge_types_table = None
+
+ self._group_table = {} # A table for all subgroups
+
+ @property
+ def source_network(self):
+ return self._source_network
+
+ @property
+ def target_network(self):
+ return self._target_network
+
+ def load(self, edges_file, edge_types_file):
+ edges_hf = h5py.File(edges_file, 'r')
+ if 'edges' not in edges_hf.keys():
+ raise Exception('Could not find edges in {}'.format(edges_file))
+ edges_group = edges_hf['edges']
+
+ # Preload the target index pointers into memory
+ self._target_index = pd.Series(edges_group['index_pointer'], dtype=edges_group['index_pointer'].dtype)
+ self._target_index_len = len(self._target_index)
+
+ # For the other index tables we only load in a file pointer
+ self._target_gid_ds = edges_group['target_gid']
+ if 'network' in self._target_gid_ds.attrs.keys():
+ self._target_network = self._target_gid_ds.attrs['network']
+
+ self._source_gid_ds = edges_group['source_gid']
+ if 'network' in self._source_gid_ds.attrs.keys():
+ self._source_network = self._source_gid_ds.attrs['network']
+
+ self._edge_type_ds = edges_group['edge_type_id']
+ self._edge_group_ds = edges_group['edge_group']
+ self._edge_group_index_ds = edges_group['edge_group_index']
+
+ self._nedges = len(self._edge_group_index_ds)
+
+ # Load in edge-types table
+ self._edge_types_table = tn.TypesTable(edge_types_file, 'edge_type_id')
+
+ # Load in the group properties
+ # TODO: look in attributes for group synonyms
+ # TODO: HDF5 group name will always be a string, but value in groups dataset will be an int.
+ self._group_table = {grp_id: Group(grp_id, grp_ptr, self._edge_types_table)
+ for grp_id, grp_ptr in edges_group.items() if isinstance(grp_ptr, h5py.Group)}
+
+ def edges_itr(self, target_gid):
+ assert(isinstance(target_gid, int))
+        if target_gid+1 >= self._target_index_len:
+            return  # gid not in the index; yield nothing (raising StopIteration in a generator is an error under PEP 479)
+
+        index_begin = self._target_index.iloc[target_gid]
+        index_end = self._target_index.iloc[target_gid+1]
+        for iloc in range(index_begin, index_end):
+ yield self[iloc]
+
+ def __len__(self):
+ return self._nedges
+
+ def __getitem__(self, iloc):
+ trg_gid = self._target_gid_ds[iloc]
+ src_gid = self._source_gid_ds[iloc]
+
+ et_id = self._edge_type_ds[iloc]
+ et_props = self._edge_types_table[et_id]
+
+ syn_group = self._edge_group_ds[iloc]
+ syn_index = self._edge_group_index_ds[iloc]
+ group_props = self._group_table[str(syn_group)][syn_index]
+
+ return EdgeRow(trg_gid, src_gid, syn_group, group_props, et_props)
+
+
+class Group(object):
+ def __init__(self, group_id, h5_group, types_table):
+ self._types_table = types_table
+ self._group_id = group_id
+
+ self._group_columns = tn.ColumnProperty.from_h5(h5_group)
+ self._group_table = [(prop, h5_group[prop.name]) for prop in self._group_columns]
+
+ self._all_columns = self._group_columns + types_table.columns
+
+ # TODO: check to see if dynamics_params exists
+
+ @property
+ def columns(self):
+ return self._all_columns
+
+ def __getitem__(self, indx):
+ group_props = {}
+ for cprop, h5_obj in self._group_table:
+ group_props[cprop.name] = h5_obj[indx]
+ return group_props
+
+ def __repr__(self):
+ return "Group('group id': {}, 'properties':{})".format(self._group_id, self._all_columns)
diff --git a/bmtk-vb/build/lib/bmtk/utils/property_schema.py b/bmtk-vb/build/lib/bmtk/utils/property_schema.py
new file mode 100644
index 0000000..54f2005
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/property_schema.py
@@ -0,0 +1,35 @@
+# Allen Institute Software License - This software license is the 2-clause BSD license plus clause a third
+# clause that prohibits redistribution for commercial purposes without further permission.
+#
+# Copyright 2017. Allen Institute. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Redistributions for commercial purposes are not permitted without the Allen Institute's written permission. For
+# purposes of this license, commercial purposes is the incorporation of the Allen Institute's software into anything for
+# which you will charge fees or other compensation. Contact terms@alleninstitute.org for commercial licensing
+# opportunities.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+# TODO: go through the individual simulator's property_schemas and pull out the common functionality. Ideally all
+# simulators should share ~80% of the same schema, with some differences in how certain columns are determined.
+# TODO: Add access to builder so when a network is built with a given property schema
+# TODO: have utils.io.tabular_network use these schemas to discover name of node-id, node-type-id, etc for different
+# standards.
+class PropertySchema:
+ pass
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/utils/scripts/sonata.circuit_config.json b/bmtk-vb/build/lib/bmtk/utils/scripts/sonata.circuit_config.json
new file mode 100644
index 0000000..a2c7969
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/scripts/sonata.circuit_config.json
@@ -0,0 +1,21 @@
+{
+ "manifest": {
+ "$BASE_DIR": "%%BASE_DIR%%",
+ "$COMPONENTS_DIR": "%%COMPONENTS_DIR%%",
+ "$NETWORK_DIR": "%%NETWORK_DIR%%"
+ },
+
+ "components": {
+ "morphologies_dir": "$COMPONENTS_DIR/morphologies",
+ "synaptic_models_dir": "$COMPONENTS_DIR/synaptic_models",
+ "mechanisms_dir":"$COMPONENTS_DIR/mechanisms",
+ "biophysical_neuron_models_dir": "$COMPONENTS_DIR/biophysical_neuron_templates",
+ "point_neuron_models_dir": "$COMPONENTS_DIR/point_neuron_templates"
+ },
+
+ "networks": {
+ "nodes": [],
+
+ "edges": []
+ }
+}
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/utils/scripts/sonata.simulation_config.json b/bmtk-vb/build/lib/bmtk/utils/scripts/sonata.simulation_config.json
new file mode 100644
index 0000000..c619d29
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/scripts/sonata.simulation_config.json
@@ -0,0 +1,31 @@
+{
+ "manifest": {
+ "$BASE_DIR": "%%BASE_DIR",
+ "$OUTPUT_DIR": "$BASE_DIR/output"
+ },
+
+ "target_simulator":"%%TARGET_SIMULATOR%%",
+
+ "run": {
+ "tstop": 0.0,
+ "dt": 0.1
+ },
+
+ "conditions": {
+ "celsius": 34.0
+ },
+
+ "inputs": {},
+
+ "reports": {},
+
+ "output": {
+ "log_file": "log.txt",
+ "output_dir": "${OUTPUT_DIR}",
+ "spikes_file": "spikes.h5",
+ "spikes_file_csv": "spikes.csv",
+ "overwrite_output_dir": true
+ },
+
+ "network": "./circuit_config.json"
+}
\ No newline at end of file
diff --git a/bmtk-vb/build/lib/bmtk/utils/sim_setup.py b/bmtk-vb/build/lib/bmtk/utils/sim_setup.py
new file mode 100644
index 0000000..8301fb5
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/sim_setup.py
@@ -0,0 +1,443 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import shutil
+import json
+import h5py
+import re
+from subprocess import call
+from optparse import OptionParser
+from collections import OrderedDict
+
+# Order of the different sections of the config.json. Any non-listed items will be placed at the end of the config
+config_order = [
+ 'manifest',
+ 'target_simulator',
+ 'run',
+ 'conditions',
+ 'inputs',
+ 'components',
+ 'output',
+ 'reports',
+ 'networks'
+]
+
+local_path = os.path.dirname(os.path.realpath(__file__))
+scripts_path = os.path.join(local_path, 'scripts')
+
+'''
+order_lookup = {k: i for i, k in enumerate(config_order)}
+def sort_config_keys(ckey):
+ print(ckey)
+ exit()
+'''
+
+def get_network_block(circuit_config, network_dir):
+ net_nodes = {}
+ net_edges = {}
+ for f in os.listdir(network_dir):
+ if not os.path.isfile(os.path.join(network_dir, f)) or f.startswith('.'):
+ continue
+
+ if '_nodes' in f:
+ net_name = f[:f.find('_nodes')]
+ nodes_dict = net_nodes.get(net_name, {})
+ nodes_dict['nodes_file'] = os.path.join('$NETWORK_DIR', f)
+ net_nodes[net_name] = nodes_dict
+
+ elif '_node_types' in f:
+ net_name = f[:f.find('_node_types')]
+ nodes_dict = net_nodes.get(net_name, {})
+ nodes_dict['node_types_file'] = os.path.join('$NETWORK_DIR', f)
+ net_nodes[net_name] = nodes_dict
+
+ elif '_edges' in f:
+ net_name = f[:f.find('_edges')]
+ edges_dict = net_edges.get(net_name, {})
+ edges_dict['edges_file'] = os.path.join('$NETWORK_DIR', f)
+ try:
+ edges_h5 = h5py.File(os.path.join(network_dir, f), 'r')
+ edges_dict['target'] = edges_h5['edges']['target_gid'].attrs['network']
+ edges_dict['source'] = edges_h5['edges']['source_gid'].attrs['network']
+ except Exception as e:
+ pass
+
+ net_edges[net_name] = edges_dict
+
+ elif '_edge_types' in f:
+ net_name = f[:f.find('_edge_types')]
+ edges_dict = net_edges.get(net_name, {})
+ edges_dict['edge_types_file'] = os.path.join('$NETWORK_DIR', f)
+ net_edges[net_name] = edges_dict
+
+ else:
+            print('Unknown file {}; it will need to be added to the config by hand.'.format(f))
+
+ for _, sect in net_nodes.items():
+ circuit_config['networks']['nodes'].append(sect)
+
+ for _, sect in net_edges.items():
+ circuit_config['networks']['edges'].append(sect)
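+
+# Example (file names illustrative): a network directory containing
+#     v1_nodes.h5  v1_node_types.csv  lgn_v1_edges.h5  lgn_v1_edge_types.csv
+# yields one networks/nodes entry ({'nodes_file': ..., 'node_types_file': ...}) and
+# one networks/edges entry, with networks keyed by the prefix before '_nodes'/'_edges'.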
+
+
+def build_components(circuit_config, components_path, scripts_path, with_examples):
+ for c_name, c_dir in circuit_config['components'].items():
+ dir_name = c_dir.replace('$COMPONENTS_DIR/', '')
+ dir_path = os.path.join(components_path, dir_name)
+
+ # create component directory
+ if not os.path.exists(dir_path):
+ os.makedirs(dir_path)
+
+ # Copy in files from scripts//
+ scripts_dir = os.path.join(scripts_path, dir_name)
+ if with_examples and os.path.isdir(scripts_dir):
+ shutil.rmtree(dir_path)
+ shutil.copytree(scripts_dir, dir_path)
+
+
+def build_circuit_env(base_dir, network_dir, components_dir, simulator, with_examples):
+ simulator_path = os.path.join(scripts_path, simulator)
+
+ circuit_config = json.load(open(os.path.join(scripts_path, 'sonata.circuit_config.json')))
+ circuit_config['manifest']['$BASE_DIR'] = base_dir if base_dir == '.' else os.path.abspath(base_dir)
+ circuit_config['manifest']['$COMPONENTS_DIR'] = '$BASE_DIR/{}'.format(components_dir)
+
+ # Try to figure out the $NETWORK_DIR
+    if network_dir is None:
+        network_path = ''
+    elif os.path.isabs(network_dir):
+        # In case network_dir is an absolute path
+        network_path = network_dir
+    elif os.path.abspath(network_dir).startswith(os.path.abspath(base_dir)):
+        # If network_dir is in a subdir of base_dir then NETWORK_DIR=$BASE_DIR/path/to/network
+        network_path = os.path.abspath(network_dir).replace(os.path.abspath(base_dir), '$BASE_DIR')
+    else:
+        # If network_dir exists outside of base_dir, just reference the absolute path
+        network_path = os.path.abspath(network_dir)
+
+ circuit_config['manifest']['$NETWORK_DIR'] = network_path
+
+ # Initialize the components directories
+ build_components(circuit_config, os.path.join(base_dir, components_dir), simulator_path, with_examples)
+
+ # Parse the network directory
+ get_network_block(circuit_config, network_dir)
+
+ return circuit_config
+
+
+def build_simulation_env(base_dir, target_simulator, tstop, dt, reports):
+ simulation_config = json.load(open(os.path.join(scripts_path, 'sonata.simulation_config.json')))
+ simulation_config['manifest']['$BASE_DIR'] = base_dir if base_dir == '.' else os.path.abspath(base_dir)
+ simulation_config['target_simulator'] = target_simulator
+ simulation_config['run']['tstop'] = tstop
+ simulation_config['run']['dt'] = dt
+
+ if reports is not None:
+ for report_name, report_params in reports.items():
+ simulation_config['reports'][report_name] = report_params
+
+ return simulation_config
+
+
+def copy_config(base_dir, json_dict, config_file_name):
+ with open(os.path.join(base_dir, config_file_name), 'w') as outfile:
+ ordered_dict = OrderedDict(sorted(json_dict.items(),
+ key=lambda s: config_order.index(s[0]) if s[0] in config_order else 100))
+ json.dump(ordered_dict, outfile, indent=2)
+
+
+def copy_run_script(base_dir, simulator, run_script):
+ simulator_path = os.path.join(scripts_path, simulator)
+ shutil.copy(os.path.join(simulator_path, run_script), os.path.join(base_dir, run_script))
+
+
+def build_env_pointnet(base_dir='.', network_dir=None, reports=None, with_examples=True, tstop=1000.0, dt=0.001, **args):
+ simulator='pointnet'
+ target_simulator='NEST'
+ components_dir='point_components'
+
+ # Copy run script
+ copy_run_script(base_dir=base_dir, simulator=simulator, run_script='run_{}.py'.format(simulator))
+
+    # Build circuit_config and components directory
+ circuit_config = build_circuit_env(base_dir=base_dir, network_dir=network_dir, components_dir=components_dir,
+ simulator=simulator, with_examples=with_examples)
+ copy_config(base_dir, circuit_config, 'circuit_config.json')
+
+ simulation_config = build_simulation_env(base_dir=base_dir, target_simulator=target_simulator, tstop=tstop, dt=dt,
+ reports=reports)
+ copy_config(base_dir, simulation_config, 'simulation_config.json')
+
+
+def build_env_bionet(base_dir='.', network_dir=None, reports=None, with_examples=True, tstop=1000.0, dt=0.001,
+ compile_mechanisms=True, **args):
+ simulator='bionet'
+ target_simulator='NEURON'
+ components_dir='biophys_components'
+
+ # Copy run script
+ copy_run_script(base_dir=base_dir, simulator=simulator, run_script='run_{}.py'.format(simulator))
+
+    # Build circuit_config and components directory
+ circuit_config = build_circuit_env(base_dir=base_dir, network_dir=network_dir, components_dir=components_dir,
+ simulator=simulator, with_examples=with_examples)
+ copy_config(base_dir, circuit_config, 'circuit_config.json')
+ if compile_mechanisms:
+ cwd = os.getcwd()
+ os.chdir(os.path.join(base_dir, components_dir, 'mechanisms')) # circuit_config['components']['mechanisms_dir'])
+ try:
+ print(os.getcwd())
+ call(['nrnivmodl', 'modfiles'])
+ except Exception as e:
+ print('Was unable to compile mechanism in {}'.format(circuit_config['components']['mechanisms_dir']))
+ # print e.message
+ os.chdir(cwd)
+
+ # Build simulation config
+ simulation_config = build_simulation_env(base_dir=base_dir, target_simulator=target_simulator, tstop=tstop, dt=dt,
+ reports=reports)
+ simulation_config['run']['dL'] = args.get('dL', 20.0)
+ simulation_config['run']['spike_threshold'] = args.get('spike_threshold', -15.0)
+ simulation_config['run']['nsteps_block'] = args.get('nsteps_block', 5000)
+ simulation_config['conditions']['v_init'] = args.get('v_init', -80.0)
+ copy_config(base_dir, simulation_config, 'simulation_config.json')
+
+
+def build_env_popnet(base_dir='.', network_dir=None, reports=None, with_examples=True, tstop=1000.0, dt=0.001, **args):
+ simulator='popnet'
+ target_simulator='DiPDE'
+ components_dir='pop_components'
+
+ # Copy run script
+ copy_run_script(base_dir=base_dir, simulator=simulator, run_script='run_{}.py'.format(simulator))
+
+    # Build circuit_config and components directory
+ circuit_config = build_circuit_env(base_dir=base_dir, network_dir=network_dir, components_dir=components_dir,
+ simulator=simulator, with_examples=with_examples)
+ circuit_config['components']['population_models_dir'] = '$COMPONENTS_DIR/population_models'
+ # population_models_dir = os.path.join(base_dir, components_dir, 'population_models')
+ if with_examples:
+ models_dir = os.path.join(base_dir, components_dir, 'population_models')
+ if os.path.exists(models_dir):
+ shutil.rmtree(models_dir)
+ shutil.copytree(os.path.join(scripts_path, simulator, 'population_models'), models_dir)
+
+ copy_config(base_dir, circuit_config, 'circuit_config.json')
+
+ # Build simulation config
+ simulation_config = build_simulation_env(base_dir=base_dir, target_simulator=target_simulator, tstop=tstop, dt=dt,
+ reports=reports)
+    # PopNet doesn't produce spike files, so replace them with rates files
+    for output_key in list(simulation_config['output'].keys()):  # copy keys; entries are deleted while iterating
+        if output_key.startswith('spikes'):
+            del simulation_config['output'][output_key]
+ # simulation_config['output']['rates_file_csv'] = 'firing_rates.csv'
+ simulation_config['output']['rates_file'] = 'firing_rates.csv'
+
+ copy_config(base_dir, simulation_config, 'simulation_config.json')
+
+
+"""
+def build_env_bionet(base_dir='.', run_time=0.0, with_config=True, network_dir=None, with_cell_types=True,
+ compile_mechanisms=True, reports=None):
+ local_path = os.path.dirname(os.path.realpath(__file__))
+ scripts_path = os.path.join(local_path, 'scripts', 'bionet')
+
+ components_dir = os.path.join(base_dir, 'components')
+ component_paths = {
+ 'morphologies_dir': os.path.join(components_dir, 'biophysical', 'morphology'),
+ 'biophysical_models_dir': os.path.join(components_dir, 'biophysical', 'electrophysiology'),
+ 'mechanisms_dir': os.path.join(components_dir, 'mechanisms'),
+ 'point_models_dir': os.path.join(components_dir, 'intfire'),
+ 'synaptic_models_dir': os.path.join(components_dir, 'synaptic_models'),
+ 'templates_dir': os.path.join(components_dir, 'hoc_templates')
+ }
+ for path in component_paths.values():
+ if not os.path.exists(path):
+ os.makedirs(path)
+
+ if with_cell_types:
+ shutil.rmtree(component_paths['templates_dir'])
+ shutil.copytree(os.path.join(scripts_path, 'hoc_templates'), component_paths['templates_dir'])
+
+ shutil.rmtree(component_paths['mechanisms_dir'])
+ shutil.copytree(os.path.join(scripts_path, 'mechanisms'), component_paths['mechanisms_dir'])
+
+ shutil.rmtree(component_paths['synaptic_models_dir'])
+ shutil.copytree(os.path.join(scripts_path, 'synaptic_models'), component_paths['synaptic_models_dir'])
+
+ shutil.rmtree(component_paths['point_models_dir'])
+ shutil.copytree(os.path.join(scripts_path, 'intfire'), component_paths['point_models_dir'])
+
+ if compile_mechanisms:
+ cwd = os.getcwd()
+ os.chdir(component_paths['mechanisms_dir'])
+ try:
+ print(os.getcwd())
+ call(['nrnivmodl', 'modfiles'])
+ except Exception as e:
+ print('Was unable to compile mechanism in {}'.format(component_paths['mechanisms_dir']))
+ # print e.message
+ os.chdir(cwd)
+
+ shutil.copy(os.path.join(scripts_path, 'run_bionet.py'), os.path.join(base_dir, 'run_bionet.py'))
+
+ if with_config:
+ config_json = json.load(open(os.path.join(scripts_path, 'default_config.json')))
+ config_json['manifest']['$BASE_DIR'] = os.path.abspath(base_dir)
+ config_json['manifest']['$COMPONENTS_DIR'] = os.path.join('${BASE_DIR}', 'components')
+ config_json['run']['tstop'] = run_time
+
+ if network_dir is not None:
+ config_json['manifest']['$NETWORK_DIR'] = os.path.abspath(network_dir)
+
+ net_nodes = {}
+ net_edges = {}
+ for f in os.listdir(network_dir):
+ if not os.path.isfile(os.path.join(network_dir, f)) or f.startswith('.'):
+ continue
+
+ if '_nodes' in f:
+ net_name = f[:f.find('_nodes')]
+ nodes_dict = net_nodes.get(net_name, {'name': net_name})
+ nodes_dict['nodes_file'] = os.path.join('${NETWORK_DIR}', f)
+ net_nodes[net_name] = nodes_dict
+
+ elif '_node_types' in f:
+ net_name = f[:f.find('_node_types')]
+ nodes_dict = net_nodes.get(net_name, {'name': net_name})
+ nodes_dict['node_types_file'] = os.path.join('${NETWORK_DIR}', f)
+ net_nodes[net_name] = nodes_dict
+
+ elif '_edges' in f:
+ net_name = f[:f.find('_edges')]
+ edges_dict = net_edges.get(net_name, {'name': net_name})
+ edges_dict['edges_file'] = os.path.join('${NETWORK_DIR}', f)
+ try:
+ edges_h5 = h5py.File(os.path.join(network_dir, f), 'r')
+ edges_dict['target'] = edges_h5['edges']['target_gid'].attrs['network']
+ edges_dict['source'] = edges_h5['edges']['source_gid'].attrs['network']
+ except Exception as e:
+ pass
+
+ net_edges[net_name] = edges_dict
+
+ elif '_edge_types' in f:
+ net_name = f[:f.find('_edge_types')]
+ edges_dict = net_edges.get(net_name, {'name': net_name})
+ edges_dict['edge_types_file'] = os.path.join('${NETWORK_DIR}', f)
+ net_edges[net_name] = edges_dict
+
+ else:
+ print('Unknown file {}. Will have to enter by hand'.format(f))
+
+ for _, sect in net_nodes.items():
+ config_json['networks']['nodes'].append(sect)
+
+ for _, sect in net_edges.items():
+ config_json['networks']['edges'].append(sect)
+
+ if reports is not None:
+ for report_name, report_params in reports.items():
+ config_json['reports'][report_name] = report_params
+
+ ordered_dict = OrderedDict(sorted(config_json.items(),
+ key=lambda s: config_order.index(s[0]) if s[0] in config_order else 100))
+ with open(os.path.join(base_dir, 'config.json'), 'w') as outfile:
+ json.dump(ordered_dict, outfile, indent=2)
+ #json.dump(config_json, outfile, indent=2)
+"""
+
+
+if __name__ == '__main__':
+ def str_list(option, opt, value, parser):
+ setattr(parser.values, option.dest, value.split(','))
+
+ #def int_list(option, opt, value, parser):
+ # setattr(parser.values, option.dest, [int(v) for v in value.split(',')])
+
+ def parse_node_set(option, opt, value, parser):
+ try:
+ setattr(parser.values, option.dest, [int(v) for v in value.split(',')])
+ except ValueError as ve:
+ setattr(parser.values, option.dest, value)
+
+
+ parser = OptionParser(usage="Usage: python -m bmtk.utils.sim_setup [options] bionet|pointnet|popnet|mintnet")
+ parser.add_option('-b', '--base_dir', dest='base_dir', default='.', help='path of environment')
+ parser.add_option('-n', '--network_dir', dest='network_dir', default=None,
+ help="Use an exsting directory with network files.")
+ parser.add_option('-r', '--tstop', type='float', dest='tstop', default=1000.0)
+    parser.add_option('-d', '--dt', type='float', dest='dt', help='simulation time step dt', default=0.001)
+
+ # For membrane report
+ def membrane_report_parser(option, opt, value, parser):
+ parser.values.has_membrane_report = True
+ if ',' in value:
+ try:
+ setattr(parser.values, option.dest, [int(v) for v in value.split(',')])
+ except ValueError as ve:
+ setattr(parser.values, option.dest, value.split(','))
+
+ else:
+ setattr(parser.values, option.dest, value)
+
+ parser.add_option('--membrane_report', dest='has_membrane_report', action='store_true', default=False)
+ parser.add_option('--membrane_report-vars', dest='mem_rep_vars', type='string', action='callback',
+ callback=membrane_report_parser, default=[])
+ parser.add_option('--membrane_report-cells', dest='mem_rep_cells', type='string', action='callback',
+ callback=membrane_report_parser, default='all')
+ # parser.add_option('--membrane_report_file', dest='mem_rep_file', type='string', action='callback',
+ # callback=membrane_report_parser, default='$OUTPUT_DIR/cell_vars.h5')
+ parser.add_option('--membrane_report-sections', dest='mem_rep_secs', type='string', action='callback',
+ callback=membrane_report_parser, default='all')
+
+ options, args = parser.parse_args()
+ reports = {}
+
+ if options.has_membrane_report:
+ reports['membrane_report'] = {
+ 'module': 'membrane_report',
+ 'variable_name': options.mem_rep_vars,
+ 'cells': options.mem_rep_cells,
+ # 'file_name': options.mem_rep_file,
+ 'sections': options.mem_rep_secs,
+ }
+
+ target_sim = args[0].lower() if len(args) == 1 else None
+ if target_sim not in ['bionet', 'popnet', 'pointnet', 'mintnet']:
+        raise Exception('Must specify one target simulator. Options: "bionet", "pointnet", "popnet" or "mintnet"')
+
+ if target_sim == 'bionet':
+ build_env_bionet(base_dir=options.base_dir, network_dir=options.network_dir, tstop=options.tstop,
+ dt=options.dt, reports=reports)
+
+ elif target_sim == 'pointnet':
+ build_env_pointnet(base_dir=options.base_dir, network_dir=options.network_dir, tstop=options.tstop,
+ dt=options.dt, reports=reports)
+
+ elif target_sim == 'popnet':
+ build_env_popnet(base_dir=options.base_dir, network_dir=options.network_dir, tstop=options.tstop,
+ dt=options.dt, reports=reports)
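+
+# Example invocation (paths illustrative):
+#     python -m bmtk.utils.sim_setup -b sim_bionet -n ./network --tstop 2000.0 bionet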
diff --git a/bmtk-vb/build/lib/bmtk/utils/sonata/__init__.py b/bmtk-vb/build/lib/bmtk/utils/sonata/__init__.py
new file mode 100644
index 0000000..c236de1
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/sonata/__init__.py
@@ -0,0 +1,25 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .edge import Edge, EdgeSet
+from .file import File
+from .node import Node, NodeSet
diff --git a/bmtk-vb/build/lib/bmtk/utils/sonata/column_property.py b/bmtk-vb/build/lib/bmtk/utils/sonata/column_property.py
new file mode 100644
index 0000000..34eaa5a
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/sonata/column_property.py
@@ -0,0 +1,103 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import h5py
+import pandas as pd
+
+
+class ColumnProperty(object):
+ """Representation of a column name and metadata from a hdf5 dataset, csv column, etc.
+
+ """
+ def __init__(self, name, dtype, dimension, nrows=0, attrs=None):
+ self._name = name
+ self._dtype = dtype
+ self._dim = dimension
+ self._nrows = nrows
+ self._attrs = attrs or {}
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def dtype(self):
+ return self._dtype
+
+ @property
+ def dimension(self):
+ return self._dim
+
+ @property
+ def nrows(self):
+ return self._nrows
+
+ @property
+ def attributes(self):
+ return self._attrs
+
+ @classmethod
+ def from_h5(cls, hf_obj, name=None):
+ if isinstance(hf_obj, h5py.Dataset):
+ ds_name = name if name is not None else hf_obj.name.split('/')[-1]
+ ds_dtype = hf_obj.dtype
+
+ # If the dataset shape is in the form "(N, M)" then the dimension is M. If the shape is just "(N)" then the
+ # dimension is just 1
+ dim = 1 if len(hf_obj.shape) < 2 else hf_obj.shape[1]
+ nrows = hf_obj.shape[0]
+ return cls(ds_name, ds_dtype, dim, nrows, attrs=hf_obj.attrs)
+
+ elif isinstance(hf_obj, h5py.Group):
+ columns = []
+ for name, ds in hf_obj.items():
+ if isinstance(ds, h5py.Dataset):
+ columns.append(ColumnProperty.from_h5(ds, name))
+ return columns
+
+ else:
+ raise Exception('Unable to convert hdf5 object {} to a property or list of properties.'.format(hf_obj))
+
+ @classmethod
+ def from_csv(cls, pd_obj, name=None):
+ if isinstance(pd_obj, pd.Series):
+ c_name = name if name is not None else pd_obj.name
+ c_dtype = pd_obj.dtype
+ return cls(c_name, c_dtype, 1)
+
+ elif isinstance(pd_obj, pd.DataFrame):
+ return [cls(name, pd_obj[name].dtype, 1) for name in pd_obj.columns]
+
+ else:
+ raise Exception('Unable to convert pandas object {} to a property or list of properties.'.format(pd_obj))
+
+ def __hash__(self):
+ return hash(self._name)
+
+ def __repr__(self):
+        return '{} ({})'.format(self.name, self.dtype)
+
+ def __eq__(self, other):
+ if isinstance(other, ColumnProperty):
+ return self._name == other._name
+ else:
+ return self._name == other
diff --git a/bmtk-vb/build/lib/bmtk/utils/sonata/config.py b/bmtk-vb/build/lib/bmtk/utils/sonata/config.py
new file mode 100644
index 0000000..fdb97ab
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/sonata/config.py
@@ -0,0 +1,341 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import json
+import re
+import copy
+import datetime
+
+
+class SonataConfig(dict):
+ def __init__(self, *args, **kwargs):
+ super(SonataConfig, self).__init__(*args, **kwargs)
+ self._env_built = False
+
+ @property
+ def run(self):
+ return self['run']
+
+ @property
+ def tstart(self):
+ return self.run.get('tstart', 0.0)
+
+ @property
+ def tstop(self):
+ return self.run['tstop']
+
+ @property
+ def dt(self):
+ return self.run.get('dt', 0.1)
+
+ @property
+ def block_step(self):
+ return self.run.get('nsteps_block', 5000)
+
+ @property
+ def conditions(self):
+ return self['conditions']
+
+ @property
+ def celsius(self):
+ return self.conditions['celsius']
+
+ @property
+ def v_init(self):
+ return self.conditions['v_init']
+
+ @property
+ def path(self):
+ return self['config_path']
+
+ @property
+ def output(self):
+ return self['output']
+
+ @property
+ def output_dir(self):
+ return self.output['output_dir']
+
+ @property
+ def overwrite_output(self):
+ return self.output['overwrite_output_dir']
+
+ @property
+ def log_file(self):
+ return self.output['log_file']
+
+ @property
+ def components(self):
+ return self.get('components', {})
+
+ @property
+ def morphologies_dir(self):
+ return self.components['morphologies_dir']
+
+ @property
+ def synaptic_models_dir(self):
+ return self.components['synaptic_models_dir']
+
+ @property
+ def point_neuron_models_dir(self):
+ return self.components['point_neuron_models_dir']
+
+ @property
+ def mechanisms_dir(self):
+ return self.components['mechanisms_dir']
+
+ @property
+ def biophysical_neuron_models_dir(self):
+ return self.components['biophysical_neuron_models_dir']
+
+ @property
+ def templates_dir(self):
+ return self.components.get('templates_dir', None)
+
+ @property
+ def with_networks(self):
+ return 'networks' in self and len(self.nodes) > 0
+
+ @property
+ def networks(self):
+ return self['networks']
+
+ @property
+ def nodes(self):
+ return self.networks.get('nodes', [])
+
+ @property
+ def edges(self):
+ return self.networks.get('edges', [])
+
+ def copy_to_output(self):
+ copy_config(self)
+
+ @staticmethod
+ def get_validator():
+ raise NotImplementedError
+
+ @classmethod
+ def from_json(cls, config_file, validate=False):
+ validator = cls.get_validator() if validate else None
+ return cls(from_json(config_file, validator))
+
+ @classmethod
+ def from_dict(cls, config_dict, validate=False):
+ validator = cls.get_validator() if validate else None
+ return cls(from_dict(config_dict, validator))
+
+ @classmethod
+ def from_yaml(cls, config_file, validate=False):
+ raise NotImplementedError
+
+ @property
+ def reports(self):
+ return self.get('reports', {})
+
+ @property
+ def inputs(self):
+ return self.get('inputs', {})
+
+ def get_modules(self, module_name):
+ return [report for report in self.reports.values() if report['module'] == module_name]
+
+ def build_env(self):
+ if self._env_built:
+ return
+
+ self.create_output_dir()
+ self.copy_to_output()
+ self._env_built = True
+
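+# A minimal usage sketch (file and module names are illustrative):
+#
+#   cfg = SonataConfig.from_json('simulation_config.json')
+#   print(cfg.tstop, cfg.dt)  # run parameters
+#   for report in cfg.get_modules('membrane_report'):
+#       print(report)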
+
+def from_json(config_file, validator=None):
+ """Builds and validates a configuration json file.
+
+ :param config_file: File object or path to a json file.
+ :param validator: A SimConfigValidator object to validate json file. Won't validate if set to None
+ :return: A dictionary, verified against json validator and with manifest variables resolved.
+ """
+    if hasattr(config_file, 'read'):  # an open file handle
+        conf = json.load(config_file)
+    elif isinstance(config_file, string_types):  # a path to a json file
+        conf = json.load(open(config_file, 'r'))
+ else:
+ raise Exception('{} is not a file or file path.'.format(config_file))
+
+ # insert file path into dictionary
+ if 'config_path' not in conf:
+ conf['config_path'] = os.path.abspath(config_file)
+
+ # Will resolve manifest variables and validate
+ return from_dict(conf, validator)
+
+
+def from_dict(config_dict, validator=None):
+ """Builds and validates a configuration json dictionary object. Best to directly use from_json when possible.
+
+ :param config_dict: Dictionary object
+ :param validator: A SimConfigValidator object to validate json file. Won't validate if set to None
+ :return: A dictionary, verified against json validator and with manifest variables resolved.
+ """
+ assert(isinstance(config_dict, dict))
+ conf = copy.deepcopy(config_dict) # Since the functions will mutate the dictionary we will copy just-in-case.
+
+ if 'config_path' not in conf:
+ conf['config_path'] = os.path.abspath(__file__)
+
+ # Build the manifest and resolve variables.
+ # TODO: Check that manifest exists
+ manifest = __build_manifest(conf)
+ conf['manifest'] = manifest
+ __recursive_insert(conf, manifest)
+
+    # In our work with Blue-Brain it was agreed that the 'network' and 'simulation' parts of the config may be split
+    # into separate files. If this is the case we build each sub-file separately and merge it into this one.
+ for childconfig in ['network', 'simulation']:
+        if childconfig in conf and isinstance(conf[childconfig], string_types):
+ # Try to resolve the path of the network/simulation config files. If an absolute path isn't used find
+ # the file relative to the current config file. TODO: test if this will work on windows?
+ conf_str = conf[childconfig]
+            conf_path = conf_str if conf_str.startswith('/') else os.path.join(os.path.dirname(conf['config_path']), conf_str)
+
+ # Build individual json file and merge into parent.
+ child_json = from_json(conf_path)
+ del child_json['config_path'] # we don't want 'config_path' of parent being overwritten.
+ conf.update(child_json)
+
+ # Run the validator
+ if validator is not None:
+ validator.validate(conf)
+
+ return conf
+
+
+def copy_config(conf):
+ """Copy configuration file to different directory, with manifest variables resolved.
+
+ :param conf: configuration dictionary
+ """
+ output_dir = conf["output"]["output_dir"]
+ config_name = os.path.basename(conf['config_path'])
+ output_path = os.path.join(output_dir, config_name)
+ with open(output_path, 'w') as fp:
+ json.dump(conf, fp, indent=2)
+
+
+def __special_variables(conf):
+ """A list of preloaded variables to insert into the manifest, containing things like path to run-time directory,
+ configuration directory, etc.
+ """
+ pre_manifest = dict()
+ pre_manifest['$workingdir'] = os.path.dirname(os.getcwd())
+ if 'config_path' in conf:
+ pre_manifest['$configdir'] = os.path.dirname(conf['config_path']) # path of configuration file
+ pre_manifest['$configfname'] = conf['config_path']
+
+ dt_now = datetime.datetime.now()
+ pre_manifest['$time'] = dt_now.strftime('%H-%M-%S')
+ pre_manifest['$date'] = dt_now.strftime('%Y-%m-%d')
+ pre_manifest['$datetime'] = dt_now.strftime('%Y-%m-%d_%H-%M-%S')
+
+ return pre_manifest
+
+
+def __build_manifest(conf):
+ """Resolves the manifest section and resolve any internal variables"""
+ if 'manifest' not in conf:
+ return __special_variables(conf)
+
+ manifest = conf["manifest"]
+ resolved_manifest = __special_variables(conf)
+ resolved_keys = set()
+ unresolved_keys = set(manifest.keys())
+
+    # No longer using recursion since that can lead to an infinite loop if the person who writes the config file isn't
+    # careful. Also added code to allow for the ${VAR} format in case the user wants to use "$.../some_${MODEL}_here/..."
+ while unresolved_keys:
+ for key in unresolved_keys:
+ # Find all variables in manifest and see if they can be replaced by the value in resolved_manifest
+ value = __find_variables(manifest[key], resolved_manifest)
+
+            # If the value no longer has variables, add the key-value pair to resolved_manifest and remove it from unresolved_keys
+ if value.find('$') < 0:
+ resolved_manifest[key] = value
+ resolved_keys.add(key)
+
+ # remove resolved key-value pairs from set, and make sure at every iteration unresolved_keys shrinks to prevent
+ # infinite loops
+ n_unresolved = len(unresolved_keys)
+ unresolved_keys -= resolved_keys
+ if n_unresolved == len(unresolved_keys):
+ msg = "Unable to resolve manifest variables: {}".format(unresolved_keys)
+ raise Exception(msg)
+
+ return resolved_manifest
+
+
+def __recursive_insert(json_obj, manifest):
+ """Loop through the config and substitute the path variables (e.g.: $MY_DIR) with the values from the manifest
+
+ :param json_obj: A json dictionary object that may contain variables needing to be resolved.
+ :param manifest: A dictionary of variable values
+    :return: A new json dictionary with the variables resolved
+ """
+    if isinstance(json_obj, string_types):
+ return __find_variables(json_obj, manifest)
+
+ elif isinstance(json_obj, list):
+ new_list = []
+ for itm in json_obj:
+ new_list.append(__recursive_insert(itm, manifest))
+ return new_list
+
+ elif isinstance(json_obj, dict):
+ for key, val in json_obj.items():
+ if key == 'manifest':
+ continue
+ json_obj[key] = __recursive_insert(val, manifest)
+
+ return json_obj
+
+ else:
+ return json_obj
+
+
+def __find_variables(json_str, manifest):
+ """Replaces variables (i.e. $VAR, ${VAR}) with their values from the manifest.
+
+    :param json_str: a json string that may contain zero, one or multiple variables
+ :param manifest: dictionary of variable lookup values
+ :return: json_str with resolved variables. Won't resolve variables that don't exist in manifest.
+ """
+    variables = [m for m in re.finditer(r'\$\{?[\w]+\}?', json_str)]
+ for var in variables:
+ var_lookup = var.group()
+ if var_lookup.startswith('${') and var_lookup.endswith('}'):
+ # replace ${VAR} with $VAR
+ var_lookup = "$" + var_lookup[2:-1]
+ if var_lookup in manifest:
+ json_str = json_str.replace(var.group(), manifest[var_lookup])
+
+ return json_str
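+
+
+# A minimal sketch of how manifest resolution behaves (paths and keys are illustrative):
+#
+#   conf = {
+#       'manifest': {'$BASE_DIR': '${configdir}', '$NETWORK_DIR': '$BASE_DIR/network'},
+#       'networks': {'nodes': [{'nodes_file': '$NETWORK_DIR/nodes.h5'}]}
+#   }
+#   resolved = from_dict(conf)
+#   # resolved['networks']['nodes'][0]['nodes_file'] --> '<configdir>/network/nodes.h5'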
diff --git a/bmtk-vb/build/lib/bmtk/utils/sonata/edge.py b/bmtk-vb/build/lib/bmtk/utils/sonata/edge.py
new file mode 100644
index 0000000..435dd02
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/sonata/edge.py
@@ -0,0 +1,90 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+class EdgeSet(object):
+ def __init__(self, edge_ids, population):
+ self._edge_ids = edge_ids
+ self._population = population
+ self._n_edges = len(self._edge_ids)
+ self.__itr = 0
+
+ def __iter__(self):
+ self.__itr = 0
+ return self
+
+    def next(self):
+        # Python 2 iterator protocol; delegate to __next__
+        return self.__next__()
+
+    def __next__(self):
+        if self.__itr >= self._n_edges:
+            raise StopIteration
+
+        next_edge = self._population.iloc(self._edge_ids[self.__itr])
+        self.__itr += 1
+        return next_edge
+
+
+class Edge(object):
+ def __init__(self, src_node_id, trg_node_id, source_pop, target_pop, group_id, group_props, edge_types_props):
+ self._src_node_id = src_node_id
+ self._trg_node_id = trg_node_id
+ self._source_population = source_pop
+ self._target_population = target_pop
+ self._group_props = group_props
+ self._group_id = group_id
+ self._edge_type_props = edge_types_props
+
+ @property
+ def source_node_id(self):
+ return self._src_node_id
+
+ @property
+ def target_node_id(self):
+ return self._trg_node_id
+
+ @property
+ def source_population(self):
+ return self._source_population
+
+ @property
+ def target_population(self):
+ return self._target_population
+
+ @property
+ def group_id(self):
+ return self._group_id
+
+ @property
+ def edge_type_id(self):
+ return self._edge_type_props['edge_type_id']
+
+ @property
+ def dynamics_params(self):
+ raise NotImplementedError
+
+ def __getitem__(self, prop_key):
+ if prop_key in self._group_props:
+ return self._group_props[prop_key]
+ elif prop_key in self._edge_type_props:
+ return self._edge_type_props[prop_key]
+ else:
+ raise KeyError('Property {} not found in edge.'.format(prop_key))
+
+ def __contains__(self, prop_key):
+ return prop_key in self._group_props or prop_key in self._edge_type_props
\ No newline at end of file
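+
+# A usage sketch: property lookup on an Edge checks group properties first, then
+# edge-type properties ('syn_weight' is an illustrative property name):
+#
+#   for edge in edge_population:
+#       if 'syn_weight' in edge:
+#           print(edge.source_node_id, edge.target_node_id, edge['syn_weight'])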
diff --git a/bmtk-vb/build/lib/bmtk/utils/sonata/file.py b/bmtk-vb/build/lib/bmtk/utils/sonata/file.py
new file mode 100644
index 0000000..d70f66a
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/sonata/file.py
@@ -0,0 +1,124 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from . import utils
+from .file_root import NodesRoot, EdgesRoot
+
+
+class File(object):
+ def __init__(self, data_files, data_type_files, mode='r', gid_table=None, require_magic=True):
+ if mode != 'r':
+ raise Exception('Currently only read mode is supported.')
+
+ self._data_files = utils.listify(data_files)
+ self._data_type_files = utils.listify(data_type_files)
+
+ # Open and check HDF5 file(s)
+ self._h5_file_handles = [utils.load_h5(f, mode) for f in self._data_files]
+ if require_magic:
+            # Use an explicit loop; map() is lazy in Python 3, so the checks would otherwise never run
+            for h5_handle in self._h5_file_handles:
+                utils.check_magic(h5_handle)  # Check magic attribute in h5 files
+
+ # Check version number
+ avail_versions = set(map(utils.get_version, self._h5_file_handles))
+ if len(avail_versions) == 1:
+ self._version = list(avail_versions)[0]
+ elif len(avail_versions) > 1:
+ # TODO: log as warning
+            print('Warning: Passing in multiple hdf5 files of different versions')
+ self._version = ','.join(avail_versions)
+ else:
+ self._version = utils.VERSION_NA
+
+ self._csv_file_handles = [(f, utils.load_csv(f)) for f in self._data_type_files]
+
+ self._has_nodes = False
+ self._nodes = None # /nodes object
+ self._nodes_groups = [] # list of all hdf5 /nodes group
+ self._node_types_dataframes = [] # list of all csv node-types dataframe
+
+ self._has_edges = False
+ self._edges = None # /edges object
+ self._edges_groups = [] # list of all hdf5 /edges group
+ self._edge_types_dataframes = [] # list of csv edge-types dataframes
+
+ # for multiple inputs sort into edge files and node files
+ self._sort_types_file()
+ self._sort_h5_files()
+
+ if not (self._has_nodes or self._has_edges):
+            raise Exception('Could not find nodes or edges in the given file(s).')
+
+ if self._has_nodes:
+ self._nodes = NodesRoot(nodes=self._nodes_groups, node_types=self._node_types_dataframes, gid_table=gid_table)
+
+ if self._has_edges:
+ self._edges = EdgesRoot(edges=self._edges_groups, edge_types=self._edge_types_dataframes)
+
+ @property
+ def nodes(self):
+ return self._nodes
+
+ @property
+ def has_nodes(self):
+ return self._has_nodes
+
+ @property
+ def edges(self):
+ return self._edges
+
+ @property
+ def has_edges(self):
+ return self._has_edges
+
+ @property
+ def version(self):
+ return self._version
+
+ def _sort_types_file(self):
+        # TODO: node/edge type_id column names should not be hardcoded
+ for filename, df in self._csv_file_handles:
+ has_node_type_id = 'node_type_id' in df.columns
+ has_edge_type_id = 'edge_type_id' in df.columns
+ if has_node_type_id and has_edge_type_id:
+ # TODO: users may be creating their own dataframe and thus not have a filename
+                raise Exception('types file {} has both node_type_id and edge_type_id columns.'.format(filename))
+ elif has_node_type_id:
+ self._node_types_dataframes.append(df)
+ elif has_edge_type_id:
+ self._edge_types_dataframes.append(df)
+ else:
+                # TODO: if strict this should fail immediately
+ print('Warning: Could not determine if file {} was an edge-types or node-types file. Ignoring'.format(filename))
+
+ def _sort_h5_files(self):
+ for h5 in self._h5_file_handles:
+ has_nodes = '/nodes' in h5
+ has_edges = '/edges' in h5
+ if not (has_nodes or has_edges):
+ print('File {} contains neither nodes nor edges. Ignoring'.format(h5.filename))
+ else:
+ if has_nodes:
+ self._nodes_groups.append(h5)
+ self._has_nodes = True
+ if has_edges:
+ self._edges_groups.append(h5)
+ self._has_edges = True
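+
+
+# A minimal usage sketch (hypothetical file names):
+#
+#   sonata_file = File(data_files=['network/nodes.h5', 'network/edges.h5'],
+#                      data_type_files=['network/node_types.csv', 'network/edge_types.csv'])
+#   if sonata_file.has_nodes:
+#       for pop in sonata_file.nodes.populations:
+#           print(pop.name, len(pop))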
diff --git a/bmtk-vb/build/lib/bmtk/utils/sonata/file_root.py b/bmtk-vb/build/lib/bmtk/utils/sonata/file_root.py
new file mode 100644
index 0000000..071e88c
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/sonata/file_root.py
@@ -0,0 +1,301 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import sys
+
+import h5py
+import pandas as pd
+import numpy as np
+
+from . import utils
+from .population import NodePopulation, EdgePopulation
+from .types_table import NodeTypesTable, EdgeTypesTable
+
+
+class FileRoot(object):
+ """Base class for both /nodes and /edges root group in h5 file"""
+ def __init__(self, root_name, h5_files, h5_mode, csv_files):
+ """
+ :param root_name: should either be 'nodes' or 'edges'
+ :param h5_files: file (or list of files) containing nodes/edges
+ :param h5_mode: currently only supporting 'r' mode in h5py
+ :param csv_files: file (or list of files) containing node/edge types
+ """
+ self._root_name = root_name
+ self._h5_handles = [utils.load_h5(f, h5_mode) for f in utils.listify(h5_files)]
+ self._csv_handles = [(f, utils.load_csv(f)) for f in utils.listify(csv_files)]
+
+ # merge and create a table of the types table(s)
+ self._types_table = None
+ self._build_types_table()
+
+ # population_name->h5py.Group table (won't instantiate the population)
+ self._populations_groups = {}
+ self._store_groups()
+
+        # A map between population_name -> Population object. Population objects aren't created until called, in
+        # case the user wants to split populations among MPI nodes (instantiation will create node/edge indicies
+        # and other overhead).
+ self._populations_cache = {}
+
+ self.check_format()
+
+ @property
+ def root_name(self):
+ return self._root_name
+
+ @property
+ def population_names(self):
+ return list(self._populations_groups.keys())
+
+ @property
+ def populations(self):
+ return [self[name] for name in self.population_names]
+
+ @property
+ def types_table(self):
+ return self._types_table
+
+ @types_table.setter
+ def types_table(self, types_table):
+ self._types_table = types_table
+
+ def _build_types_table(self):
+ raise NotImplementedError
+
+ def _store_groups(self):
+ """Create a map between group population to their h5py.Group handle"""
+ for h5handle in self._h5_handles:
+ assert(self.root_name in h5handle.keys())
+ for pop_name, pop_group in h5handle[self._root_name].items():
+ if pop_name in self._populations_groups:
+ raise Exception('Multiple {} populations with name {}.'.format(self._root_name, pop_name))
+ self._populations_groups[pop_name] = pop_group
+
+ def _build_population(self, pop_name, pop_group):
+ raise NotImplementedError
+
+ def get_population(self, population_name, default=None):
+ """Return a population group object based on population's name"""
+ if population_name in self:
+ return self[population_name]
+ else:
+            # need this for EdgesRoot.get_populations
+ return default
+
+ def check_format(self):
+ if len(self._h5_handles) == 0:
+ raise Exception('No {} hdf5 files specified.'.format(self.root_name))
+
+ if len(self._csv_handles) == 0:
+ raise Exception('No {} types csv files specified.'.format(self.root_name))
+
+ def __contains__(self, population_name):
+ # TODO: Add condition if user passes in io.Population object
+ return population_name in self.population_names
+
+ def __getitem__(self, population_name):
+ if population_name not in self:
+ raise Exception('{} does not contain a population with name {}.'.format(self.root_name, population_name))
+
+ if population_name in self._populations_cache:
+ return self._populations_cache[population_name]
+ else:
+ h5_grp = self._populations_groups[population_name]
+ pop_obj = self._build_population(population_name, h5_grp)
+ self._populations_cache[population_name] = pop_obj
+ return pop_obj
+
+
+class NodesRoot(FileRoot):
+ def __init__(self, nodes, node_types, mode='r', gid_table=None):
+ super(NodesRoot, self).__init__('nodes', h5_files=nodes, h5_mode=mode, csv_files=node_types)
+
+ # load the gid <--> (node_id, population) map if specified.
+ self._gid_table = gid_table
+ self._gid_table_groupby = {}
+ self._has_gids = False
+        # TODO: Should we allow gid-table to be built into '/nodes' h5 groups, or must it always be a separate file?
+ if gid_table is not None:
+ self.set_gid_table(gid_table)
+
+ @property
+ def has_gids(self):
+ return self._has_gids
+
+ @property
+ def node_types_table(self):
+ return self.types_table
+
+ def set_gid_table(self, gid_table, force=False):
+ """Adds a map from a gids <--> (node_id, population) based on specification.
+
+ :param gid_table: An h5 file/group containing map specifications
+ :param force: Set to true to have it overwrite any exsiting gid table (default False)
+ """
+ assert(gid_table is not None)
+ if self.has_gids and not force:
+ raise Exception('gid table already exists (use force=True to overwrite)')
+
+ self._gid_table = utils.load_h5(gid_table, 'r')
+ # TODO: validate that the correct columns/dtypes exists.
+ gid_df = pd.DataFrame()
+ gid_df['gid'] = pd.Series(data=self._gid_table['gid'], dtype=self._gid_table['gid'].dtype)
+ gid_df['node_id'] = pd.Series(data=self._gid_table['node_id'], dtype=self._gid_table['node_id'].dtype)
+ gid_df['population'] = pd.Series(data=self._gid_table['population'])
+ population_names_ds = self._gid_table['population_names']
+ for pop_id, subset in gid_df.groupby(by='population'):
+ pop_name = population_names_ds[pop_id]
+ self._gid_table_groupby[pop_name] = subset
+ self._has_gids = True
+
+ def generate_gids(self, file_name, gids=None, force=False):
+ """Creates a gid <--> (node_id, population) table based on sonnet specifications.
+
+ Generating gids will take some time and so not recommend to call this during the simulation. Instead save
+ the file to the disk and pass in h5 file during the simulation (using gid_table parameter). In fact if you're
+ worried about efficeny don't use this method.
+
+ :param file_name: Name of h5 file to save gid map to.
+ :param gids: rule/list of gids to use
+ :param force: set to true to overwrite existing gid map (default False).
+ """
+
+        # TODO: This is very inefficient, fix (although not a priority as this function should be called sparingly)
+ # TODO: Allow users to pass in a list/function to determine gids
+ # TODO: We should use an enumerated lookup table for population ds instead of storing strings
+ # TODO: Move this to a utils function rather than a File
+ if self.has_gids and not force:
+ raise Exception('Nodes already have a gid table. Use force=True to overwrite existing gids.')
+
+ dir_name = os.path.dirname(os.path.abspath(file_name))
+ if not os.path.exists(dir_name):
+ os.makedirs(dir_name)
+
+ with h5py.File(file_name, 'w') as h5:
+ # TODO: should we use mode 'x', or give an option to overwrite existing files
+ n_nodes = 0
+ ascii_len = 0 # store max population name for h5 fixed length strings
+ # Find population names and the total size of every population
+ for node_pop in self.populations:
+ n_nodes += len(node_pop)
+ name_nchars = len(node_pop.name)
+ ascii_len = ascii_len if ascii_len >= name_nchars else name_nchars
+
+ # node_id and gid datasets should just be unsigned integers
+ h5.create_dataset(name='gid', shape=(n_nodes,), dtype=np.uint64)
+ h5.create_dataset(name='node_id', shape=(n_nodes,), dtype=np.uint64)
+ # TODO: determine population precisions from num of populations
+ h5.create_dataset(name='population', shape=(n_nodes,), dtype=np.uint16)
+
+ # Create a lookup table for pop-name
+ pop_name_list = [pname for pname in self.population_names]
+ if utils.using_py3:
+ dt = h5py.special_dtype(vlen=str) # python 3
+ else:
+ dt = h5py.special_dtype(vlen=unicode) # python 2
+ h5.create_dataset(name='population_names', shape=(len(pop_name_list),), dtype=dt)
+            # No clue why, but just passing in the data during create_dataset doesn't work in h5py
+ for i, n in enumerate(pop_name_list):
+ h5['population_names'][i] = n
+
+ # write each (gid, node_id, population)
+ indx = 0
+ for node_pop in self.populations:
+ # TODO: Block write if special gid generator isn't being used
+ # TODO: Block write populations at least
+ pop_name = node_pop.name # encode('ascii', 'ignore')
+ pop_id = pop_name_list.index(pop_name)
+ for node in node_pop:
+ h5['node_id'][indx] = node.node_id
+ h5['population'][indx] = pop_id
+ h5['gid'][indx] = indx
+ indx += 1
+
+ # pass gid table to current nodes
+ self.set_gid_table(h5)
+
+ def _build_types_table(self):
+ self.types_table = NodeTypesTable()
+ for _, csvhandle in self._csv_handles:
+ self.types_table.add_table(csvhandle)
+
+ def _build_population(self, pop_name, pop_group):
+ return NodePopulation(pop_name, pop_group, self.node_types_table)
+
+ def __getitem__(self, population_name):
+        # If there is a gid map then we must pass it into the population
+ pop_obj = super(NodesRoot, self).__getitem__(population_name)
+ if self.has_gids and (not pop_obj.has_gids) and (population_name in self._gid_table_groupby):
+ pop_obj.add_gids(self._gid_table_groupby[population_name])
+
+ return pop_obj
+
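+# A minimal sketch of generating a gid table once and reusing it (hypothetical paths):
+#
+#   nodes = NodesRoot(nodes='network/nodes.h5', node_types='network/node_types.csv')
+#   nodes.generate_gids('output/gid_table.h5')  # slow; do this once, offline
+#   # later runs: NodesRoot(..., gid_table='output/gid_table.h5')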
+
+class EdgesRoot(FileRoot):
+ def __init__(self, edges, edge_types, mode='r'):
+ super(EdgesRoot, self).__init__(root_name='edges', h5_files=edges, h5_mode=mode, csv_files=edge_types)
+
+ @property
+ def edge_types_table(self):
+ return self.types_table
+
+ def get_populations(self, name=None, source=None, target=None):
+ """Find all populations with matching criteria, either using the population name (which will return a list
+ of size 0 or 1) or based on the source/target population.
+
+        To return a list of all populations just use the populations property.
+
+ :param name: (str) name of population
+ :param source: (str or NodePopulation) returns edges with nodes coming from matching source-population
+        :param target: (str or NodePopulation) returns edges with nodes going to the matching target-population
+        :return: A (potentially empty) list of EdgePopulation objects filtered by the criteria.
+ """
+ assert((name is not None) ^ (source is not None or target is not None))
+ if name is not None:
+ return [self[name]]
+
+ else:
+ # TODO: make sure groups aren't built unless they are a part of the results
+ selected_pops = self.population_names
+ if source is not None:
+ # filter out only edges with given source population
+ source = source.name if isinstance(source, NodePopulation) else source
+ selected_pops = [name for name in selected_pops
+ if EdgePopulation.get_source_population(self._populations_groups[name]) == source]
+ if target is not None:
+ # filter out by target population
+ target = target.name if isinstance(target, NodePopulation) else target
+ selected_pops = [name for name in selected_pops
+ if EdgePopulation.get_target_population(self._populations_groups[name]) == target]
+
+ return [self[name] for name in selected_pops]
+
+ def _build_types_table(self):
+ self.types_table = EdgeTypesTable()
+ for _, csvhandle in self._csv_handles:
+ self.edge_types_table.add_table(csvhandle)
+
+ def _build_population(self, pop_name, pop_group):
+ return EdgePopulation(pop_name, pop_group, self.edge_types_table)
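+
+
+# A minimal sketch of selecting edge populations by their connected node populations
+# (population names are illustrative):
+#
+#   v1_edges = edges_root.get_populations(target='v1')
+#   lgn_to_v1 = edges_root.get_populations(source='lgn', target='v1')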
diff --git a/bmtk-vb/build/lib/bmtk/utils/sonata/group.py b/bmtk-vb/build/lib/bmtk/utils/sonata/group.py
new file mode 100644
index 0000000..4264d45
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/sonata/group.py
@@ -0,0 +1,416 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import pandas as pd
+
+from .column_property import ColumnProperty
+from .node import Node, NodeSet
+from .edge import Edge, EdgeSet
+
+
+class Group(object):
+ """A container containig a node/edge population groups.
+
+ A node or edge population will have one or more groups, each having a unique identifier. Each group shared the same
+ columns and datatypes, thus each group is essentially a different model.
+ """
+
+ def __init__(self, group_id, h5_group, parent):
+ self._group_id = int(group_id)
+ self._parent = parent
+ self._types_table = parent.types_table
+ self._h5_group = h5_group
+ self._types_index_col = self._types_table.index_column_name
+
+ self._group_columns = ColumnProperty.from_h5(h5_group)
+ # TODO: combine group_columns, group_column_names and group_columns_map, doesn't need to be 3 structures
+ self._group_column_map = {col.name: col for col in self._group_columns}
+ self._group_column_names = set(col.name for col in self._group_columns)
+ self._group_table = {prop: h5_group[prop.name] for prop in self._group_columns}
+ self._ncolumns = len(self._group_columns)
+
+ self._all_columns = self._group_columns + self._types_table.columns
+ self._all_column_names = set(col.name for col in self._all_columns)
+
+ self._nrows = 0 # number of group members
+
+ # For storing dynamics_params subgroup (if it exists)
+ self._has_dynamics_params = 'dynamics_params' in self._h5_group and len(self._h5_group['dynamics_params']) > 0
+ self._dynamics_params_columns = []
+
+ # An index of all the rows in parent population that map onto a member of this group
+ self._parent_indicies = None # A list of parent rows indicies
+ self._parent_indicies_built = False
+
+ self.check_format()
+
+ @property
+ def group_id(self):
+ return self._group_id
+
+ @property
+ def has_dynamics_params(self):
+ return False
+
+ @property
+ def columns(self):
+ return self._group_columns
+
+ @property
+ def group_columns(self):
+ return self._group_columns
+
+ @property
+ def all_columns(self):
+ return self._all_columns
+
+ @property
+ def has_gids(self):
+ return self._parent.has_gids
+
+ @property
+ def parent(self):
+ return self._parent
+
+ def get_dataset(self, column_name):
+ return self._group_table[column_name]
+
+ def column(self, column_name, group_only=False):
+ if column_name in self._group_column_map:
+ return self._group_column_map[column_name]
+ elif not group_only and column_name in self._types_table.columns:
+ return self._types_table.column(column_name)
+ else:
+            raise KeyError(column_name)
+
+ def check_format(self):
+ # Check that all the properties have the same number of rows
+ col_counts = [col.nrows for col in self._group_columns + self._dynamics_params_columns]
+ if len(set(col_counts)) > 1:
+ # TODO: Would be nice to warn user which dataset have different size
+            raise Exception('properties in {}/{} have different lengths (row counts)'.format(self._parent.name, self._group_id))
+ elif len(set(col_counts)) == 1:
+ self._nrows = col_counts[0]
+
+ def build_indicies(self, force=False):
+ raise NotImplementedError
+
+ def to_dataframe(self):
+ raise NotImplementedError
+
+ def get_values(self, property_name, all_rows=False):
+ """Returns all values for a group property.
+
+        Note that a row within a group may not have a corresponding node/edge, the rows may be in a different order,
+        or multiple nodes/edges may share the same group row. Setting all_rows=False will return the values as you
+        would see them if you iterated through all the population's items. Setting all_rows=True just returns the
+        data as they appear in the dataset (which will be faster).
+
+ :param property_name: Name of dataset property/column to fetch.
+        :param all_rows: Set False to return values in the order they appear in the population, True to return the entire dataset
+ :return: A list of values for the given column name.
+ """
+ raise NotImplementedError
+
+ def __len__(self):
+ return self._nrows
+
+ def __getitem__(self, group_index):
+ group_props = {}
+ for cname, h5_obj in self._group_table.items():
+ group_props[cname] = h5_obj[group_index]
+ return group_props
+
+ def __contains__(self, prop_name):
+ """Search that a column name exists in this group"""
+ return prop_name in self._group_column_names
+
+
+class NodeGroup(Group):
+ def __init__(self, group_id, h5_group, parent):
+ super(NodeGroup, self).__init__(group_id, h5_group, parent)
+        # Note: Don't call build_indicies right away so users can call __getitem__ without having to load all the
+ # node_ids
+
+ @property
+ def node_ids(self):
+ self.build_indicies()
+ return self._parent.inode_ids(self._parent_indicies)
+
+ @property
+ def node_type_ids(self):
+ self.build_indicies()
+ return self._parent.inode_type_ids(self._parent_indicies)
+
+ @property
+ def gids(self):
+ self.build_indicies()
+ return self._parent.igids(self._parent_indicies)
+
+ def build_indicies(self, force=False):
+ if self._parent_indicies_built and not force:
+ return
+
+ # TODO: Check for the special case where there is only one group
+        # TODO: If memory becomes an issue on very large node sets (10's of millions) consider using a generator
+ # I've pushed the actual building of the population->group indicies onto the parent population
+ self._parent_indicies = self._parent.group_indicies(self.group_id, build_cache=True)
+ self._parent_indicies_built = True
+
+ def get_values(self, property_name, filtered_indicies=True):
+ self.build_indicies()
+ # TODO: Check if property_name is node_id, node_type, or gid
+
+ if property_name in self._group_columns:
+ if not filtered_indicies:
+ # Just return all values in dataset
+ return np.array(self._group_table[property_name])
+ else:
+ # Return only those values for group indicies with associated nodes
+ grp_indicies = self._parent.igroup_indicies(self._parent_indicies)
+                # It is possible that the group_index is unordered or contains duplicates which will cause h5py slicing
+ # to fail. Thus convert to a numpy array
+ # TODO: loading the entire table is not good if the filtered nodes is small, consider building.
+ tmp_array = np.array(self._group_table[property_name])
+ return tmp_array[grp_indicies]
+
+ elif property_name in self._parent.node_types_table.columns:
+ # For properties that come from node-types table we need to build the results from scratch
+ # TODO: Need to performance test, I think this code could be optimized.
+ node_types_table = self._parent.node_types_table
+ nt_col = node_types_table.column(property_name)
+ tmp_array = np.empty(shape=len(self._parent_indicies), dtype=nt_col.dtype)
+ for i, ntid in enumerate(self.node_type_ids):
+ tmp_array[i] = node_types_table[ntid][property_name]
+
+ return tmp_array
+
+ def to_dataframe(self):
+ self.build_indicies()
+
+ # Build a dataframe of group properties
+ # TODO: Include dynamics_params?
+ properties_df = pd.DataFrame()
+ for col in self._group_columns:
+ if col.dimension > 1:
+ for i in range(col.dimension):
+ # TODO: see if column name exists in the attributes
+ col_name = '{}.{}'.format(col.name, i)
+ properties_df[col_name] = pd.Series(self._h5_group[col.name][:, i])
+ else:
+ properties_df[col.name] = pd.Series(self._h5_group[col.name])
+
+ # Build a dataframe of parent node (node_id, gid, node_types, etc)
+ root_df = pd.DataFrame()
+ root_df['node_type_id'] = pd.Series(self.node_type_ids)
+ root_df['node_id'] = pd.Series(self.node_ids)
+ root_df['node_group_index'] = pd.Series(self._parent.igroup_indicies(self._parent_indicies)) # used as pivot
+ if self._parent.has_gids:
+ root_df['gid'] = self.gids
+
+ # merge group props df with parent df
+ results_df = root_df.merge(properties_df, how='left', left_on='node_group_index', right_index=True)
+ results_df = results_df.drop('node_group_index', axis=1)
+
+ # Build node_types dataframe and merge
+ node_types_df = self._parent.node_types_table.to_dataframe()
+ # remove properties that exist in the group
+ node_types_cols = [c.name for c in self._parent.node_types_table.columns if c not in self._group_columns]
+ node_types_df = node_types_df[node_types_cols]
+
+ # TODO: consider caching these results
+ return results_df.merge(node_types_df, how='left', left_on='node_type_id', right_index=True)
+
+ def filter(self, **filter_props):
+ """Filter all nodes in the group by key=value pairs.
+
+        The filter specifications may apply to either node_type or group column properties. Currently it only
+        supports equivalence. An intersection (and operator) is applied across all of the filter pairs. This will
+        produce a generator of all nodes matching the filters.
+
+ for node in filter(pop_name='VIp', depth=10.0):
+ assert(node['pop_name'] == 'VIp' and node['depth'] == 10.0)
+
+ :param filter_props: keys and their values to filter nodes on.
+ :return: A generator that produces all valid nodes within the group with matching key==value pairs.
+ """
+ # TODO: Integrate this with NodeSet.
+ self.build_indicies()
+ node_types_table = self._parent.node_types_table
+ node_type_filter = set(node_types_table.node_type_ids) # list of valid node_type_ids
+ type_filter = False
+        group_prop_filter = {}  # dict of 'prop_name'==prop_val for group datasets
+ group_filter = False
+
+ # Build key==value lists
+ for filter_key, filter_val in filter_props.items():
+ # TODO: Check if node_type_id is an input
+ if filter_key in self._group_columns:
+                # keep a list of group properties to filter on
+ group_prop_filter[filter_key] = filter_val
+ group_filter = True
+
+ elif filter_key in node_types_table.columns:
+ # for node_types we just keep a list of all node_type_ids with matching key==value pairs
+ node_type_filter &= set(node_types_table.find(filter_key, filter_val))
+ type_filter = True
+
+ else:
+ # TODO: should we raise an exception?
+                # TODO: Use logger
+ print('Could not find property {} in either group or types table. Ignoring.'.format(filter_key))
+
+ # iterate through all nodes, skipping ones that don't have matching key==value pairs
+ for indx in self._parent_indicies:
+ # TODO: Don't build the node until you filter out node_type_id
+ node = self._parent.get_row(indx)
+ if type_filter and node.node_type_id not in node_type_filter:
+ # confirm node_type_id is a correct one
+ continue
+
+ if group_filter:
+ # Filter by group property values
+ # TODO: Allow group properties to handle lists
+ src_failed = True
+ for k, v in group_prop_filter.items():
+ if node[k] != v:
+ break
+ else:
+ src_failed = False
+
+ if src_failed:
+ continue
+
+ yield node
+
+ def __iter__(self):
+ self.build_indicies()
+        # Pass a list of indicies into the NodeSet; the NodeSet will take care of the iteration
+ return NodeSet(self._parent_indicies, self._parent).__iter__()
+
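+# A minimal sketch of working with a node group (property names are illustrative):
+#
+#   grp = node_population.get_group(0)
+#   depths = grp.get_values('depth')  # aligned with grp.node_ids
+#   for node in grp.filter(pop_name='VIp'):
+#       print(node.node_id)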
+
+class EdgeGroup(Group):
+ def __init__(self, group_id, h5_group, parent):
+ super(EdgeGroup, self).__init__(group_id, h5_group, parent)
+        self._indicies_count = 0  # Used to keep track of the number of indicies (since it contains multiple ranges)
+
+ self.__itr_index = 0
+ self.__itr_range = []
+ self.__itr_range_idx = 0
+ self.__itr_range_max = 0
+
+ def build_indicies(self, force=False):
+ if self._parent_indicies_built and not force:
+ return
+
+ # Saves indicies as a (potentially empty) list of ranges
+ # TODO: Turn index into generator, allows for cheaper iteration over the group
+ self._indicies_count, self._parent_indicies = self._parent.group_indicies(self.group_id, build_cache=False)
+ self._parent_indicies_built = True
+
+ def to_dataframe(self):
+ raise NotImplementedError
+
+ def _get_parent_ds(self, parent_ds):
+ self.build_indicies()
+ ds_vals = np.zeros(self._indicies_count, dtype=parent_ds.dtype)
+ c_indx = 0
+ for indx_range in self._parent_indicies:
+ indx_beg, indx_end = indx_range[0], indx_range[1]
+ n_indx = c_indx + (indx_end - indx_beg)
+ ds_vals[c_indx:n_indx] = parent_ds[indx_beg:indx_end]
+ c_indx = n_indx
+
+ return ds_vals
+
+ def src_node_ids(self):
+ return self._get_parent_ds(self.parent._source_node_id_ds)
+
+ def trg_node_ids(self):
+ return self._get_parent_ds(self.parent._target_node_id_ds)
+
+ def node_type_ids(self):
+ return self._get_parent_ds(self.parent._type_id_ds)
+
+ def get_values(self, property_name, all_rows=False):
+ # TODO: Need to take into account if property_name is in the edge-types
+ if property_name not in self.columns:
+            raise KeyError(property_name)
+
+ if all_rows:
+ return np.array(self._h5_group[property_name])
+ else:
+ self.build_indicies()
+ # Go through all ranges and build the return list
+ dataset = self._h5_group[property_name]
+ return_list = np.empty(self._indicies_count, self._h5_group[property_name].dtype)
+ i = 0
+ for r_beg, r_end in self._parent_indicies:
+ r_len = r_end - r_beg
+ return_list[i:(i+r_len)] = dataset[r_beg:r_end]
+ i += r_len
+ return return_list
+
+ def filter(self, **filter_props):
+        # TODO: I'm not sure if I want to do this. Need to check on a larger dataset than I currently have.
+ raise NotImplementedError
+
+ def __iter__(self):
+ self.build_indicies()
+ # TODO: Implement using an EdgeSet
+        if len(self._parent_indicies) == 0:
+            self.__itr_range_max = 0
+            self.__itr_range_idx = 0
+            self.__itr_range = []
+            self.__itr_index = 0
+        else:
+            # Stop at the largest range end (I'm not sure if the indicies are ordered; if we can make them ordered
+            # then in the future just use self._parent_indicies[-1][1])
+ self.__itr_range_max = len(self._parent_indicies)
+ self.__itr_range_idx = 0
+ self.__itr_range = self._parent_indicies[0]
+ self.__itr_index = self.__itr_range[0]
+
+ return self
+
+ def next(self):
+ return self.__next__()
+
+ def __next__(self):
+ if self.__itr_range_idx >= self.__itr_range_max:
+ raise StopIteration
+
+ nxt_edge = self._parent.get_row(self.__itr_index)
+ self.__itr_index += 1
+ if self.__itr_index >= self.__itr_range[1]:
+ # iterator has moved past the current range
+ self.__itr_range_idx += 1
+ if self.__itr_range_idx < self.__itr_range_max:
+ # move the iterator onto next range
+ self.__itr_range = self._parent_indicies[self.__itr_range_idx] # update range
+ self.__itr_index = self.__itr_range[0] # update iterator to start and the beginning of new range
+ else:
+ self.__itr_range = []
+
+ return nxt_edge
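+
+
+# A minimal sketch of iterating an edge group ('syn_weight' is an illustrative
+# property name):
+#
+#   grp = edge_population.get_group(0)
+#   weights = grp.get_values('syn_weight')
+#   for edge in grp:
+#       print(edge.source_node_id, edge.target_node_id)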
diff --git a/bmtk-vb/build/lib/bmtk/utils/sonata/node.py b/bmtk-vb/build/lib/bmtk/utils/sonata/node.py
new file mode 100644
index 0000000..4fa24ae
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/sonata/node.py
@@ -0,0 +1,126 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+
+
+class NodeSet(object):
+ # TODO: Merge NodeSet and NodePopulation
+ def __init__(self, node_indicies, population, **parameters):
+ self._indicies = node_indicies
+ self._n_nodes = len(self._indicies)
+ self._population = population
+
+ self.__itr_index = 0
+
+ @property
+ def node_ids(self):
+ return self._population.inode_ids(self._indicies)
+
+ @property
+ def gids(self):
+ return self._population.igids(self._indicies)
+
+ @property
+ def node_type_ids(self):
+ return self._population.inode_type_ids(self._indicies)
+
+ '''
+ @property
+ def node_types(self):
+ return [self._population._node_types_table[ntid] for ntid in self._node_type_ids]
+ '''
+
+ def get_properties(self, property_name):
+ raise NotImplementedError
+
+ def __len__(self):
+ return self._n_nodes
+
+ def __iter__(self):
+ self.__itr_index = 0
+ return self
+
+ def next(self):
+ return self.__next__()
+
+ def __next__(self):
+ if self.__itr_index >= self._n_nodes:
+ raise StopIteration
+
+ node = self._population.get_row(self._indicies[self.__itr_index])
+ self.__itr_index += 1
+ return node
+
+
+class Node(object):
+ # TODO: include population name/reference
+    # TODO: make a dictionary (or preferably a collections.MutableMapping)
+ def __init__(self, node_id, node_type_id, node_types_props, group_id, group_props, dynamics_params, gid=None):
+ self._node_id = node_id
+ self._gid = gid
+ self._node_type_id = node_type_id
+ self._node_type_props = node_types_props
+ self._group_id = group_id
+ self._group_props = group_props
+
+ @property
+ def node_id(self):
+ return self._node_id
+
+ @property
+ def gid(self):
+ return self._gid
+
+ @property
+ def group_id(self):
+ return self._group_id
+
+ @property
+ def node_type_id(self):
+ return self._node_type_id
+
+ @property
+ def group_props(self):
+ return self._group_props
+
+ @property
+ def node_type_properties(self):
+ return self._node_type_props
+
+ @property
+ def dynamics_params(self):
+ raise NotImplementedError
+
+ def __getitem__(self, prop_key):
+ if prop_key in self._group_props:
+ return self._group_props[prop_key]
+ elif prop_key in self._node_type_props:
+ return self._node_type_props[prop_key]
+ elif prop_key == 'node_id':
+ return self.node_id
+        elif prop_key == 'node_type_id':
+ return self.node_type_id
+ else:
+ raise KeyError('Unknown property {}'.format(prop_key))
+
+ def __contains__(self, prop_key):
+ return prop_key in self._group_props or prop_key in self._node_type_props
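+
+
+# A usage sketch: property lookup on a Node checks group properties first, then
+# node-type properties ('pop_name' is an illustrative property name):
+#
+#   for node in node_population:
+#       if 'pop_name' in node:
+#           print(node.node_id, node['pop_name'])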
diff --git a/bmtk-vb/build/lib/bmtk/utils/sonata/population.py b/bmtk-vb/build/lib/bmtk/utils/sonata/population.py
new file mode 100644
index 0000000..0edebd2
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/sonata/population.py
@@ -0,0 +1,608 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import h5py
+import numpy as np
+
+from .utils import range_itr, get_attribute_h5
+from .node import Node, NodeSet
+from .edge import Edge, EdgeSet
+from .group import NodeGroup, EdgeGroup
+
+
+class Population(object):
+ def __init__(self, pop_name, pop_group, types_table):
+ self._pop_name = pop_name
+ self._pop_group = pop_group
+ self._types_table = types_table
+ self._nrows = 0
+
+ # For storing individual groups
+ self._group_map = {} # grp-id --> h5py.Group object
+ self._find_groups()
+        self._group_cache = {}  # grp-id --> sonata.io.Group() object
+
+        # References to most of the population's primary datasets
+ self._type_id_ds = pop_group[self.type_ids_column]
+ self._group_id_ds = pop_group[self.group_id_column]
+ self._group_index_ds = pop_group[self.group_index_column]
+
+ self._group_indicies = {} # grp-id --> list of rows indicies
+ self._group_indicies_cache_built = False
+
+ @property
+ def name(self):
+ """name of current population"""
+ return self._pop_name
+
+ @property
+ def group_ids(self):
+ """List of all group_ids belonging to population"""
+ return list(self._group_map.keys())
+
+ @property
+ def groups(self):
+ """Returns a list of sonata.Group objects"""
+ return [self.get_group(name) for name in self._group_map.keys()]
+
+ @property
+ def types_table(self):
+ return self._types_table
+
+ @property
+ def type_ids(self):
+ return np.array(self._type_id_ds)
+
+ @property
+ def group_id_ds(self):
+ return self._group_id_ds
+
+ @property
+ def group_index_ds(self):
+ return self._group_index_ds
+
+ @property
+ def group_id_column(self):
+ raise NotImplementedError
+
+ @property
+ def group_index_column(self):
+ raise NotImplementedError
+
+ @property
+ def type_ids_column(self):
+ raise NotImplementedError
+
+ def to_dataframe(self):
+ """Convert Population to dataframe"""
+ raise NotImplementedError
+
+ def get_group(self, group_id):
+ if group_id in self._group_cache:
+ return self._group_cache[group_id]
+ else:
+ grp_h5 = self._group_map[group_id]
+ grp_obj = self._build_group(group_id, grp_h5)
+ self._group_cache[group_id] = grp_obj
+ return grp_obj
+
+ def group_indicies(self, group_id, build_cache=False):
+ """Returns a list of all the population row index that maps onto the given group.
+
+ Used for iterating or searching within a Group
+
+ :param group_id: id of a given group
+ :param build_cache: Will cache indicies for all groups. Will be faster if making multiple calls but requires
+ more memory (default False)
+ :return: A (possibly empty) list of row indicies (non-contiguous, but unique)
+ """
+ if self._group_indicies_cache_built:
+ return self._group_indicies.get(group_id, [])
+
+ else:
+ tmp_index = pd.DataFrame()
+ # TODO: Need to check the memory overhead, especially for edges. See if an iterative search is just as fast
+ tmp_index['grp_id'] = pd.Series(self._group_id_ds, dtype=self._group_id_ds.dtype)
+ tmp_index['row_indx'] = pd.Series(range_itr(self._nrows), dtype=np.uint32)
+ if build_cache:
+ # save all indicies as arrays
+ self._group_indicies = {grp_id: np.array(subset['row_indx'])
+ for grp_id, subset in tmp_index.groupby(by='grp_id')}
+ self._group_indicies_cache_built = True
+ return self._group_indicies.get(group_id, [])
+ else:
+ # TODO: Manually del tmp_index to clear out the memory?
+ tmp_index = tmp_index[tmp_index['grp_id'] == group_id]
+ return np.array(tmp_index['row_indx'])
+
+ def igroup_ids(self, row_indicies):
+ return self._group_id_ds[list(row_indicies)]
+
+ def igroup_indicies(self, row_indicies):
+ return self._group_index_ds[list(row_indicies)]
+
+ def _find_groups(self):
+ """Create a map between group-id and h5py.Group reference"""
+ for grp_key, grp_h5 in self._pop_group.items():
+ if grp_key.isdigit():
+ grp_id = int(grp_key)
+ self._group_map[grp_id] = grp_h5
+ else:
+ # TODO: Should we put a warning if an unrecognized group exists?
+ pass
+
+ def _build_group(self, group_id, group_h5):
+ raise NotImplementedError
+
+ def __len__(self):
+ return self._nrows
+
+
+class NodePopulation(Population):
+ def __init__(self, pop_name, pop_group, node_types_tables):
+ super(NodePopulation, self).__init__(pop_name=pop_name, pop_group=pop_group, types_table=node_types_tables)
+
+ # TODO: node_ids can be implicit
+ self._node_id_ds = pop_group['node_id']
+ self._nrows = len(self._node_id_ds)
+
+ # TODO: This isn't necessary if only using iterator. Delay building index until get_node() is called.
+ self._index_nid2row = None # A lookup from node_id --> h5 row number
+ self._node_id_index_built = False
+ self._build_node_id_index()
+
+ # indicies for gid <--> node_id map
+ self._has_gids = False
+ self._index_gid2row = None # gid --> row (for searching by gid)
+ self._index_row2gid = None # row --> gid (for iterator or searching by node-id)
+ self._gid_lookup_fnc = lambda _: None # for looking up gid by row, use fnc pointer rather than conditional
+
+ self.__itr_index = 0 # for iterator
+
+ @property
+ def group_id_column(self):
+ return 'node_group_id'
+
+ @property
+ def group_index_column(self):
+ return 'node_group_index'
+
+ @property
+ def type_ids_column(self):
+ return 'node_type_id'
+
+ @property
+ def has_gids(self):
+ return self._has_gids
+
+ @property
+ def node_ids(self):
+ return np.array(self._node_id_ds)
+
+ @property
+ def gids(self):
+ if self.has_gids:
+ return np.array(self._index_gid2row.index)
+ else:
+ return None
+
+ @property
+ def node_types_table(self):
+ return self._types_table
+
+ @property
+ def index_column_name(self):
+ return 'node_id'
+
+ def add_gids(self, gid_map_df, force=False):
+ if self.has_gids and not force:
+            # TODO: not sure if it's best to raise an exception or just continue on in silence?
+ raise Exception('Node population {} already has gids mapped onto node-ids.'.format(self.name))
+ # return
+
+ # Create map from gid --> node_id --> row #
+ self._build_node_id_index()
+ tmp_df = pd.DataFrame()
+ tmp_df['row_id'] = self._index_nid2row.index
+ tmp_df['node_id'] = self._index_nid2row
+ gid_map_df = gid_map_df.merge(tmp_df, how='left', left_on='node_id', right_on='node_id')
+ gid_map_df = gid_map_df.drop(['node_id', 'population'], axis=1)
+ self._index_gid2row = gid_map_df.set_index('gid')
+ self._index_row2gid = gid_map_df.set_index('row_id')
+ self._gid_lookup_fnc = lambda row_indx: self._index_row2gid.loc[row_indx]['gid']
+ self._has_gids = True
+
+ def to_dataframe(self):
+ raise NotImplementedError
+
+ def get_row(self, row_indx):
+        # TODO: Use a helper function so we don't have to look up gid/node_id twice
+        # Note: not caching the nodes for memory reasons, but it might be beneficial to do so.
+ node_id = self._node_id_ds[row_indx]
+ node_type_id = self._type_id_ds[row_indx]
+ node_group_id = self._group_id_ds[row_indx]
+ node_group_index = self._group_index_ds[row_indx]
+
+ node_type_props = self.node_types_table[node_type_id]
+ node_group_props = self.get_group(node_group_id)[node_group_index]
+ node_gid = self._gid_lookup_fnc(row_indx)
+
+ return Node(node_id, node_type_id, node_type_props, node_group_id, node_group_props, None, gid=node_gid)
+
+ def get_rows(self, row_indicies):
+ """Returns a set of all nodes based on list of row indicies.
+
+ Warning: currently due to the use of h5py, the list must be ordered and cannot contain duplicates.
+
+ :param row_indicies: A list of row indicies
+ :return: An iterable NodeSet of nodes in the specified indicies
+ """
+ # TODO: Check that row_indicies is unsigned and the max (which will be the last value) < n_rows
+ # TODO: Check order and check for duplicates in list
+ return NodeSet(row_indicies, self)
+
+ def inode_ids(self, row_indicies):
+        # h5py raises errors if row_indicies is a numpy array or pandas Series, so convert to a python list
+        # TODO: list conversion can be expensive, see if h5py will work with np arrays natively.
+ return self._node_id_ds[list(row_indicies)]
+
+ def igids(self, row_indicies):
+ gids = self._gid_lookup_fnc(row_indicies)
+ if gids is not None:
+ gids = np.array(gids)
+ return gids
+
+ def inode_type_ids(self, row_indicies):
+ # self._node_type_id_ds
+ return self._type_id_ds[list(row_indicies)]
+
+ def get_node_id(self, node_id):
+        row_indx = self._index_nid2row.loc[node_id]  # label-based lookup; the Series is indexed by node_id
+ return self.get_row(row_indx)
+
+ def get_gid(self, gid):
+ # assert(self.has_gids)
+        row_indx = self._index_gid2row.loc[gid]['row_id']  # label-based lookup; the index is keyed by gid
+ return self.get_row(row_indx)
+
+ def filter(self, **filter_props):
+ for grp in self.groups:
+ for node in grp.filter(**filter_props):
+ yield node
+
+ def _build_node_id_index(self, force=False):
+ if self._node_id_index_built and not force:
+ return
+
+ self._index_nid2row = pd.Series(range_itr(self._nrows), index=self._node_id_ds, dtype=self._node_id_ds.dtype)
+ self._node_id_index_built = True
+
+ def _build_group(self, group_id, group_h5):
+ return NodeGroup(group_id, group_h5, self)
+
+ def __iter__(self):
+ self.__itr_index = 0
+ return self
+
+ def next(self):
+ return self.__next__()
+
+ def __next__(self):
+ if self.__itr_index >= self._nrows:
+ raise StopIteration
+
+ nxt_node = self.get_row(self.__itr_index)
+ self.__itr_index += 1
+ return nxt_node
+
+ def __getitem__(self, item):
+ if isinstance(item, slice):
+ # TODO: Check
+            start = item.start if item.start is not None else 0
+            stop = item.stop if item.stop is not None else self._nrows
+            step = item.step if item.step is not None else 1
+            row_indicies = range_itr(start, stop, step)
+ return NodeSet(row_indicies, self)
+
+ elif isinstance(item, int):
+ return self.get_row(item)
+
+ elif isinstance(item, list):
+            return NodeSet(item, self)  # NodeSet needs the population reference, as in get_rows()
+ else:
+            raise TypeError('Unable to get item using {}.'.format(type(item)))
+
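+# Example usage (illustrative sketch; assumes `pop` is a NodePopulation opened from
+# a sonata nodes file, and that its groups have a dataset named 'ei'):
+#
+#   node = pop.get_node_id(0)          # look up a single node by its node_id
+#   first_ten = pop[0:10]              # NodeSet over the first ten rows
+#   for node in pop.filter(ei='e'):    # nodes whose group dataset 'ei' == 'e'
+#       print(node.node_id)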
+
+class EdgePopulation(Population):
+ class __IndexStruct(object):
+ """Class sto store indicies subgroup"""
+ # TODO: Use collections.namedtuple
+ def __init__(self, lookup_table, edge_table):
+ self.lookup_table = lookup_table
+ self.edge_table = edge_table
+
+ def __init__(self, pop_name, pop_group, edge_types_tables):
+ super(EdgePopulation, self).__init__(pop_name=pop_name, pop_group=pop_group, types_table=edge_types_tables)
+
+ # keep reference to source and target datasets
+ self._source_node_id_ds = pop_group['source_node_id']
+ self._target_node_id_ds = pop_group['target_node_id']
+
+ self._nrows = len(self._source_node_id_ds)
+
+ # TODO: Throw an error/warning if missing
+ self._source_population = EdgePopulation.get_source_population(pop_group)
+ self._target_population = EdgePopulation.get_target_population(pop_group)
+
+ self.__itr_index = 0
+
+ # TODO: use a function pointer for get_index so it doesn't have to run a conditional every time
+        # TODO: add properties so the user can determine which indices exist.
+ self._targets_index = None
+ self._has_target_index = False
+ self._sources_index = None
+ self._has_source_index = False
+ self.build_indicies()
+
+ @property
+ def group_id_column(self):
+ return 'edge_group_id'
+
+ @property
+ def group_index_column(self):
+ return 'edge_group_index'
+
+ @property
+ def type_ids_column(self):
+ return 'edge_type_id'
+
+ @property
+ def source_population(self):
+ return self._source_population
+
+ @property
+ def target_population(self):
+ return self._target_population
+
+ @staticmethod
+ def get_source_population(pop_group_h5):
+ return get_attribute_h5(pop_group_h5['source_node_id'], 'node_population', None)
+
+ @staticmethod
+ def get_target_population(pop_group_h5):
+ return get_attribute_h5(pop_group_h5['target_node_id'], 'node_population', None)
+
+ @property
+ def edge_types_table(self):
+ return self._types_table
+
+ def to_dataframe(self):
+ raise NotImplementedError
+
+ def build_indicies(self):
+ if 'indicies' in self._pop_group:
+ indicies_grp = self._pop_group['indicies']
+ for index_name, index_grp in indicies_grp.items():
+                # TODO: Let __IndexStruct build the indices
+ # Make sure subgroup has the correct datasets
+ if not isinstance(index_grp, h5py.Group):
+ continue
+
+ if 'node_id_to_range' not in index_grp:
+                    # TODO: make this more general, i.e. 'id_to_range', so we can index on gids, edge_types, etc.
+ # TODO: Check that there are two columns in dataset
+ raise Exception('index {} in {} edges is missing column {}.'.format(index_name, self.name,
+ 'node_id_to_range'))
+ if 'range_to_edge_id' not in index_grp:
+ raise Exception('index {} in {} edges is missing column {}.'.format(index_name, self.name,
+ 'range_to_edge_id'))
+
+ # Cache the index
+ targets_lookup = index_grp['node_id_to_range']
+ edges_range = index_grp['range_to_edge_id']
+ index_obj = self.__IndexStruct(targets_lookup, edges_range)
+
+ # Determine the type of index
+ if index_name == 'source_to_target':
+ self._sources_index = index_obj
+ self._has_source_index = True
+ elif index_name == 'target_to_source':
+ self._targets_index = index_obj
+ self._has_target_index = True
+ else:
+ # TODO: Need to send this to a logger rather than stdout
+ print('Unrecognized index {}. Ignoring.'.format(index_name))
+
+ def _build_group(self, group_id, group_h5):
+ return EdgeGroup(group_id, group_h5, self)
+
+ def group_indicies(self, group_id, build_cache=False):
+        # For nodes it's safe to just keep a list of all indices that map onto a given group. For edges, because
+        # there are many more rows (and typically far fewer groups), we want to build a range-based index as for
+        # the source/target ids
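+        # Example (sketch): if rows 3,4,5,9 map onto the group, this returns
+        # (4, [[3, 6], [9, 10]]) -- a count plus half-open [beg, end) row ranges.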
+ if len(self._group_map) == 1:
+ return len(self), [[0, len(self)]]
+
+ grp_indicies = super(EdgePopulation, self).group_indicies(group_id, build_cache=False)
+ if len(grp_indicies) == 0:
+ # Return an index with no ranges
+ return 0, []
+
+        # cluster into ranges. Naive implementation; there is probably a faster way to cluster an ordered array!
+ range_beg = grp_indicies[0]
+ ranges = []
+ for i in range_itr(1, len(grp_indicies)):
+ if (grp_indicies[i-1]+1) != grp_indicies[i]:
+ ranges.append([range_beg, grp_indicies[i-1]+1])
+ range_beg = grp_indicies[i]
+ ranges.append([range_beg, grp_indicies[-1]+1])
+ return len(grp_indicies), np.array(ranges, dtype=np.uint32)
+
+ '''
+ def _get_target_index(self):
+ # TODO: Do only once
+ if self._targets_index is not None:
+ return self._targets_index
+
+ if 'incidies' in self._pop_group:
+ if 'target_to_source' in self._pop_group['incidies']:
+ targets_lookup = self._pop_group['incidies']['target_to_source']['node_id_to_range']
+ edges_range = self._pop_group['incidies']['target_to_source']['range_to_edge_id']
+ self._targets_index = self.__IndexStruct(targets_lookup, edges_range)
+ return self._targets_index
+
+ # TODO: What to do if index doesn't exist?
+ raise NotImplementedError
+ '''
+
+ def get_row(self, index):
+ src_node = self._source_node_id_ds[index]
+ trg_node = self._target_node_id_ds[index]
+ edge_type_id = self._type_id_ds[index]
+ edge_types_props = self.edge_types_table[edge_type_id]
+
+ edge_group_id = self._group_id_ds[index]
+ edge_group_index = self._group_index_ds[index]
+ edge_group_props = self.get_group(edge_group_id)[edge_group_index]
+ return Edge(trg_node_id=trg_node, src_node_id=src_node, source_pop=self.source_population,
+                    target_pop=self.target_population, group_id=edge_group_id,
+ group_props=edge_group_props, edge_types_props=edge_types_props)
+
+ def filter(self, **filter_props):
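+        # Example (illustrative sketch): edges.filter(edge_type_id=100, syn_weight=2.0e-05)
+        # yields Edge objects whose edge_type_id matches and whose group dataset
+        # 'syn_weight' (an assumed dataset name) equals the given value.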
+ selected_edge_types = set(self.edge_types_table.edge_type_ids)
+ types_filter = False # Do we need to filter results by edge_type_id
+ if 'edge_type_id' in filter_props:
+ # TODO: Make sure the edge_type_id is valid
+ selected_edge_types = set([filter_props['edge_type_id']])
+ del filter_props['edge_type_id']
+ types_filter = True
+
+ selected_groups = set(self._group_map.keys()) # list of grp_id's that will be used
+ group_prop_filter = {} # list of actual query statements
+ group_filter = False # do we need to filter results by group_id
+
+ # Go through filter key==value pairs, create filters for groups and edge_types
+ for filter_key, filter_val in filter_props.items():
+ # Find out what groups, if any, the column should search in.
+ group_query = False # If it's querying a group property don't look in edge_types
+ types_query = False
+            groups_with_key = set(grp_id for grp_id, grp_h5 in self._group_map.items() if filter_key in grp_h5)
+            if groups_with_key:
+                # TODO: Need to check the dtype's match
+                selected_groups &= groups_with_key  # intersect so rows must belong to a group containing every key
+                group_prop_filter[filter_key] = filter_val
+                group_query = True
+                group_filter = True
+
+ if (not group_query) and filter_key in self.edge_types_table.columns:
+ # Presearch the edge types and get only those edge_type_ids which match key==val
+ selected_edge_types &= set(self.edge_types_table.find(filter_key, filter_val))
+ types_filter = True
+ types_query = True
+
+ if not (group_query or types_query):
+                # Property key exists in neither a group nor the edge_types_table
+ raise Exception('Could not find property {}'.format(filter_key))
+
+ # Iterate through all nodes, only returning those that match the filter
+ for indx in range_itr(self._nrows):
+ # Filter by edge_type_id
+ if types_filter:
+                # TODO: Invert selected_edge_types; it will be faster to fail immediately than to search the entire list
+ if self._type_id_ds[indx] not in selected_edge_types:
+ continue
+
+ # Filter by group properties
+ if group_filter:
+ # TODO: Invert group search
+ grp_id = self._group_id_ds[indx]
+ if grp_id not in selected_groups:
+ continue
+
+ grp_index = self._group_index_ds[indx]
+ search_failed = True
+ for prop_key, prop_val in group_prop_filter.items():
+ if prop_val != self._group_map[grp_id][prop_key][grp_index]:
+ break
+ else:
+ search_failed = False
+
+ if search_failed:
+ continue
+
+ yield self.get_row(indx)
+
+ def get_target(self, target_node_id):
+ # TODO: Raise an exception, or call find() and log a warning that the index is not available
+ # TODO: check validity of target_node_id (non-negative integer and smaller than index range)
+ assert(self._has_target_index)
+ return self._get_index(self._targets_index, target_node_id)
+
+ def get_targets(self, target_node_ids):
+ # TODO: verify input is iterable
+ assert(self._has_target_index)
+ trg_index = self._targets_index
+ for trg_id in target_node_ids:
+ for edge in self._get_index(trg_index, trg_id):
+ yield edge
+
+ def get_source(self, source_node_id):
+ assert(self._has_source_index)
+ return self._get_index(self._sources_index, source_node_id)
+
+ def get_sources(self, source_node_ids):
+ assert(self._has_target_index)
+ trg_index = self._sources_index
+ for src_id in source_node_ids:
+ for edge in self._get_index(trg_index, src_id):
+ yield edge
+
+ def _get_index(self, index_struct, lookup_id):
+ # TODO: Use a EdgeSet instead
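+        # Index layout (sketch): lookup_table[lookup_id] gives a [beg, end) range of
+        # rows in edge_table; each edge_table row is itself a [beg, end) range of
+        # edge row ids, which get_row() resolves into Edge objects.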
+ if lookup_id >= len(index_struct.lookup_table):
+ # TODO: Store length in index
+            return  # end the generator; raising StopIteration inside a generator is an error under PEP 479
+
+ edges_table = index_struct.edge_table
+ lookup_beg, lookup_end = index_struct.lookup_table[lookup_id]
+ for i in range_itr(lookup_beg, lookup_end):
+ edge_indx_beg, edge_indx_end = edges_table[i]
+ for edge_indx in range_itr(edge_indx_beg, edge_indx_end):
+ yield self.get_row(edge_indx)
+
+ def __iter__(self):
+ self.__itr_index = 0
+ return self
+
+ def __next__(self):
+ if self.__itr_index >= self._nrows:
+ raise StopIteration
+
+ next_edge = self.get_row(self.__itr_index)
+ self.__itr_index += 1
+ return next_edge
+
+ def next(self):
+ return self.__next__()
diff --git a/bmtk-vb/build/lib/bmtk/utils/sonata/types_table.py b/bmtk-vb/build/lib/bmtk/utils/sonata/types_table.py
new file mode 100644
index 0000000..375d332
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/sonata/types_table.py
@@ -0,0 +1,220 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import numpy as np
+import pandas as pd
+import numbers
+import math
+
+from .column_property import ColumnProperty
+
+
+def remove_nans(types_dict):
+ """Convert nan values to None in type row (dict)"""
+ for k, v in types_dict.items():
+ if isinstance(v, numbers.Real) and math.isnan(v):
+ types_dict[k] = None
+
+
+class TypesTable(object):
+ def __init__(self, parent=None):
+        self._parent = parent  # Used to keep track of the FileRoot object this table belongs to
+ self._columns = {}
+ self._index_typeid2df = {} # map from node(edge)_type_id --> csv Row
+ self._column_map = {} # TODO: Use defaultdict
+ # self._id_table = self.IDSearcher(self)
+ self._dataframes = [] # list of all pandas dataframe (types tables)
+
+ self._cached_node_types = {}
+ self._df_cache = None
+
+ self._itr_indx = 0
+ self._itr_end = 0
+
+ @property
+ def index_column_name(self):
+ raise NotImplementedError
+
+ @property
+ def type_ids(self):
+        # list() so py3 callers can take the length of, and index into, the result
+        return list(self._index_typeid2df.keys())
+
+ @property
+ def columns(self):
+ return list(self._columns.values())
+
+ def column(self, column_name):
+ return self._columns[column_name]
+
+ def add_table(self, nt_df):
+        # TODO: Just saving the entire dataframe currently because we don't expect the node-types table to get too
+        # large (a few hundred rows at most). If that changes, consider deferring loading of the csv until it is
+        # explicitly requested by the user.
+ self._dataframes.append(nt_df)
+
+ # Check that the type ids are unique and build id --> dataframe map
+ nt_df.set_index(keys=self.index_column_name, inplace=True)
+ for type_id in list(nt_df.index):
+ if type_id in self._index_typeid2df:
+ raise Exception('Multiple {}s with value {}.'.format(self.index_column_name, type_id))
+ self._index_typeid2df[type_id] = nt_df
+
+ columns = ColumnProperty.from_csv(nt_df)
+ for col in columns:
+ self._columns[col.name] = col
+            if col.name in self._column_map:  # _column_map is keyed by column name
+ # TODO: make sure dtype matches. Bad things can happen if the same col has heterogeneous dtypes
+ self._column_map[col.name].append(nt_df)
+ else:
+ self._column_map[col.name] = [nt_df]
+
+ def find(self, column_key, column_val, silent=False):
+ """Returns a list of type_ids that contain column property column_key==column_val
+
+ :param column_key: Name of column to search
+ :param column_val: Value of column to select for
+ :param silent: Set to true to prevent KeyError if column_key doesn't exist (default=False)
+ :return: A (potentially empty) list of type_ids
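+
+        Example (illustrative sketch): find('model_type', 'biophysical') returns the
+        type_ids whose csv row has model_type == 'biophysical' ('model_type' is an
+        assumed column name); a list value matches any of its elements.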
+ """
+ if not silent and column_key not in self.columns:
+ raise KeyError
+
+ is_list = isinstance(column_val, list)
+ selected_ids = [] # running list of valid type-ids
+ column_dtype = self.column(column_key).dtype
+        for df in self._column_map.get(column_key, []):  # empty when silent=True and the column is unknown
+            # if a csv column has all NONE values, pandas will load the values as float(NaN)'s. Thus for str/object
+            # columns we need to check the dtype, otherwise we'll get an invalid comparison.
+ if df[column_key].dtype == column_dtype:
+ if is_list:
+ indicies = df[df[column_key].isin(column_val)].index
+ else:
+ indicies = df[df[column_key] == column_val].index
+
+ if len(indicies) > 0:
+ selected_ids.extend(list(indicies))
+
+ return selected_ids
+
+ def to_dataframe(self, cache=False):
+ if self._df_cache is not None:
+ return self._df_cache
+
+ if len(self._dataframes) == 0:
+ return None
+ elif len(self._dataframes) == 1:
+ merged_table = self._dataframes[0]
+ else:
+ # merge all dataframes together
+            merged_table = self._dataframes[0].reset_index()  # TODO: just merge on the indices rather than reset
+ for df in self._dataframes[1:]:
+ try:
+ merged_table = merged_table.merge(df.reset_index(), how='outer')
+ except ValueError as ve:
+                    # There is a potential issue when merging where one dtype differs from another (e.g., if all
+                    # model_template values are NONE, pandas will load the column as float64). First solution is to
+                    # find the columns that differ and upcast them to object dtype (TODO: look for a better solution)
+ right_df = df.reset_index()
+ for col in set(merged_table.columns) & set(right_df.columns):
+ # find all shared columns whose dtype differs
+ if merged_table[col].dtype != right_df[col].dtype:
+ # change column(s) dtype to object
+ merged_table[col] = merged_table[col] if merged_table[col].dtype == object \
+ else merged_table[col].astype(object)
+ right_df[col] = right_df[col] if right_df[col].dtype == object \
+ else right_df[col].astype(object)
+
+ merged_table = merged_table.merge(right_df, how='outer')
+
+ merged_table.set_index(self.index_column_name, inplace=True)
+
+ if cache:
+ self._df_cache = merged_table
+
+ return merged_table
+
+ def __iter__(self):
+ self._itr_indx = 0
+ self._itr_end = len(self.type_ids)
+ return self
+
+ def next(self):
+ return self.__next__()
+
+ def __next__(self):
+ if self._itr_indx >= self._itr_end:
+ raise StopIteration
+
+ ntid = self.type_ids[self._itr_indx]
+ self._itr_indx += 1
+ return self[ntid]
+
+ def __getitem__(self, type_id):
+ if isinstance(type_id, tuple):
+ return [self[ntid] for ntid in type_id]
+
+ elif isinstance(type_id, numbers.Integral):
+ if type_id not in self._index_typeid2df:
+ raise Exception('{} {} not found'.format(self.index_column_name, type_id))
+
+ if type_id in self._cached_node_types:
+ return self._cached_node_types[type_id]
+ else:
+ nt_dict = self._index_typeid2df[type_id].loc[type_id].to_dict()
+ # TODO: consider just removing key from dict if value is None/NaN
+ remove_nans(nt_dict) # pd turns None into np.nan's. Temp soln is to just convert them back.
+ self._cached_node_types[type_id] = nt_dict
+ self._cached_node_types[type_id][self.index_column_name] = type_id # include node/edge_type_id
+ return nt_dict
+ else:
+ raise Exception('Unsupported search on node-type-id')
+
+ def __contains__(self, type_id):
+ return type_id in self._index_typeid2df
+
+ def __repr__(self):
+ return repr(self.to_dataframe())
+
+
+class NodeTypesTable(TypesTable):
+ def __init__(self, parent=None):
+ super(NodeTypesTable, self).__init__(parent)
+
+ @property
+ def index_column_name(self):
+ return 'node_type_id'
+
+ @property
+ def node_type_ids(self):
+ return self.type_ids
+
+
+class EdgeTypesTable(TypesTable):
+ def __init__(self, parent=None):
+ super(EdgeTypesTable, self).__init__(parent)
+
+ @property
+ def index_column_name(self):
+ return 'edge_type_id'
+
+ @property
+ def edge_type_ids(self):
+ return self.type_ids
diff --git a/bmtk-vb/build/lib/bmtk/utils/sonata/utils.py b/bmtk-vb/build/lib/bmtk/utils/sonata/utils.py
new file mode 100644
index 0000000..953572d
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/sonata/utils.py
@@ -0,0 +1,116 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import os
+import sys
+
+import h5py
+import pandas as pd
+import numpy as np
+
+MAGIC_ATTR = 'magic'
+MAGIC_VAL = 0x0A7A
+VERSION_ATTR = 'version'
+VERSION_NA = 'NA'
+VERSION_CURRENT = '0.1'
+
+try:
+ ver_split = VERSION_CURRENT.split('.')
+    VERSION_MAJOR = int(ver_split[0])
+    VERSION_MINOR = int(ver_split[1])
+except (IndexError, AttributeError, ValueError):
+ VERSION_MAJOR = 0
+ VERSION_MINOR = 1
+
+
+def listify(files):
+    # TODO: change this to include any iterable data structures (sets, pandas sequences, etc)
+ if not isinstance(files, (list, tuple)):
+ return [files]
+ else:
+ return files
+
+
+def load_h5(h5file, mode='r'):
+ # TODO: Allow for h5py.Group also
+ if isinstance(h5file, h5py.File):
+ return h5file
+
+ return h5py.File(h5file, mode)
+
+
+def load_csv(csvfile):
+ # TODO: make the separator more flexible
+ if isinstance(csvfile, pd.DataFrame):
+ return csvfile
+
+ # TODO: check if it is csv object and convert to a pd dataframe
+ return pd.read_csv(csvfile, sep=' ', na_values='NONE')
+
+
+def get_attribute_h5(h5obj, attribute_name, default=None):
+    val = h5obj.attrs.get(attribute_name, default)
+    if using_py3 and isinstance(val, bytes):
+        # There is a bug where h5py returns unicode/str attributes as bytes
+ val = val.decode()
+
+ return val
+
+
+def check_magic(hdf5_file):
+ """Check the magic attribute exists according to the sonata format"""
+ h5_file_obj = load_h5(hdf5_file)
+ if MAGIC_ATTR not in h5_file_obj.attrs:
+ raise Exception('File {} missing top-level \"{}\" attribute.'.format(h5_file_obj.filename, MAGIC_ATTR))
+    elif np.uint32(get_attribute_h5(h5_file_obj, MAGIC_ATTR)) != MAGIC_VAL:
+ raise Exception('File {} has unexpected magic value (expected {})'.format(h5_file_obj.filename, MAGIC_VAL))
+
+ return True
+
+
+def get_version(hdf5_file):
+ h5_file_obj = load_h5(hdf5_file)
+ if VERSION_ATTR not in h5_file_obj.attrs:
+ return VERSION_NA
+
+ else:
+ version_val = get_attribute_h5(h5_file_obj, VERSION_ATTR)
+ version_str = str(version_val[0])
+ for ver_sub in version_val[1:]:
+ version_str += '.{}'.format(ver_sub)
+ return version_str
+
+
+def add_hdf5_magic(hdf5_handle):
+    hdf5_handle['/'].attrs[MAGIC_ATTR] = np.uint32(MAGIC_VAL)
+
+
+def add_hdf5_version(hdf5_handle):
+    hdf5_handle['/'].attrs[VERSION_ATTR] = [np.uint32(VERSION_MAJOR), np.uint32(VERSION_MINOR)]
+
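+# Example round-trip (illustrative sketch):
+#
+#   with h5py.File('network.h5', 'w') as h5:
+#       add_hdf5_magic(h5)
+#       add_hdf5_version(h5)
+#   check_magic('network.h5')   # -> True
+#   get_version('network.h5')   # -> '0.1'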
+
+if sys.version_info[0] == 3:
+ using_py3 = True
+ range_itr = range
+else:
+ using_py3 = False
+ range_itr = xrange
diff --git a/bmtk-vb/build/lib/bmtk/utils/spike_trains/__init__.py b/bmtk-vb/build/lib/bmtk/utils/spike_trains/__init__.py
new file mode 100644
index 0000000..7fdcfe6
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/spike_trains/__init__.py
@@ -0,0 +1,24 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+from .spikes_csv import SpikesGenerator
+from .spikes_file import SpikesFile
diff --git a/bmtk-vb/build/lib/bmtk/utils/spike_trains/spikes_csv.py b/bmtk-vb/build/lib/bmtk/utils/spike_trains/spikes_csv.py
new file mode 100644
index 0000000..64651d0
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/spike_trains/spikes_csv.py
@@ -0,0 +1,94 @@
+# Copyright 2017. Allen Institute. All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+# following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+# disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
+# products derived from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+import pandas as pd
+import numpy as np
+import csv
+import h5py
+from six import string_types
+
+from bmtk.utils import sonata
+
+class Rates(object):
+    def __iter__(self):
+        return self
+
+    def next(self):
+        raise StopIteration
+
+    def __next__(self):
+        # py3 compatibility: delegate to next() so subclasses only need to override next()
+        return self.next()
+
+
+class NormalRates(Rates):
+ def __init__(self, t_start, t_end, rate_mu, rate_sigma=5.0):
+ self.t_start = t_start
+ self.t_end = t_end
+ self.period_mu = 1.0/float(rate_mu)
+ self.period_sigma = 1.0/float(rate_mu + rate_sigma)
+
+ self._current_t = t_start
+
+ def next(self):
+ self._current_t += abs(np.random.normal(self.period_mu, self.period_sigma))
+ if self._current_t > self.t_end:
+ self._current_t = self.t_start
+ raise StopIteration
+ else:
+ return self._current_t
+
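+# Behavior sketch: NormalRates accumulates inter-spike intervals drawn from a
+# normal distribution, so e.g. NormalRates(0.0, 1.0, rate_mu=10.0) yields roughly
+# ten spike times in (0, 1], spaced ~0.1 s apart on average.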
+
+class SpikesGenerator(object):
+ def __init__(self, nodes, populations=None, t_min=0, t_max=1.0):
+ self._t_min = t_min
+ self._t_max = t_max
+
+ if isinstance(nodes, string_types):
+ nodes_h5 = h5py.File(nodes, 'r')
+ nodes_grp = nodes_h5['/nodes']
+ if populations is None:
+ populations = nodes_grp.keys()
+
+            # TODO: Need a way to use the sonata library without having to use node-types
+ nodes = []
+ for node_pop in populations:
+ nodes.extend(nodes_grp[node_pop]['node_id'])
+
+ self._nodes = {n: Rates() for n in nodes}
+
+ def set_rate(self, firing_rate, gids=None, t_start=None, t_end=None):
+ t_start = t_start or self._t_min
+ assert(t_start >= self._t_min)
+
+ t_end = t_end or self._t_max
+ assert(t_end <= self._t_max)
+
+ gids = gids or self._nodes.keys()
+ for gid in gids:
+ self._nodes[gid] = NormalRates(t_start, t_end, firing_rate)
+
+ def save_csv(self, csv_file_name, in_ms=False):
+ conv = 1000.0 if in_ms else 1.0
+
+ with open(csv_file_name, 'w') as csv_file:
+ csv_writer = csv.writer(csv_file, delimiter=' ')
+ csv_writer.writerow(['gid', 'spike-times'])
+ for gid, rate_gen in self._nodes.items():
+ csv_writer.writerow([gid, ','.join(str(r*conv) for r in rate_gen)])
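+
+
+# Example usage (illustrative sketch; file names assumed):
+#   sg = SpikesGenerator('network/v1_nodes.h5', t_max=3.0)
+#   sg.set_rate(15.0)                         # ~15 Hz spike trains for every node
+#   sg.save_csv('v1_spikes.csv', in_ms=True)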
+
diff --git a/bmtk-vb/build/lib/bmtk/utils/spike_trains/spikes_file.py b/bmtk-vb/build/lib/bmtk/utils/spike_trains/spikes_file.py
new file mode 100644
index 0000000..fd4577a
--- /dev/null
+++ b/bmtk-vb/build/lib/bmtk/utils/spike_trains/spikes_file.py
@@ -0,0 +1,174 @@
+import os
+from collections import Counter
+import numpy as np
+import pandas as pd
+import h5py
+
+
+class SpikesFile(object):
+ _file_adaptors = {}
+
+ def __init__(self, filename, mode='r', filetype=None, **params):
+ self._ftype = self._get_file_type(filename, filetype)
+ self._adaptor = SpikesFile._file_adaptors[self._ftype](filename, **params)
+
+ def _get_file_type(self, filename, filetype):
+ if filetype is not None:
+ if filetype not in self._file_adaptors:
+ raise Exception('Unknown spikes file type {}'.format(filetype))
+ else:
+ return filetype
+
+ else:
+ for ft, adaptor_cls in self._file_adaptors.items():
+ if adaptor_cls.is_type(filename):
+ return ft
+
+ raise Exception('Unable to determine file type for {}.'.format(filename))
+
+ def _get_spikes_sort(self, spikes_list, t_window=None):
+ if t_window is not None:
+ spikes_list.sort()
+ return [s for s in spikes_list if t_window[0] <= s <= t_window[1]]
+ else:
+ spikes_list.sort()
+ return spikes_list
+
+ @property
+ def gids(self):
+ """Return a list of all gids"""
+ return self._adaptor.gids
+
+ def to_dataframe(self):
+ return self._adaptor.to_dataframe()
+
+    def get_spikes(self, gid, time_window=None):
+        return self._adaptor.get_spikes(gid, time_window=time_window)
+
+ def __eq__(self, other):
+ return self.is_equal(other)
+
+ def is_equal(self, other, err=0.00001, time_window=None):
+ # check that gids matches
+ if set(self.gids) != set(other.gids):
+ return False
+
+ for gid in self.gids:
+ spikes_self = self._get_spikes_sort(self.get_spikes(gid), time_window)
+ spikes_other = self._get_spikes_sort(other.get_spikes(gid), time_window)
+
+ if len(spikes_other) != len(spikes_self):
+ return False
+
+ for s0, s1 in zip(spikes_self, spikes_other):
+ if abs(s0 - s1) > err:
+ return False
+ return True
+
+ @classmethod
+ def register_adaptor(cls, adaptor_cls):
+ cls._file_adaptors[adaptor_cls.ext_name()] = adaptor_cls
+ return adaptor_cls
+
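+# Example (illustrative sketch; paths assumed): compare simulated spikes against
+# expected output; the file type is inferred from the extension
+# ('.h5'/'.hdf'/'.hdf5' or '.csv'/'.txt'):
+#
+#   expected = SpikesFile('expected/spikes.csv')
+#   actual = SpikesFile('output/spikes.h5')
+#   matches = actual.is_equal(expected, err=1e-05, time_window=(0.0, 3000.0))
+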
+
+class SpikesFileAdaptor(object):
+ def __init__(self, filename):
+ self._filename = filename
+
+ @property
+ def gids(self):
+ raise NotImplementedError
+
+ def to_dataframe(self):
+ raise NotImplementedError
+
+ def get_spikes(self, gid, time_window=None):
+ raise NotImplementedError
+
+ @staticmethod
+ def is_type(filename):
+ raise NotImplementedError
+
+ @staticmethod
+ def ext_name():
+ raise NotImplementedError
+
+
+@SpikesFile.register_adaptor
+class SpikesFileH5(SpikesFileAdaptor):
+ def __init__(self, filename, **params):
+ super(SpikesFileH5, self).__init__(filename)
+ self._h5_handle = h5py.File(self._filename, 'r')
+ self._sort_order = self._h5_handle['/spikes'].attrs.get('sorting', None)
+ self._gid_ds = self._h5_handle['/spikes/gids']
+ self._timestamps_ds = self._h5_handle['/spikes/timestamps']
+
+ self._indexed = False
+ self._gid_indicies = {}
+ self._build_indicies()
+
+ def _build_indicies(self):
+ if self._sort_order == 'by_gid':
+ indx_beg = 0
+ c_gid = self._gid_ds[0]
+ for indx, gid in enumerate(self._gid_ds):
+ if gid != c_gid:
+ self._gid_indicies[c_gid] = slice(indx_beg, indx)
+ c_gid = gid
+ indx_beg = indx
+ self._gid_indicies[c_gid] = slice(indx_beg, indx+1)
+ self._indexed = True
+ else:
+ self._gid_indicies = {int(gid): [] for gid in np.unique(self._gid_ds)}
+ for indx, gid in enumerate(self._gid_ds):
+ self._gid_indicies[gid].append(indx)
+ self._indexed = True
+
+ @property
+ def gids(self):
+ return list(self._gid_indicies.keys())
+
+ def to_dataframe(self):
+ return pd.DataFrame({'timestamps': self._timestamps_ds, 'gids': self._gid_ds})
+
+ def get_spikes(self, gid, time_window=None):
+ return self._timestamps_ds[self._gid_indicies[gid]]
+
+ @staticmethod
+ def is_type(filename):
+ _, fext = os.path.splitext(filename)
+ fext = fext.lower()
+ return fext == '.h5' or fext == '.hdf' or fext == '.hdf5'
+
+ @staticmethod
+ def ext_name():
+ return 'h5'
+
+
+@SpikesFile.register_adaptor
+class SpikesFileCSV(SpikesFileAdaptor):
+ def __init__(self, filename, **params):
+ super(SpikesFileCSV, self).__init__(filename)
+ self._spikes_df = pd.read_csv(self._filename, names=['timestamps', 'gids'], sep=' ')
+
+ @property
+ def gids(self):
+ return list(self._spikes_df.gids.unique())
+
+ def to_dataframe(self):
+ return self._spikes_df
+
+ def get_spikes(self, gid, time_window=None):
+ return np.array(self._spikes_df[self._spikes_df.gids == gid].timestamps)
+
+ @staticmethod
+ def is_type(filename):
+ _, fext = os.path.splitext(filename)
+ fext = fext.lower()
+ return fext == '.csv' or fext == '.txt'
+
+ @staticmethod
+ def ext_name():
+ return 'csv'
+
+
diff --git a/bmtk-vb/docker/Dockerfile b/bmtk-vb/docker/Dockerfile
new file mode 100644
index 0000000..5adcc0a
--- /dev/null
+++ b/bmtk-vb/docker/Dockerfile
@@ -0,0 +1,84 @@
+FROM continuumio/anaconda2
+MAINTAINER Kael Dai
+
+RUN apt-get update && apt-get install -y automake \
+ libtool \
+ build-essential \
+ libncurses5-dev
+
+ENV BUILD_DIR=/home/build
+ENV HOME_DIR=/home/shared
+ENV WORK_DIR=${HOME_DIR}/workspace
+
+RUN mkdir -p ${BUILD_DIR}
+RUN mkdir -p ${HOME_DIR}
+RUN mkdir -p ${WORK_DIR}
+
+RUN conda install -y numpy h5py lxml pandas matplotlib jsonschema scipy mpi4py cmake
+
+# Install NEURON for BioNet
+RUN conda install -y -c kaeldai neuron
+
+
+### Install NEST for PointNet
+ENV NEST_VER=2.12.0
+ENV NEST_INSTALL_DIR=${BUILD_DIR}/nest/build
+ENV PYTHON_ENV=python2.7
+
+RUN cd ${BUILD_DIR}; \
+ conda install -y gsl; \
+ wget --quiet https://github.com/nest/nest-simulator/releases/download/v${NEST_VER}/nest-${NEST_VER}.tar.gz -O nest.tar.gz; \
+ tar xfz nest.tar.gz; \
+ cd nest-${NEST_VER}; \
+ mkdir build && cd build; \
+ cmake -DCMAKE_INSTALL_PREFIX=${NEST_INSTALL_DIR} -Dwith-mpi=ON -Dwith-gsl=ON -Dwith-python=ON -Dwith-ltdl=ON ..; \
+ make; \
+ make install
+
+# Taken from /home/shared/nest/bin/nest_vars.sh, needed to run nest and pynest in jupyter
+ENV NEST_DATA_DIR=${NEST_INSTALL_DIR}/share/nest
+ENV NEST_DOC_DIR=${NEST_INSTALL_DIR}/share/doc/nest
+ENV NEST_MODULE_PATH=${NEST_INSTALL_DIR}/lib/nest
+ENV NEST_PYTHON_PREFIX=${NEST_INSTALL_DIR}/lib/${PYTHON_ENV}/site-packages
+ENV PYTHONPATH=${NEST_PYTHON_PREFIX}:${PYTHONPATH}
+ENV PATH=${NEST_INSTALL_DIR}/bin:${PATH}
+
+
+### Install DiPDE for PopNet
+RUN conda install -y -c nicholasc dipde
+
+
+### Install Tensorflow for MintNet
+RUN conda install -y tensorflow
+
+### Install AllenSDK (Not used by bmtk, but used by some notebooks to fetch cell-types files)
+RUN pip install allensdk
+
+
+### Install the bmtk
+RUN cd ${BUILD_DIR}; \
+ git clone https://github.com/AllenInstitute/bmtk.git; \
+ cd bmtk; \
+ python setup.py install
+
+# Setup the examples and tutorials
+RUN cd ${BUILD_DIR}/bmtk/docs; \
+ cp -R examples ${HOME_DIR}; \
+ cp -R tutorial ${HOME_DIR}
+
+# Setup components directories for tutorials, including compiling neuron modfiles
+RUN cd ${HOME_DIR}/tutorial; \
+ cp -R ../examples/*_components .; \
+ cd biophys_components/mechanisms; \
+ nrnivmodl modfiles/
+
+# Pre-compile mechanisms for BioNet examples
+RUN cd ${HOME_DIR}/examples/biophys_components/mechanisms; \
+ nrnivmodl modfiles/
+
+
+# Create an entry point for running the image
+COPY entry_script.sh ${BUILD_DIR}
+RUN chmod +x ${BUILD_DIR}/entry_script.sh
+
+ENTRYPOINT ["/home/build/entry_script.sh"]
diff --git a/bmtk-vb/docker/README.md b/bmtk-vb/docker/README.md
new file mode 100644
index 0000000..d0e13ea
--- /dev/null
+++ b/bmtk-vb/docker/README.md
@@ -0,0 +1,56 @@
+docker-bmtk
+==============
+
+With Docker you can test and run the bmtk without having to go through the hassle of installing all the prerequisites (incl.
+NEURON, NEST, DiPDE, etc). All you need is the Docker client installed on your computer. You can use the bmtk Docker
+container through the command-line to build models and run simulations. Or you can use it as a Jupyter Notebook container
+to test out existing tutorials/examples, or create new Notebooks yourself.
+
+*Note*: You will not be able to utilize parallelization support (MPI) when running bmtk through Docker. Similarly, you can
+expect memory issues and slowness for larger networks. For building and simulating large networks we recommend installing
+bmtk and the required tools natively on your machine.
+
+
+Getting the Image
+------------------
+
+You can pull the bmtk container from DockerHub
+```bash
+ $ docker pull alleninstitute/bmtk
+```
+
+Or to build the image from the bmtk/docker directory
+```bash
+ $ docker build -t alleninstitute/bmtk .
+```
+
+
+Running the BMTK
+----------------
+
+### Through the command-line
+To run a network-build or simulation-run bmtk script using the docker container, go to the directory containing your
+python script and any necessary supporting files:
+```bash
+    $ docker run -v $(pwd):/home/shared/workspace alleninstitute/bmtk python <my_script>.py
+```
+
+**NOTE**: All files must be under the directory from which you run the command, including the network, components, and
+output directories. If your config.json file references anything outside the working directory tree, things will not work
+as expected.
+
+#### NEURON Mechanisms
+If you are running BioNet and have special mechanisms/mod files that need to be compiled, you can do so by running:
+```bash
+ $ cd path/to/mechanisms
+ $ docker run -v $(pwd):/home/shared/workspace/mechanisms alleninstitute/bmtk nrnivmodl modfiles/
+```
+
+### Through Jupyter Notebooks
+To run a Jupyter Notebook server:
+```bash
+    $ docker run -v $(pwd):/home/shared/workspace -p 8888:8888 alleninstitute/bmtk jupyter
+```
+
+Then open a browser to 127.0.0.1:8888/. Any new files and/or notebooks that you want to save permanently should
+be created in the workspace folder, otherwise the work will be lost when the server is stopped.
\ No newline at end of file
diff --git a/bmtk-vb/docker/entry_script.sh b/bmtk-vb/docker/entry_script.sh
new file mode 100644
index 0000000..2d55ee9
--- /dev/null
+++ b/bmtk-vb/docker/entry_script.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+set -e
+
+if [[ $1 = "nrnivmodl" ]]; then
+ shift
+ nrnivmodl $@
+elif [[ $1 = "python" ]]; then
+ shift
+ python $@
+elif [[ $1 = "jupyter" ]]; then
+ shift
+ echo "HERE"
+ jupyter notebook --allow-root --ip=* --port 8888 --no-browser --notebook-dir /home/shared --NotebookApp.token=""
+else
+ python $@
+fi
diff --git a/bmtk-vb/docs/README.md b/bmtk-vb/docs/README.md
new file mode 100644
index 0000000..e5b7b6d
--- /dev/null
+++ b/bmtk-vb/docs/README.md
@@ -0,0 +1,5 @@
+## BMTK documentation, guides and examples
+#### directory structure
+* autodocs/ - scripts and pages for the generation of github-pages html files.
+* tutorial/ - Tutorials and guides for using bmtk and its different parts. **New users should start here**.
+* examples/ - Various examples of how to build networks, run various simulations and plot their results. A good place to start for users wanting a quick and dirty introduction (warning: many of these examples are not as well documented as the tutorials).
diff --git a/bmtk-vb/docs/autodocs/.nojekyll b/bmtk-vb/docs/autodocs/.nojekyll
new file mode 100644
index 0000000..e69de29
diff --git a/bmtk-vb/docs/autodocs/Makefile b/bmtk-vb/docs/autodocs/Makefile
new file mode 100644
index 0000000..e896f11
--- /dev/null
+++ b/bmtk-vb/docs/autodocs/Makefile
@@ -0,0 +1,25 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = python -msphinx
+SPHINXPROJ = BrainModelingToolkit
+SOURCEDIR = source
+BUILDDIR = build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+html:
+ sphinx-apidoc -f -o source/bmtk/ ../../bmtk
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
\ No newline at end of file
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/all_network_cropped.png b/bmtk-vb/docs/autodocs/source/_static/images/all_network_cropped.png
new file mode 100644
index 0000000..658dd26
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/all_network_cropped.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/dipde_icon.png b/bmtk-vb/docs/autodocs/source/_static/images/dipde_icon.png
new file mode 100644
index 0000000..e1cfa62
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/dipde_icon.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/edge_types.png b/bmtk-vb/docs/autodocs/source/_static/images/edge_types.png
new file mode 100644
index 0000000..4d331a8
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/edge_types.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/edges_h5_structure.png b/bmtk-vb/docs/autodocs/source/_static/images/edges_h5_structure.png
new file mode 100644
index 0000000..8695191
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/edges_h5_structure.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/ext_inputs_raster.png b/bmtk-vb/docs/autodocs/source/_static/images/ext_inputs_raster.png
new file mode 100644
index 0000000..290f090
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/ext_inputs_raster.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/graph_structure.png b/bmtk-vb/docs/autodocs/source/_static/images/graph_structure.png
new file mode 100644
index 0000000..af89d20
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/graph_structure.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/levels_of_resolution.png b/bmtk-vb/docs/autodocs/source/_static/images/levels_of_resolution.png
new file mode 100644
index 0000000..83ad07d
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/levels_of_resolution.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/nest_icon.png b/bmtk-vb/docs/autodocs/source/_static/images/nest_icon.png
new file mode 100644
index 0000000..c0d8273
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/nest_icon.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/neuron_icon.png b/bmtk-vb/docs/autodocs/source/_static/images/neuron_icon.png
new file mode 100644
index 0000000..9c754c4
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/neuron_icon.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/node_types.png b/bmtk-vb/docs/autodocs/source/_static/images/node_types.png
new file mode 100644
index 0000000..46d5a29
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/node_types.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/nodes_h5_structure.png b/bmtk-vb/docs/autodocs/source/_static/images/nodes_h5_structure.png
new file mode 100644
index 0000000..39fcb5b
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/nodes_h5_structure.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/tensorflow_icon.png b/bmtk-vb/docs/autodocs/source/_static/images/tensorflow_icon.png
new file mode 100644
index 0000000..9d89ed9
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/tensorflow_icon.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/v1_raster.png b/bmtk-vb/docs/autodocs/source/_static/images/v1_raster.png
new file mode 100644
index 0000000..aadd0fa
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/v1_raster.png differ
diff --git a/bmtk-vb/docs/autodocs/source/_static/images/workflow.png b/bmtk-vb/docs/autodocs/source/_static/images/workflow.png
new file mode 100644
index 0000000..e603b78
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/_static/images/workflow.png differ
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/aibs_sphinx.css_t b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/aibs_sphinx.css_t
new file mode 100644
index 0000000..d53294b
--- /dev/null
+++ b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/aibs_sphinx.css_t
@@ -0,0 +1,833 @@
+/*
+ * traditional.css
+ * ~~~~~~~~~~~~~~~
+ *
+ * Sphinx stylesheet -- traditional docs.python.org theme.
+ *
+ * :copyright: Copyright 2007-2015 by the Sphinx team, see AUTHORS.
+ * :license: BSD, see LICENSE for details.
+ *
+ */
+
+body {
+ color: #000;
+ margin: 0;
+ padding: 0;
+}
+
+/* :::: LAYOUT :::: */
+
+div.document {
+ position: absolute;
+}
+
+div.documentwrapper {
+ float: left;
+ width: 100%;
+}
+
+div.bodywrapper {
+ margin: 0 0 0 {{ theme_sidebarwidth }}px;
+}
+
+div.body {
+ background-color: white;
+ padding: 20px 50px 30px 0px;
+ font-family: Verdana, sans-serif;
+}
+
+div.sphinxsidebarwrapper {
+    margin: 0px 0px 0px 50px;
+    padding: 0;
+}
+
+div.sphinxsidebar {
+ position: absolute;
+ top: 0;
+ left: 0;
+ margin-left: 0;
+ width: {{ theme_sidebarwidth }}px;
+ font-family: Verdana, sans-serif;
+}
+
+div.clearer {
+ clear: both;
+}
+
+div.footer {
+ clear: both;
+ width: 100%;
+ padding: 9px 0 9px 0;
+ text-align: center;
+}
+
+div.related {
+ color: #333;
+ width: 100%;
+ height: 30px;
+ line-height: 30px;
+ border-bottom: 5px solid white;
+}
+
+div.related h3 {
+ display: none;
+}
+
+div.related ul {
+ margin: 0;
+ margin-left: 40px;
+ margin-right: 50px;
+ padding: 0 0 0 10px;
+ list-style: none;
+}
+
+div.related li {
+ display: inline;
+ font-weight: bold;
+}
+
+div.related li.right {
+ float: right;
+ margin-right: 5px;
+}
+
+
+/* ::: SIDEBAR :::: */
+div.sphinxsidebar h3 {
+ margin: 0;
+ margin-top: 20px;
+ font-family: bebas_neueregular;
+ font-weight: bold;
+ font-size: 40px;
+ color: rgb(66,66,66);
+}
+
+div.sphinxsidebar h3 a {
+ color: rgb(66,66,66);
+}
+
+div.sphinxsidebar h4 {
+ margin: 5px 0 0 0;
+}
+
+div.sphinxsidebar p.topless {
+ margin: 5px 10px 10px 10px;
+}
+
+div.sphinxsidebar ul {
+ margin: 10px;
+ margin-left: 15px;
+ padding: 0;
+ font-family: arial, sans-serif;
+ font-size: 14px;
+ font-weight: bold;
+ list-style-type: none;
+}
+
+div.sphinxsidebar li {
+ margin-top: 11px;
+}
+
+li.toctree-l2 {
+ font-size: 12px;
+ margin-top: 8px !important;
+}
+
+div.sphinxsidebar ul ul {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+div.sphinxsidebar form {
+ margin-top: 10px;
+}
+
+
+/* :::: SEARCH :::: */
+ul.search {
+ margin: 10px 0 0 20px;
+ padding: 0;
+}
+
+ul.search li {
+ padding: 5px 0 5px 20px;
+ background-image: url(file.png);
+ background-repeat: no-repeat;
+ background-position: 0 7px;
+}
+
+ul.search li a {
+ font-weight: bold;
+}
+
+ul.search li div.context {
+ color: #888;
+ margin: 2px 0 0 30px;
+ text-align: left;
+}
+
+ul.keywordmatches li.goodmatch a {
+ font-weight: bold;
+}
+
+/* :::: COMMON FORM STYLES :::: */
+
+div.actions {
+ border-top: 1px solid #aaa;
+ background-color: #ddd;
+ margin: 10px 0 0 -20px;
+ padding: 5px 0 5px 20px;
+}
+
+form dl {
+ color: #333;
+}
+
+form dt {
+ clear: both;
+ float: left;
+ min-width: 110px;
+ margin-right: 10px;
+ padding-top: 2px;
+}
+
+input#homepage {
+ display: none;
+}
+
+div.error {
+ margin: 5px 20px 0 0;
+ padding: 5px;
+ border: 1px solid #d00;
+ /*border: 2px solid #05171e;
+ background-color: #092835;
+ color: white;*/
+ font-weight: bold;
+}
+
+/* :::: INLINE COMMENTS :::: */
+
+div.inlinecommentswrapper {
+ float: right;
+ max-width: 40%;
+}
+
+div.commentmarker {
+ float: right;
+ background-image: url(style/comment.png);
+ background-repeat: no-repeat;
+ width: 25px;
+ height: 25px;
+ text-align: center;
+ padding-top: 3px;
+}
+
+div.nocommentmarker {
+ float: right;
+ background-image: url(style/nocomment.png);
+ background-repeat: no-repeat;
+ width: 25px;
+ height: 25px;
+}
+
+div.inlinecomments {
+ margin-left: 10px;
+ margin-bottom: 5px;
+ background-color: #eee;
+ border: 1px solid #ccc;
+ padding: 5px;
+}
+
+div.inlinecomment {
+ border-top: 1px solid #ccc;
+ padding-top: 5px;
+ margin-top: 5px;
+}
+
+.inlinecomments p {
+ margin: 5px 0 5px 0;
+}
+
+.inlinecomments .head {
+ font-weight: bold;
+}
+
+.inlinecomments .meta {
+ font-style: italic;
+}
+
+
+/* :::: COMMENTS :::: */
+
+div#comments h3 {
+ border-top: 1px solid #aaa;
+ padding: 5px 20px 5px 20px;
+ margin: 20px -20px 20px -20px;
+ background-color: #ddd;
+}
+
+/*
+div#comments {
+ background-color: #ccc;
+ margin: 40px -20px -30px -20px;
+ padding: 0 0 1px 0;
+}
+
+div#comments h4 {
+ margin: 30px 0 20px 0;
+ background-color: #aaa;
+ border-bottom: 1px solid #09232e;
+ color: #333;
+}
+
+div#comments form {
+ display: block;
+ margin: 0 0 0 20px;
+}
+
+div#comments textarea {
+ width: 98%;
+ height: 160px;
+}
+
+div#comments div.help {
+ margin: 20px 20px 10px 0;
+ background-color: #ccc;
+ color: #333;
+}
+
+div#comments div.help p {
+ margin: 0;
+ padding: 0 0 10px 0;
+}
+
+div#comments input, div#comments textarea {
+ font-family: 'Bitstream Vera Sans', 'Arial', sans-serif;
+ font-size: 13px;
+ color: black;
+ background-color: #aaa;
+ border: 1px solid #092835;
+}
+
+div#comments input[type="reset"],
+div#comments input[type="submit"] {
+ cursor: pointer;
+ font-weight: bold;
+ padding: 2px;
+ margin: 5px 5px 5px 0;
+ background-color: #666;
+ color: white;
+}
+
+div#comments div.comment {
+ margin: 10px 10px 10px 20px;
+ padding: 10px;
+ border: 1px solid #0f3646;
+ background-color: #aaa;
+ color: #333;
+}
+
+div#comments div.comment p {
+ margin: 5px 0 5px 0;
+}
+
+div#comments div.comment p.meta {
+ font-style: italic;
+ color: #444;
+ text-align: right;
+ margin: -5px 0 -5px 0;
+}
+
+div#comments div.comment h4 {
+ margin: -10px -10px 5px -10px;
+ padding: 3px;
+ font-size: 15px;
+ background-color: #888;
+ color: white;
+ border: 0;
+}
+
+div#comments div.comment pre,
+div#comments div.comment code {
+ background-color: #ddd;
+ color: #111;
+ border: none;
+}
+
+div#comments div.comment a {
+ color: #fff;
+ text-decoration: underline;
+}
+
+div#comments div.comment blockquote {
+ margin: 10px;
+ padding: 10px;
+ border-left: 1px solid #0f3646;
+ /*border: 1px solid #0f3646;
+ background-color: #071c25;*/
+}
+
+div#comments em.important {
+ color: #d00;
+ font-weight: bold;
+ font-style: normal;
+}*/
+
+/* :::: SUGGEST CHANGES :::: */
+div#suggest-changes-box input, div#suggest-changes-box textarea {
+ border: 1px solid #ccc;
+ background-color: white;
+ color: black;
+}
+
+div#suggest-changes-box textarea {
+ width: 99%;
+ height: 400px;
+}
+
+
+/* :::: PREVIEW :::: */
+div.preview {
+ background-image: url(style/preview.png);
+ padding: 0 20px 20px 20px;
+ margin-bottom: 30px;
+}
+
+
+/* :::: INDEX PAGE :::: */
+
+table.contentstable {
+ width: 90%;
+}
+
+table.contentstable p.biglink {
+ line-height: 150%;
+}
+
+a.biglink {
+ font-size: 1.5em;
+}
+
+span.linkdescr {
+ font-style: italic;
+ padding-top: 5px;
+}
+
+/* :::: GENINDEX STYLES :::: */
+
+table.indextable td {
+ text-align: left;
+ vertical-align: top;
+}
+
+table.indextable dl, table.indextable dd {
+ margin-top: 0;
+ margin-bottom: 0;
+}
+
+table.indextable tr.pcap {
+ height: 10px;
+}
+
+table.indextable tr.cap {
+ margin-top: 10px;
+ background-color: #f2f2f2;
+}
+
+img.toggler {
+ margin-right: 3px;
+ margin-top: 3px;
+ cursor: pointer;
+}
+
+/* :::: GLOBAL STYLES :::: */
+
+p.subhead {
+ font-weight: bold;
+ margin-top: 20px;
+}
+
+a:link { color: #0779be; }
+a:visited { color: #0779be; }
+a:hover { color: #bbeeff; }
+a:active { color: #bbeeff; }
+
+
+div.body h1,
+div.body h2,
+div.body h3,
+div.body h4,
+div.body h5,
+div.body h6 {
+ font-family: bebas_neueregular;
+ font-weight: bold;
+ color: rgb(66,66,66);
+
+}
+
+
+
+div.body h1 { font-size: 21pt; }
+div.body h2 { font-size: 18pt; }
+div.body h3 { font-size: 14pt; }
+div.body h4 { font-size: 14pt; }
+
+a.headerlink {
+ color: #c60f0f;
+ font-size: 0.8em;
+ padding: 0 4px 0 4px;
+ text-decoration: none;
+ visibility: hidden;
+}
+
+*:hover > a.headerlink {
+ visibility: visible;
+}
+
+a.headerlink:hover {
+ background-color: #c60f0f;
+ color: white;
+}
+
+div.body p, div.body dd, div.body li {
+ text-align: justify;
+}
+
+div.body td {
+ text-align: left;
+}
+
+ul.fakelist {
+ list-style: none;
+ margin: 10px 0 10px 20px;
+ padding: 0;
+}
+
+/* "Footnotes" heading */
+p.rubric {
+ margin-top: 30px;
+ font-weight: bold;
+}
+
+/* "Topics" */
+
+div.topic {
+ background-color: #eee;
+ border: 1px solid #ccc;
+ padding: 0 7px 0 7px;
+ margin: 10px 0 10px 0;
+}
+
+p.topic-title {
+ font-size: 1.1em;
+ font-weight: bold;
+ margin-top: 10px;
+}
+
+/* Admonitions */
+
+div.admonition {
+ margin-top: 10px;
+ margin-bottom: 10px;
+ padding: 7px;
+}
+
+div.admonition dt {
+ font-weight: bold;
+}
+
+div.admonition dd {
+ margin-bottom: 10px;
+}
+
+div.admonition dl {
+ margin-bottom: 0;
+}
+
+div.admonition p {
+ display: inline;
+}
+
+div.seealso {
+ background-color: #ffc;
+ border: 1px solid #ff6;
+}
+
+div.warning {
+ background-color: #ffe4e4;
+ border: 1px solid #f66;
+}
+
+div.note {
+ background-color: #eee;
+ border: 1px solid #ccc;
+}
+
+p.admonition-title {
+ margin: 0px 10px 5px 0px;
+ font-weight: bold;
+ display: inline;
+}
+
+p.admonition-title:after {
+ content: ":";
+}
+
+div.body p.centered {
+ text-align: center;
+ margin-top: 25px;
+}
+
+table.docutils {
+ border: 0;
+}
+
+table caption span.caption-number {
+ font-style: italic;
+}
+
+table caption span.caption-text {
+}
+
+table.docutils td, table.docutils th {
+ padding: 0 8px 2px 0;
+ border-top: 0;
+ border-left: 0;
+ border-right: 0;
+ border-bottom: 1px solid #aaa;
+}
+
+table.field-list td, table.field-list th {
+ border: 0 !important;
+}
+
+table.footnote td, table.footnote th {
+ border: 0 !important;
+}
+
+dl {
+ margin-bottom: 15px;
+ clear: both;
+}
+
+dd p {
+ margin-top: 0px;
+}
+
+dd ul, dd table {
+ margin-bottom: 10px;
+}
+
+dd {
+ margin-top: 3px;
+ margin-bottom: 10px;
+ margin-left: 30px;
+}
+
+dl.glossary dt {
+ font-weight: bold;
+ font-size: 1.1em;
+}
+
+th {
+ text-align: left;
+ padding-right: 5px;
+}
+
+pre {
+ font-family: monospace;
+ padding: 5px;
+ color: #00008b;
+ border-left: none;
+ border-right: none;
+}
+
+code {
+ font-family: monospace;
+ background-color: #ecf0f3;
+ padding: 0 1px 0 1px;
+}
+
+code.descname {
+ background-color: transparent;
+ font-weight: bold;
+ font-size: 1.2em;
+}
+
+code.descclassname {
+ background-color: transparent;
+}
+
+code.xref, a code {
+ background-color: transparent;
+ font-weight: bold;
+}
+
+.footnote:target { background-color: #ffa }
+
+.line-block {
+ display: block;
+ margin-top: 1em;
+ margin-bottom: 1em;
+}
+
+.line-block .line-block {
+ margin-top: 0;
+ margin-bottom: 0;
+ margin-left: 1.5em;
+}
+
+h1 code, h2 code, h3 code, h4 code, h5 code, h6 code {
+ background-color: transparent;
+}
+
+.optional {
+ font-size: 1.3em;
+}
+
+.sig-paren {
+ font-size: larger;
+}
+
+.versionmodified {
+ font-style: italic;
+}
+
+/* :::: PRINT :::: */
+@media print {
+ div.documentwrapper {
+ width: 100%;
+ }
+
+ div.body {
+ margin: 0;
+ }
+
+ div.sphinxsidebar,
+ div.related,
+ div.footer,
+ div#comments div.new-comment-box,
+ #top-link {
+ display: none;
+ }
+}
+
+.viewcode-link {
+ float: right;
+}
+
+.viewcode-back {
+ float: right;
+ font-family: serif;
+}
+
+div.viewcode-block:target {
+ background-color: #f4debf;
+ border-top: 1px solid #ac9;
+ border-bottom: 1px solid #ac9;
+ margin: -1px -10px;
+ padding: 0 10px;
+}
+
+div.code-block-caption {
+ background-color: #cceeff;
+}
+
+div.code-block-caption span.caption-number {
+ padding: 0.1em 0.3em;
+ font-style: italic;
+}
+
+div.code-block-caption span.caption-text {
+}
+
+div.literal-block-wrapper {
+ padding: 1em 1em 0;
+}
+
+div.literal-block-wrapper pre {
+ margin: 0;
+}
+
+div.figure p.caption span.caption-number {
+ font-style: italic;
+}
+
+div.figure p.caption span.caption-text {
+}
+
+/* davidf: new stuff here */
+
+img.align-right {
+ float: right;
+ padding: 10px;
+}
+
+hr.docutils {
+ border: none;
+ clear: both;
+}
+
+a.current {
+
+}
+
+div.sphinxsidebar p.questions {
+ margin-right: 15px;
+ font-size: 90%;
+}
+
+dl.class > dt {
+ background-color: #ef9c43;
+ border: 1px solid #ff9c33;
+ padding: 3px;
+}
+
+dl.method > dt, dl.classmethod > dt {
+ background-color: #add4eb;
+  border: 1px solid #99daff;
+ padding: 3px;
+}
+
+dl.function > dt, dl.exception > dt {
+ background-color: #a0d279;
+  border: 1px solid #9cf25a;
+ padding: 3px;
+}
+
+/*
+table.docutils tr.field th.field-name {
+ background-color: #eaeaea;
+ border-bottom: 4px solid white;
+}
+*/
+
+table.docutils th, table.docutils td {
+ padding: 4px 8px 4px 4px;
+}
+
+table.docutils tr.field {
+ border-bottom: 3px solid white;
+}
+
+td.field-body > p > strong {
+ background-color: #eaeaea;
+ padding: 3px;
+ border-left: 2px solid #d0d0d0;
+}
\ No newline at end of file
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/AIBS_Logo.png b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/AIBS_Logo.png
new file mode 100644
index 0000000..8442fa9
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/AIBS_Logo.png differ
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/Brain_Atlas_Logotype_SDK.png b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/Brain_Atlas_Logotype_SDK.png
new file mode 100644
index 0000000..3bf99a7
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/Brain_Atlas_Logotype_SDK.png differ
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/arrow_off.gif b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/arrow_off.gif
new file mode 100644
index 0000000..3448eff
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/arrow_off.gif differ
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/arrow_on.gif b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/arrow_on.gif
new file mode 100644
index 0000000..1c8ac26
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/arrow_on.gif differ
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/arrow_over.gif b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/arrow_over.gif
new file mode 100644
index 0000000..4b7cde6
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/arrow_over.gif differ
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/close_x.png b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/close_x.png
new file mode 100644
index 0000000..e7cc1c6
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/close_x.png differ
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/logo_AIBS.gif b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/logo_AIBS.gif
new file mode 100644
index 0000000..21ff623
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/logo_AIBS.gif differ
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/logo_aibs_footer.png b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/logo_aibs_footer.png
new file mode 100644
index 0000000..57f831a
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/logo_aibs_footer.png differ
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/progress_indicator.gif b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/progress_indicator.gif
new file mode 100644
index 0000000..c439da4
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/progress_indicator.gif differ
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/tab_blue.gif b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/tab_blue.gif
new file mode 100644
index 0000000..ed399c1
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/tab_blue.gif differ
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/workflow.png b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/workflow.png
new file mode 100644
index 0000000..e603b78
Binary files /dev/null and b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/images/workflow.png differ
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/javascript/AC_RunActiveContent.js b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/javascript/AC_RunActiveContent.js
new file mode 100644
index 0000000..39c294b
--- /dev/null
+++ b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/javascript/AC_RunActiveContent.js
@@ -0,0 +1,292 @@
+//v1.7
+// Flash Player Version Detection
+// Detect Client Browser type
+// Copyright 2005-2007 Adobe Systems Incorporated. All rights reserved.
+var isIE = (navigator.appVersion.indexOf("MSIE") != -1) ? true : false;
+var isWin = (navigator.appVersion.toLowerCase().indexOf("win") != -1) ? true : false;
+var isOpera = (navigator.userAgent.indexOf("Opera") != -1) ? true : false;
+
+function ControlVersion()
+{
+ var version;
+ var axo;
+ var e;
+
+ // NOTE : new ActiveXObject(strFoo) throws an exception if strFoo isn't in the registry
+
+ try {
+ // version will be set for 7.X or greater players
+ axo = new ActiveXObject("ShockwaveFlash.ShockwaveFlash.7");
+ version = axo.GetVariable("$version");
+ } catch (e) {
+ }
+
+ if (!version)
+ {
+ try {
+ // version will be set for 6.X players only
+ axo = new ActiveXObject("ShockwaveFlash.ShockwaveFlash.6");
+
+ // installed player is some revision of 6.0
+ // GetVariable("$version") crashes for versions 6.0.22 through 6.0.29,
+ // so we have to be careful.
+
+ // default to the first public version
+ version = "WIN 6,0,21,0";
+
+ // throws if AllowScripAccess does not exist (introduced in 6.0r47)
+ axo.AllowScriptAccess = "always";
+
+ // safe to call for 6.0r47 or greater
+ version = axo.GetVariable("$version");
+
+ } catch (e) {
+ }
+ }
+
+ if (!version)
+ {
+ try {
+ // version will be set for 4.X or 5.X player
+ axo = new ActiveXObject("ShockwaveFlash.ShockwaveFlash.3");
+ version = axo.GetVariable("$version");
+ } catch (e) {
+ }
+ }
+
+ if (!version)
+ {
+ try {
+ // version will be set for 3.X player
+ axo = new ActiveXObject("ShockwaveFlash.ShockwaveFlash.3");
+ version = "WIN 3,0,18,0";
+ } catch (e) {
+ }
+ }
+
+ if (!version)
+ {
+ try {
+ // version will be set for 2.X player
+ axo = new ActiveXObject("ShockwaveFlash.ShockwaveFlash");
+ version = "WIN 2,0,0,11";
+ } catch (e) {
+ version = -1;
+ }
+ }
+
+ return version;
+}
+
+// JavaScript helper required to detect Flash Player PlugIn version information
+function GetSwfVer(){
+ // NS/Opera version >= 3 check for Flash plugin in plugin array
+ var flashVer = -1;
+
+ if (navigator.plugins != null && navigator.plugins.length > 0) {
+ if (navigator.plugins["Shockwave Flash 2.0"] || navigator.plugins["Shockwave Flash"]) {
+ var swVer2 = navigator.plugins["Shockwave Flash 2.0"] ? " 2.0" : "";
+ var flashDescription = navigator.plugins["Shockwave Flash" + swVer2].description;
+ var descArray = flashDescription.split(" ");
+ var tempArrayMajor = descArray[2].split(".");
+ var versionMajor = tempArrayMajor[0];
+ var versionMinor = tempArrayMajor[1];
+ var versionRevision = descArray[3];
+ if (versionRevision == "") {
+ versionRevision = descArray[4];
+ }
+ if (versionRevision[0] == "d") {
+ versionRevision = versionRevision.substring(1);
+ } else if (versionRevision[0] == "r") {
+ versionRevision = versionRevision.substring(1);
+ if (versionRevision.indexOf("d") > 0) {
+ versionRevision = versionRevision.substring(0, versionRevision.indexOf("d"));
+ }
+ }
+ var flashVer = versionMajor + "." + versionMinor + "." + versionRevision;
+ }
+ }
+ // MSN/WebTV 2.6 supports Flash 4
+ else if (navigator.userAgent.toLowerCase().indexOf("webtv/2.6") != -1) flashVer = 4;
+ // WebTV 2.5 supports Flash 3
+ else if (navigator.userAgent.toLowerCase().indexOf("webtv/2.5") != -1) flashVer = 3;
+ // older WebTV supports Flash 2
+ else if (navigator.userAgent.toLowerCase().indexOf("webtv") != -1) flashVer = 2;
+ else if ( isIE && isWin && !isOpera ) {
+ flashVer = ControlVersion();
+ }
+ return flashVer;
+}
+
+// When called with reqMajorVer, reqMinorVer, reqRevision returns true if that version or greater is available
+function DetectFlashVer(reqMajorVer, reqMinorVer, reqRevision)
+{
+ versionStr = GetSwfVer();
+ if (versionStr == -1 ) {
+ return false;
+ } else if (versionStr != 0) {
+ if(isIE && isWin && !isOpera) {
+ // Given "WIN 2,0,0,11"
+ tempArray = versionStr.split(" "); // ["WIN", "2,0,0,11"]
+ tempString = tempArray[1]; // "2,0,0,11"
+ versionArray = tempString.split(","); // ['2', '0', '0', '11']
+ } else {
+ versionArray = versionStr.split(".");
+ }
+ var versionMajor = versionArray[0];
+ var versionMinor = versionArray[1];
+ var versionRevision = versionArray[2];
+
+ // is the major.revision >= requested major.revision AND the minor version >= requested minor
+ if (versionMajor > parseFloat(reqMajorVer)) {
+ return true;
+ } else if (versionMajor == parseFloat(reqMajorVer)) {
+ if (versionMinor > parseFloat(reqMinorVer))
+ return true;
+ else if (versionMinor == parseFloat(reqMinorVer)) {
+ if (versionRevision >= parseFloat(reqRevision))
+ return true;
+ }
+ }
+ return false;
+ }
+}
+
+function AC_AddExtension(src, ext)
+{
+ if (src.indexOf('?') != -1)
+ return src.replace(/\?/, ext+'?');
+ else
+ return src + ext;
+}
+
+function AC_Generateobj(objAttrs, params, embedAttrs)
+{
+ var str = '';
+ if (isIE && isWin && !isOpera)
+ {
+		str += '<object ';
+		for (var i in objAttrs)
+			str += i + '="' + objAttrs[i] + '" ';
+		str += '>';
+		for (var i in params)
+			str += '<param name="' + i + '" value="' + params[i] + '" /> ';
+		str += '</object>';
+ }
+ else
+ {
+		str += '<embed ';
+		for (var i in embedAttrs)
+			str += i + '="' + embedAttrs[i] + '" ';
+		str += '> </embed>';
+ }
+
+ document.write(str);
+}
+
+function AC_FL_RunContent(){
+ var ret =
+ AC_GetArgs
+ ( arguments, ".swf", "movie", "clsid:d27cdb6e-ae6d-11cf-96b8-444553540000"
+ , "application/x-shockwave-flash"
+ );
+ AC_Generateobj(ret.objAttrs, ret.params, ret.embedAttrs);
+}
+
+function AC_SW_RunContent(){
+ var ret =
+ AC_GetArgs
+ ( arguments, ".dcr", "src", "clsid:166B1BCA-3F9C-11CF-8075-444553540000"
+ , null
+ );
+ AC_Generateobj(ret.objAttrs, ret.params, ret.embedAttrs);
+}
+
+function AC_GetArgs(args, ext, srcParamName, classid, mimeType){
+ var ret = new Object();
+ ret.embedAttrs = new Object();
+ ret.params = new Object();
+ ret.objAttrs = new Object();
+ for (var i=0; i < args.length; i=i+2){
+ var currArg = args[i].toLowerCase();
+
+ switch (currArg){
+ case "classid":
+ break;
+ case "pluginspage":
+ ret.embedAttrs[args[i]] = args[i+1];
+ break;
+ case "src":
+ case "movie":
+ args[i+1] = AC_AddExtension(args[i+1], ext);
+ ret.embedAttrs["src"] = args[i+1];
+ ret.params[srcParamName] = args[i+1];
+ break;
+ case "onafterupdate":
+ case "onbeforeupdate":
+ case "onblur":
+ case "oncellchange":
+ case "onclick":
+ case "ondblclick":
+ case "ondrag":
+ case "ondragend":
+ case "ondragenter":
+ case "ondragleave":
+ case "ondragover":
+ case "ondrop":
+ case "onfinish":
+ case "onfocus":
+ case "onhelp":
+ case "onmousedown":
+ case "onmouseup":
+ case "onmouseover":
+ case "onmousemove":
+ case "onmouseout":
+ case "onkeypress":
+ case "onkeydown":
+ case "onkeyup":
+ case "onload":
+ case "onlosecapture":
+ case "onpropertychange":
+ case "onreadystatechange":
+ case "onrowsdelete":
+ case "onrowenter":
+ case "onrowexit":
+ case "onrowsinserted":
+ case "onstart":
+ case "onscroll":
+ case "onbeforeeditfocus":
+ case "onactivate":
+ case "onbeforedeactivate":
+ case "ondeactivate":
+ case "type":
+ case "codebase":
+ case "id":
+ ret.objAttrs[args[i]] = args[i+1];
+ break;
+ case "width":
+ case "height":
+ case "align":
+ case "vspace":
+ case "hspace":
+ case "class":
+ case "title":
+ case "accesskey":
+ case "name":
+ case "tabindex":
+ ret.embedAttrs[args[i]] = ret.objAttrs[args[i]] = args[i+1];
+ break;
+ default:
+ ret.embedAttrs[args[i]] = ret.params[args[i]] = args[i+1];
+ }
+ }
+ ret.objAttrs["classid"] = classid;
+ if (mimeType) ret.embedAttrs["type"] = mimeType;
+ return ret;
+}
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/javascript/appConfig.js b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/javascript/appConfig.js
new file mode 100644
index 0000000..1fc6376
--- /dev/null
+++ b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/javascript/appConfig.js
@@ -0,0 +1,14 @@
+/*
+ * Application Configuration JavaScript file
+ * Contains config data used by all external web apps
+ *
+ * Changes made here should also be reflected in corresponding changes to browserVersion.js
+ */
+
+// Note that the userAgent reported versions are not always the same as the app-reported versions.
+// See http://www.useragentstring.com/pages/Safari/ for 'webkit'
+// See http://www.useragentstring.com/pages/Internet%20Explorer/ for 'msie'
+// and http://www.useragentstring.com/pages/Firefox/ for 'mozilla'
+// and http://www.useragentstring.com/pages/Chrome/ for 'chrome'
+// for a mapping of userAgent to browser version numbers.
+ var _pSUPPORTED_BROWSERS = {webkit:'537.71', msie:'9.0', mozilla:'33.0', chrome:'38.0.2125.101'};
diff --git a/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/javascript/browserVersions.js b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/javascript/browserVersions.js
new file mode 100644
index 0000000..4cfd622
--- /dev/null
+++ b/bmtk-vb/docs/autodocs/source/aibs_sphinx/static/external_assets/javascript/browserVersions.js
@@ -0,0 +1,28 @@
+// ----> NOTE: changes made here should be mirrored to their equivalent place in appConfig.js
+
+document.write("
Supported Platforms
\
+ This application has been tested with the following configurations. \
+ You may notice irregularities with software that has not been tested. \
+ There are known issues when viewing heat map data using unsupported browsers. \
+
"
+ ],
+ "text/plain": [
+ " gids timestamps\n",
+ "0 0 554.8\n",
+ "1 0 594.4\n",
+ "2 0 637.1\n",
+ "3 0 683.9\n",
+ "4 0 734.7\n",
+ "5 0 788.7\n",
+ "6 0 844.9\n",
+ "7 0 902.6\n",
+ "8 0 961.2\n",
+ "9 0 1020.4\n",
+ "10 0 1079.9\n",
+ "11 0 1139.7\n",
+ "12 0 1199.5\n",
+ "13 0 1259.5\n",
+ "14 0 1319.5\n",
+ "15 0 1379.5\n",
+ "16 0 1439.6\n",
+ "17 0 1499.7"
+ ]
+ },
+ "execution_count": 1,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "from bmtk.analyzer.spike_trains import to_dataframe\n",
+ "to_dataframe(config_file='simulation_config.json')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "When setting up the enviornment and config file we specified cell_vars=['v', 'cai']. This indicates to the simulator to also record membrane potential and calcium diffusion (and we can also specify other variables as long as they are supported in NEURON). The recordings are stored in hdf5 format in the file output/cellvars/0.h5 (0 for the gid of our first and only cell)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZgAAAEKCAYAAAAvlUMdAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xd4VFX6wPHvm04akNAJgUDoAhqKqIBdsQC6LordXQXL\nWlbX3dXd/bmuZXVX3aZY0LVgAfuKiKIooAICAQEhtFCTUBICpJBM6vn9MSVtyk2ZTMr7eR6ezNx7\n7p0zE3LeOV2MMSillFJNLSjQGVBKKdU2aYBRSinlFxpglFJK+YUGGKWUUn6hAUYppZRfaIBRSinl\nFxpglFJK+YUGGKWUUn6hAUYppZRfhAQ6A4HUpUsX069fv0BnQymlWpV169YdMcZ09ZWuXQeYfv36\nkZqaGuhsKKVUqyIi+6yk82sTmYhMFpHtIpIuIg+4OS8i8h/H+U0ikuLrWhGJE5GvRGSn42dnx/FQ\nEXlDRH4Ska0i8qA/35tSSinv/BZgRCQYmA1cBAwDrhaRYbWSXQQMdPybBbxg4doHgK+NMQOBrx3P\nAaYD4caYEcBo4FYR6eeXN6eUUsonf9ZgxgHpxpjdxphSYD4wrVaaacBcY/cD0ElEevq4dhrwhuPx\nG8BljscGiBKREKADUArk++m9KaWaSWFJOX9dtJUP12VSWel+9ffD+TYum72CX7y2hvX7jzVzDpUn\n/uyD6Q1kVHueCZxqIU1vH9d2N8YcdDw+BHR3PP4Ae/A5CEQC9xpjjtY302VlZWRmZmKz2ep7aasU\nERFBQkICoaGhgc6KaoeyC2y8uWofpw2I5/QBXdymmb9mP3O+3Q3A++sy+PeMU+geG1EjzZdbDrEh\n4zjhIUEs3Z7DLROS+N3kIYSF6EDZQGrVnfzGGCMizq8044AKoBfQGfhORJYYY3ZXv0ZEZmFvjiMx\nMbHOPTMzM4mJiaFfv36IiF/zH2jGGHJzc8nMzCQpKSnQ2VFtzImSclbuymV8/zhiItx/gXn1+728\nuHwXz36TzlVj+vDQlGFEhdcslvbmnqBjh1D+cPEQHl6QxiX/+Z7nrjmF8f3jXWmyC0oIElj7p/N4\nevF2Xvl+D2v3HuW5a1LoExfp1/epPPNneM8C+lR7nuA4ZiWNt2sPO5rRcPzMdhy/BvjCGFNmjMkG\nVgBjamfKGDPHGDPGGDOma9e6o+xsNhvx8fFtPrgAiAjx8fHtprammtefF2xh5txUzn56OV9sPuQ2\nTdbxYnp2jOCOswbw3roMpj73PdsPFdRIc6SglG4x4Vw1NpFP7jyD2A4hXPvKal5cvgvnhok5BSXE\nR4cTGxHKI9NO4sXrUth95ASX/Oc7lqQd9vt7Ve75M8CsBQaKSJKIhAEzgAW10iwAbnCMJhsP5Dma\nv7xduwC40fH4RuATx+P9wDkAIhIFjAe2NSTj7SG4OLWn96qazkvLd3HGk98we2k6peWVbtOs33eM\nvvGR9OwYwW1vrePhBVsoKa+okeZwno0+cZH8bvIQ3r7lVPKKy5k2+3s+WJfpSpNTWEKX6HAABnWP\nYcGdE5g8vAdPfr6NmXPXkVdURnZBCd1iwl3XTD6pJ5/dNZE+cZHcMjeVv32xjfIK9/lU/uO3AGOM\nKQfuBBYDW4H3jDFbROQ2EbnNkWwRsBtIB14G7vB2reOaJ4HzRWQncJ7jOdhHnUWLyBbsAeo1Y8wm\nf70/pdqq3MISNmYcx9t26v/9fg9Zx4t5avF2ps1ewa6cwjppcgpLOGtQVz68/XRunpDE6yv38vMX\nVrEv94QrzeECm6s/5fQBXVh0zwRO7tOJ+9/fyO8+2EhxaQVHCkvoWi14RIeH8Nw1p/DwlGEs35HN\nJc9+xzfbsmukAUiMj+TD20/n6nGJvLBsF9e+sprsfK2tNye/9oAZYxYZYwYZYwYYYx53HHvRGPOi\n47ExxvzKcX6EMSbV27WO47nGmHONMQONMec5O/KNMYXGmOnGmOHGmGHGmKf8+d4C4aGHHmLJkiV1\nji9btoxLL73U7TVPPPEEycnJDB48mMWLF/s7i6qVM8Zw1ZwfmDZ7BdNfXEV6dkGdNBWVhtwTpdx5\ndjJzrh/Nobxipjz7PR//WFXrKCmvoMBWTpfocMJCgvi/S4fx0vWj2Zd7gkv+8z0LNx3AGMOhPBs9\nYqsCQ7eYCN6+ZTx3nZPMe6mZXP78CvblFrlqME4iwk1nJPHerafhjIOxbvp5IkKDeeJnI/jHlaPY\nmHmci//zPat25TbRp6V80SEWrcgjjzzCeeedZzl9Wloa8+fPZ8uWLXzxxRfccccdVFRU+L5QtVm7\ncwpZmX7E43Df3BOlpGcXMqpPJ3blFHLps98zb83+GrWZY0WlVFQausaEc8HwHiy6ZyIn9erIve9W\n1TpyC0sBiK8WGC4c3oPP7p5Icrdo7nznR+6c9yMl5ZV1RoQFBwm/uWAwr/9iLJnHigHoEhPmNr+n\nJHZm4V0TuG58IjPG9XGbBuBnKQl88qsJjv6bH5i9NN3jZ6CajgaYFujRRx9l8ODBTJgwgauvvpqn\nn34agJtuuokPPvgAgC+++IIhQ4aQkpLCRx995PY+n3zyCTNmzCA8PJykpCSSk5NZs2ZNs70P1bKc\nKCln2uwVXPPKai5/fgVbDuTVSeMMDLMm9mfxrycxpm8cD370E796Zz15RWWAvUMdcNUqenbswDsz\nT+Wuc5J5f10m02ZX1RK6RNcMDH3iInn/ttO4dVJ/Pttkn21QO8A4nTW4G1/eO4lrT03k4pN6enxf\nnaPCeOyyER6HOTsN7mHvv7l4RE+eWrydW+amcryo1Os1qnFa9TBlf/vLp1tIO9C0czWH9Yrlz1OG\nezy/du1aPvzwQzZu3EhZWRkpKSmMHj26RhqbzcbMmTP55ptvSE5O5qqrrnJ7r6ysLMaPH+96npCQ\nQFZW7YF8qq0oLq2g0pg6w3ydMo4VUWAr57yh3diQkcdls1fwuwuHcPOEJIKC7IM9jhQ6g0cY3WIj\nmPvLccz5bjdPL97Ohv3f8q8Zp7g66qv3eYQEB/GbCwYzLimOe9/dwG/e32i/T61+EYDQ4CAevHgo\n4wfE8/qKvTWGG9fWq1MHHr98RMM+EDeiw0N49upTGJcUx6ML7UOeX7guhZEJnZrsNVQVrcG0MCtW\nrGDatGlEREQQExPDlClT6qTZtm0bSUlJDBw4EBHhuuuuC0BOVUtyoqScs55eyimPfsVfF22lqLS8\nTpojBfZv6zMn9ufLeydx1uBuPL5oKze+tsbV+e0MMM6mraAg4bYzB/Dh7acTFhLEjDmrePCjnwDq\ndKoDTBzYlUX3TGRcUhxhIUH06ex5DsrZg7vxxi/Hub2PP4kIN5zWj/dvOx2An7+wile+261NZn6g\nNRgvvNU0WoPevXuTkVG1IEJmZia9e/cOYI6Uv6zde5TD+SUM7RnLnG93s3jLIZ6ZPoox/eJcaXJP\nOGonMeHERYUx5/rRzFuTwSMLtzD53
9/xxM9GuJrIajdtjerTiYV3T+ThBVtcQ4i7eQgM3WIieHfW\neApKyt12vLcUJ/fpxMK7JvC7Dzfx2GdbWbY9h2euHOWxyU7Vn9ZgWpgzzjiDTz/9FJvNRmFhIQsX\nLqyTZsiQIezdu5ddu3YBMG/ePLf3mjp1KvPnz6ekpIQ9e/awc+dOxo0b59f8K//4ZEMW89bsd1sz\ngap+kRevS2HezPFUVBqmv7SKJz7f6mrScvWdRNkDg4hwzamJLLxrIr06RXDrm+t4ZGEa4H5EVnR4\nCE9PH8Xsa1K4+5xkj01xznu35ODi1NkRaJ/42QjW7TvGhf/61uOkUFV/GmBamLFjxzJ16lRGjhzJ\nRRddxIgRI+jYsWONNBEREcyZM4dLLrmElJQUunXr5vZew4cP58orr2TYsGFMnjyZ2bNnExwc3Bxv\nQzWhbYfyuWf+Bh786CfOfWY5X2w+WCfN0RP2mkdcVBinDYjni19PYsbYPry0fDdTn7V36B8pLCU0\nWIjtUDMwJHeL5uM7zuDucwcCEBYc5OqTceeSkT2574LBTfgOA0tEuHpcIgvvnkCfzpHc9tY6Hvhw\nEydK3AdzZZ14m0zV1o0ZM8bU3nBs69atDB06NEA5sissLCQ6OpqioiImTZrEnDlzSElJ8X1hA7WE\n96w8+yrtMDPnpvKnS4by4fosth7M55IRPfnLtOGukVxPLNrKayv2sv2xyTVWZ1i6LZvff7iJoydK\nKa80dIkOJ/VPnoe6px3Ip6S8glMSO/v9fbVEpeWV/HPJDl5cvot+8VH866qTGdVHBwDUJiLrjDF1\nluKqTWswLdCsWbM4+eSTSUlJ4YorrvBrcFGBt3jLIT7+MRNbmfs5Ss6O94tG9GTBnWfw2wsH81Xa\nYS7457d8utE+YTH3RClxUWF1lv45e0g3Fv96EpNP6gFAVLj3GuywXrHtNrgAhIUE8fvJQ3jnlvGU\nlFXwsxdW8tTibXWWuFHWaCd/C/TOO+8EOguqmWw7lM+tb64D4G+fb+fhqcO4cHiPGoHiiKPvJD4q\njNDgIH51djLnD+vOb9/fyF3zfuSzTQc5mFdMXJT7yYido8J47poULj/lMLEdWn6/SEtw2oB4Pv/1\nJB5bmMbspbtYkpbN09NHMSKho++LlYvWYNxoT82G7em9tkSbMu2THf96+Qg6R4Vx21vrmTk3lQPH\ni11pjhSWEBMRQkRoVe1jUPcYPrz9dB64aAjfbM9mY2YeoT72Pjl3aHfGVhtVprzr2CGUp6aP4rWb\nxnK8uJTLnl/BM19u97i4p6pLA0wtERER5ObmtouC17kfTESEDssMlGOOzvlpJ/fi0zvP4I8XD2VF\nei7n/2M5c1ftpbLScKSwlK7RdYcEhwQHcduZA1h090TOHNSVaaN6NXPu24ezh3Tjy1+fyWUn9+bZ\nb9KZ+tz3bM6quwqCqks7+Wt18uuOlqqpVFQa7nh7HXuPFHHV2D5cOz6R8JCafSBPLNrK6yv3su3R\nqs75jKNF/OHjn/hu5xFG9+3MniMnSO4azXu3nRaIt6Gq+XrrYR786CeOnihl5qT+3HPuwBo1y/bC\naie/9sHUEhoaqrs7qiaxK6eQxVsOExkWzCML03h95V4enjqMc4Z0d6Vx1znfJy6Sub8cx/82ZPHI\np2kcKyqjY1/9AtASnDu0O1/e25nHP9vKC8t2seingzx+2QgmDPS+Dlp7pU1kSvmJc/TXf28cy9xf\njiMsJIhfvp7KzLmpZBwtAuxNZJ0j63bOiwiXn5LAkvvO5KbT+3Hd+L7NmnflWafIMJ6aPop3Zp5K\nkAjX/Xc19767gVzH71tV0QCjlJ9UX3Zl0qCuLLp7Ig9cNIQV6Uc4/5/Lee6bnRwusHkc/QX2NcEe\nnjqcMwfV3d5bBdbpA7rw+T0TufucZBZuOsC5/1jO+6kZ7aL/1iq/BhgRmSwi20UkXUQecHNeROQ/\njvObRCTF17UiEiciX4nITsfPztXOjRSRVSKyRUR+EhHtvVZ+89mmg9z6Zirz1ux3O4clt9bCkWEh\n9k75JfedyblDuvP0lzvYnJXfLtvw24qI0GDuu2Awi+6eSHLXaH77wSaufvkHdhyuu1Fbe+S3ACMi\nwdi3Mb4IGAZcLSLDaiW7CBjo+DcLeMHCtQ8AXxtjBgJfO54jIiHAW8BtxpjhwFlAmb/en2rfjDH8\n4eOfWLzlsGsJF+ekR6fcE6UECXSqNfekV6cOzL42hTdvHseI3h05c7DWTlq7gd1jeO/W0/jr5SPY\nerCAi/79HY98mka+rX0XQf6swYwD0o0xu40xpcB8YFqtNNOAuY6tk38AOolITx/XTgPecDx+A7jM\n8fgCYJMxZiO4tlbW6bfKL06UVpBXXMaDFw3hzZvHEdshlLvm/cj0F1exMeM4AEcK7R34ntb1mjiw\nK5/eNYHrtX+lTQgKsi8euvT+s7hqbB9eW7mHc55exgfrMtvtVgA+A4yIJIjI/SLyiYisFZFvReR5\nEblERLxd3xvIqPY803HMShpv13Y3xjhX+zsEOIfkDAKMiCwWkfUi8jtf702phnLOX+kcFcbEgV1Z\neNcEnvzZCPbmnmDa7BXc994G0g7kER/VvHudqMCLiwrjr5ePYMGvJpAYF8n972/kihdX8lNm+5s7\n4zXAiMhrwKtAKfA34GrgDmAJMBn4XkQm+TuTnhh7e4Tzq0EIMAG41vHzchE5t/Y1IjJLRFJFJDUn\nJ6f5MqvalGOOrXbjHCPAgoOEGePs315vO3MACzceZGNmHtEROhOgvRqR0JEPbjudZ6aPIuNoMVNn\nf88DH24iu6B9zLED3/NgnjHGbHZzfDPwkYiEAYkers0C+lR7nuA4ZiVNqJdrD4tIT2PMQUdzWrbj\neCbwrTHmCICILAJSsPfTuBhj5gBzwD7R0kPeVTt3KM/Gfe9tIDo8hOlj+nDe0G415qocddVgavav\nxESE8sBFQ7j21ET+8/VOxiXp0iztWVCQcMXoBM4f3p1/L9nJGyv3smDjAW47cwC3TEwiMqxtfwHx\nWoPxEFyqny81xqR7OL0WGCgiSY5ANANYUCvNAuAGx2iy8UCeo/nL27ULgBsdj28EPnE8XgyMEJFI\nR4f/mUCat/wr5cm8NftZuSuX9fuPM3NuKlOfW8G3O3JcnfjHi+ydt+7msIB9suRT00cxfUwft+dV\n+xIbEcr/XTqMr+47kzMHdeUfX+3g7KeX8V5qBhVtuH/Ga/gUkU2eTmFvoRrp6VpjTLmI3Im94A8G\nXjXGbBGR2xznXwQWARcD6UAR8Atv1zpu/STwnojcDOwDrnRcc0xE/oE9OBlgkTHmM18fgFLuHMqz\n0S0mnJUPnMP/Nhzgn1/t4IZX13BqUhy/vXBwjQ2+lLIqqUsUL1w3mrV7j/LYZ1v53QebeG3FXv54\n8dA2uRqA17XIRGQD9sL6HeBToLj6eWPMPr/mzs/crUWmFMCsuansyy1i8b32LsaS8grmr8ng2W
/S\nXTP0AXb/9WKvuz8q5YkxhoWbDvK3L7aReayYMwd15bcXDuak3i1/S4Am2XDMGHMy9o79aOxB5nFg\nOJDV2oOLUt4cLyqjU2RV/0p4SDA3nt6Pb393Fr+fPASAxLhIDS6qwUSEKaN68fVvzuQPFw9hQ8Zx\nLn32e25/ax0728hEzXqtpiwiV2GfAPk3Y8xTfstVM9EaTPtWVlFJaLD771gX/HM5SV2ieOl691/S\nCkvKsZVVuLYsVqqx8m1lvPLdHv773W6Kyyq47OTe/Pq8QSTGRwY6a3U02WrKItIbeyf75cAx4F7g\n40bnUKkA+mh9Jr/9YBPdYsK5IiWBG07vS7eYqpWFjhWVkeKhAx8gOjyE6PC2PQJINa/YiFDuO38Q\nN53ejxeX73KNOLtqbB/uOmcgPTq2vpWvfHXyLwdigPewd8DnOk6FiUicMeaon/OnlF98/GMW0eEh\nDO0Zy/PL0nn5u93MGNuHWWcOoFfHCI4XldLJS4BRyl/iosL4w8VDuXlCEs99k878tft5f10mV4/t\nw61nDqBXpw6BzqJlvr6C9cXeyX8r9rXCwD6CDMfx/n7Kl1J+dayolJTETrx601j2HjnBC8t28c6a\n/by9ej8XDu9BWYWhc6TuwaICp3tsBI9edhKzJvXnuW/SeXv1ft5Zs5+fj07g9jOTW2TTWW1eA4wx\npl8z5UOpZnXsRBmDuscA0K9LFH/7+UjuPm8gc5bv4o1V9vEr2r+iWoI+cZGu/58vLtvFu6kZvJea\nybSTe3HHWckkd4sOdBY9stzJLyIjgX5UC0rGmI/8k63moZ387dewh77g6nGJ/N+ltRf4ti+z/822\nbKaM6qVL6asW53C+jTnf7ubt1fsoKa/kkhE9ueOsZIb1im22PDTplski8iowEtgCVDoOG6BVBxjV\nPtnKKigqrfA4STI+Olxn4KsWq3tsBP936TBuP2sA//1+D3NX7mXhpoNMHNiFmRP7M3FglxrLGgWS\n1WEw440xdb/qKdWC7cs9QVFpBYO7x9SYr+JrmRelWoMu0eH8fvIQbps0gLfX7OO1FXu54dU1DOkR\nw6xJ/bl0ZC/CQgK7abHVALNKRIYZY3RtL9UqrNt3jOkvrqTS2P8Qp49J4JpxifSJi6xaqFI78VUb\n0DEylDvOSubmCUks2HCAl7/bzX3vbeTvX2znF2f04+pTE4mNCMz/dasBZi72IHMIKMHCWmRKBdLS\nbdlUGvjbFSP4Ki2bl5bv4sXluzhrUFf6dYkC7Hu5KNVWhIcEM31MH34+OoFlO3J4+dvdPPH5Np79\nJp2fj07gxtP7keT4v99crAaY/wLXAz9R1QejVIt1rMi+m+RVYxO5amwiB44XM3/NfuavzWDpdvs+\nQPEaYFQbJCKcPbgbZw/uxuasPF75zj4g4PWVezlrcFduPL0fZw7s2izLHFkaRSYiq4wxp/k9N81M\nR5G1XXe+s54tB/JZev9ZNY6XVVTy9dZsdh4u4FdnJ+taYqpdyC6wMW91Bm+t3kdOQQlJXaK4dVJ/\nZozztJ2Xd006igz4UUScKyq7lpJt7cOUVduVV1xGbIe67c6hwUFMPqkHk0/qEYBcKRUY3WIiuOe8\ngdx+1gA+33yQ11fuZdsh/y+oaTXAdMAeWC6odkyHKasWK7+4TJd6UaqWsJAgpp3cm2kn96a03P+9\nHZYCjDHmF/7OiFJNKa+4jL7xzduhqVRr0hxDmL2+goj8SUQ8biouIueIyKVNny2lfDPGsG7fMZZt\nzya32iZgAMeLy+jopolMKdV8fNVgfgI+FREbsB7IASKAgcDJwBLgr54uFpHJwL+xb3v8ijHmyVrn\nxXH+YuxbJt9kjFnv7VpHwHsX+7I1e4ErjTHHqt0zEUgDHjbGPO3zE1Ct1qebDnL3vB8BCBI4NSme\nS0b25KKTepCvAUapgPO1o+UnxpgzgNuwLxMTDOQDbwHjjDH3GmNy3F0rIsHYNye7CBgGXC0itVcD\nuAh7sBqIfbXmFyxc+wDwtTFmIPC143l1/wA+9/G+VRuwdFs2nSNDmT9rPHeenczhAht/+t9mRj+2\nhEpDjR0plVLNz2ofzE5gZz3vPQ5IN8bsBhCR+cA07LULp2nAXGMfK/2DiHQSkZ7Yayeerp0GnOW4\n/g1gGfB7R7rLgD3AiXrmVbVCecVl9O7cgfH94xnfP557zx/E1oMFfLrpAN/vPMK4JI+tu0qpZuDP\nLfl6AxnVnmcCp1pI09vHtd2NMQcdjw8B3QFEJBp7oDkfuN9TpkRkFo69bRITGzYGXLUMtZvBRIRh\nvWIZ1iuW308OYMaUUoCPJrKWzlHzcc4UfRj4pzGm0Mc1c4wxY4wxY7p27ervLCo/yisuC9gaS0op\n3/xZg8kCqq95nuA4ZiVNqJdrD4tIT2PMQUdzWrbj+KnAz0Xk70AnoFJEbMaY55rk3agWJ9+mAUap\nlsxrgBGR3xlj/i4iz1JVU3Axxtzt5fK1wEARScIeHGYA19RKswC409HHciqQ5wgcOV6uXQDcCDzp\n+PmJIy8Tq+X7YaBQg0vbll9cTmwHf35HUko1hq+/zq2On/VesMsYUy4idwKLsY8+e9UYs0VEbnOc\nfxFYhH2Icjr2Ycq/8Hat49ZPAu+JyM3APuDK+uZNtX6l5ZUUl1XoUGSlWjCvAcYY86nj5xsNubkx\nZhH2IFL92IvVHhvgV1avdRzPBc718boPNyC7qgU7lGej0hh6doxARMi32TcNc7femFKqZbC6ZXJX\n7CO0hmGfaAmAMeYcP+VLKZdHF6bx3+/3ABAXFcbovp3p79jXQvtglGq5rDZgv4199vwl2Cdd3oh9\nVr9SflVgK+ONlXs5f1h3Jg3qysaM46TuPcpXaYcB6NExwscdlFKBYjXAxBtj/isi9xhjlgPLRWSt\nPzOmFNibxsorDVNG9WLqqF5cP74vAIfzbew5coJTdTKlUi2W1QBT5vh5UEQuAQ4A+pet/K6gpByA\nmIia/1W7x0bQPVZrL0q1ZFYDzGMi0hH4DfAsEAvc67dcKeVQYLMHmNgIHY6sVGtjdS2yhY6HecDZ\n/suOUjUVOEaLxWhnvlKtjqWlYkTkDRHpVO15ZxF51X/ZUsqu0FGDiQ7XGoxSrY3VtchGGmOOO584\n9l85xT9ZUqqKs4msdh+MUqrlsxpggkSks/OJY9Mv/YtXfldgK0MEosL0v5tSrY3Vv9pngFUi8r7j\n+XTgcf9kSakqBSXlRIeFEBQkgc6KUqqerHbyzxWRVMA5c/9nxpg0b9co1RQKbOXaPKZUK2X5L9cR\nUDSoKL8xxvDPr3bw4fosRKBHbAQZx4roHBkW6KwppRqgVW84ptqWBRsP8J9v0knuFs2Yvp0JDQ4i\nIjSYMwfpxnBKtUba9qBajI9/zCIxLpLXbhqrfS5KtQFag1EtRtqBfMYlxWlwUaqN0ACjWoTCknKy\nC0ro3zUq0FlRSjURvwYYEZksIttFJF1EHnBzXkTkP
47zm0Qkxde1IhInIl+JyE7Hz86O4+eLyDoR\n+cnxU/eqaUXyiu1LwnSJCg9wTpRSTcVvAUZEgoHZwEXYNyq7WkSG1Up2ETDQ8W8W8IKFax8AvjbG\nDAS+djwHOAJMMcaMwL5fzZt+emvKD4pLKwCICAsOcE6UUk3FnzWYcUC6MWa3MaYUmA9Mq5VmGjDX\n2P0AdBKRnj6unQY4t3B+A7gMwBjzozHmgOP4FqCDiOjX4VbCVmYPMB1CNcAo1Vb4M8D0BjKqPc90\nHLOSxtu13Y0xBx2PDwHd3bz2FcB6Y0xJw7KumluxBhil2pxWPUzZGGNExFQ/JiLDgb8BF7i7RkRm\nYW+OIzEx0e95VNY4m8g6hOm4E6XaCn/+NWcBfao9T3Acs5LG27WHHc1oOH5mOxOJSALwMXCDMWaX\nu0wZY+YYY8YYY8Z07aoT+FoKZw0mQmswSrUZ/gwwa4GBIpIkImHADGBBrTQLgBsco8nGA3mO5i9v\n1y7A3omP4+cnAI79aj4DHjDGrPDj+1J+oH0wSrU9fmsiM8aUi8idwGIgGHjVGLNFRG5znH8RWARc\nDKQDRcAvvF3ruPWTwHsicjOwD7jScfxOIBl4SEQechy7wBjjquGolquqiUwDjFJthV/7YIwxi7AH\nkerHXqzcrLa5AAAgAElEQVT22AC/snqt43gucK6b448BjzUyyypAtJNfqbZHe1RVi6B9MEq1PRpg\nVItgK61ABMJD9L+kUm2F/jWrFqG4rIIOocGI6EKXSrUVGmBUi+AMMEqptkMDjGoRiksrtf9FqTZG\nA4xqEWxlFTpEWak2plUvFRMoFZXGNerJPtLazvnIVF+8xlR/aOqcr5HUcaLmsbrX4/H6Wuk8vFaN\nPNdYaMfzazY0z/i83v5zy4E8OnYIrZsZpVSrpQGmATZn5TFtti4W0NTOHqxL9yjVlmiAaYCenSL4\n48VDXc/dDXyqPhpKahx3d0y8nsfNvaq/puD9erdpaxyrmxe3efbxmjWzXP88n5LYqe6NlFKtlgaY\nBugWE8HMSf0DnQ2llGrRtJNfKaWUX2iAUUop5Rdi3A0jaidEJAf7iswN1QU40kTZaUqar/rRfNWP\n5qt+2mK++hpjfI7KadcBprFEJNUYMybQ+ahN81U/mq/60XzVT3vOlzaRKaWU8gsNMEoppfxCA0zj\nzAl0BjzQfNWP5qt+NF/1027zpX0wSiml/EJrMEoppfxCA4xSSim/0ACjlFLKLzTAKKWU8gsNMEop\npfxCA4xSSim/0ACjlFLKLzTAKKWU8gsNMEoppfxCA4xSSim/0ACjlFLKLzTAKKWU8gsNMEoppfxC\nA4xSSim/CAl0BgKpS5cupl+/foHOhlJKtSrr1q07Yozp6itduw4w/fr1IzU1NdDZUEqpVkVE9llJ\np01kSiml/EIDjGoRcgpKyC0sCXQ2lFJNSAOMahHGPr6E0Y8tqXN8/f5jrN9/rM5xYwzr9h3F05bf\necVlHs8ppZpHu+6DUS3fz55fCcDeJy+pcXzBxgPcM38D/7xqFJefklDjXMbRIib+fSl/umQot0zs\nX+ee+bYyFm8+xPQxfdy+Zm5hCeOf+Jr5s8Yzum+c2zSfbMhi3b5jPDLtpIa8rXqprDSIgIh4TJNv\nKyMsOIiI0GCPaVbvziUoSBjbz/17sqrAVsbh/BKSu0V7TFNSXsHWgwWc3KdTo16rpSkrKyMzMxOb\nzRborDSLiIgIEhISCA0NbdD1GmBUq7TnyAn7z5wTdc5lHCsC4Ku0w24DzIMf/sRnPx1kcI8YRibU\nLQDX7DlKWYVhzre7eel694XxPfM3AHgNMLe8sZbBPWL47YVDPKY5/YmvmXJyLx68aKjHNP3/sIgL\nhnVnzg1jPKYZ+fCXJHTuwPe/P8djmqvm/ADUDdbVzV21l7QD+Tx5xUiPaa7/7xo2ZBz3ep+/fJrG\nO6v3s+z+s+jXJcptmqzjxVz54irevXU8CZ0j3aYxxvD8sl1MH51At9gIj6/X74HPuH58Xx69zL8B\nPzMzk5iYGPr16+c14LcFxhhyc3PJzMwkKSmpQffQJjLVKgn2P253jWDezgFkF9i/fRaXVri/dxOV\nG0u2ZjN76S6vaQ7k2Xhp+W6f9/oy7bDPNJnHii3nzZOHPtnC/LUZXtNsyDju8z6bs/IAOF5c5jHN\n+6kZZB0v5r3UTI9pthzI56nF27lr3o8+X/PNHywNbGoUm81GfHx8mw8uYK8xx8fHN6q2pgFGtUrO\nv2933Sy+/vZ9BSAn7cJpOOevwFI/mJc0FZX2c0UevgwEQnsILk6Nfa8aYFT74/Nvpv0UIH5joWAS\nC59zOyrLG+2hhx5iyZK6A2WWLVvGpZde6vaaJ554guTkZAYPHszixYubPE/aB6ParkbWQLQC03hW\nPkNrafS34csjjzxSr/RpaWnMnz+fLVu2cODAAc477zx27NhBcLDngSL1pTUY1Sq5mmDcFDzezlXn\nqWXGW/ObsqaqicxLGq2dNMijjz7K4MGDmTBhAldffTVPP/00ADfddBMffPABAF988QVDhgwhJSWF\njz76yO19PvnkE2bMmEF4eDhJSUkkJyezZs2aJs2r1mBUq+S9D8Z7yeUrAFVdrRGmoap+BW33M/zL\np1tIO5DfpPcc1iuWP08Z7vH82rVr+fDDD9m4cSNlZWWkpKQwevToGmlsNhszZ87km2++ITk5mauu\nusrtvbKyshg/frzreUJCAllZWU3zRhy0BqNaJSudj75qKMp/muojttJP056sWLGCadOmERERQUxM\nDFOmTKmTZtu2bSQlJTFw4EBEhOuuuy4AObXTGoxq1dwOUxbP53xerCwzxvgM9I0cRFavNM3NW02j\nNejduzcZGVVD0jMzM+ndu3eTvobWYFSr5raJzMc1vr4VOwvNlliotSTe+1d8DwW3UjfR2mZNZ5xx\nBp9++ik2m43CwkIWLlxYJ82QIUPYu3cvu3bZ52DNmzfP7b2mTp3K/PnzKSkpYc+ePezcuZNx48Y1\naX61BqNaJSsFj685GJ7OaplmTVPFXx0hZt3YsWOZOnUqI0eOpHv37owYMYKOHTvWSBMREcGcOXO4\n5JJLiIyMZOLEiRQUFNS51/Dhw7nyyisZNmwYISEhzJ49u0lHkIEGGNUG+ZxoaTGCaLHnnT2ANzwc\na+2kYe6//34efvhhioqKmDRpkquT//XXX3elmTx5Mtu2bfN5rz/+8Y/88Y9/9FdWNcCo1s3bt9+G\nztSvGqGmIcYbK81fTfUR6q+iyqxZs0hLS8Nms3HjjTeSkpIS6Cx5pAFGtUqufhS3BY+vPhbnpR6G\nKVsdJNDONeccF/1dVHnnnXcCnQXLtJNftUpWgoB+6/UvK30nVmqBOhmz7dIAo1olb+WO1kCah9fA\nYGFBUSsjzVqi9tR02tj32uYCjIhMFpHtIpIuIg8EOj/Kv9z9AVgdpuyxD8bHeWVBPZbbsRSoWsgv\nIyIigtzc
3BaTH39y7gcTEeF5Hx5f2lQfjIgEA7OB84FMYK2ILDDGpAU2Z6qpWVovzMNJnzUcrQE1\nWn3muLSmYcoJCQlkZmaSk5MT6Kw0C+eOlg3VpgIMMA5IN8bsBhCR+cA0QANMG+NtsmRjm1602d8a\nS7UTL78F7wM17IIcbSyVLaTGEBoa2uDdHdujttZE1huovh1fpuOYi4jMEpFUEUltL99C2pumChDt\noRmkMbwGj6oVRX2m8T7cWZsrW7O2FmB8MsbMMcaMMcaM6dq1a6CzoxqpMeWOpwDSnnYsbAwrfSfe\nBFmYb6QDNlq3thZgsoA+1Z4nOI6pNsZKH4zniZQaQJpCYzcTs1I7sRKEVMvV1gLMWmCgiCSJSBgw\nA1gQ4DwpP3K74ZjFzmMtshrHUs2jkU1kWBjurFquNtXJb4wpF5E7gcVAMPCqMWZLgLOl/MBbLcTn\nasnOBx5KLee35pbSsdxSeZ/jUo/7WJloqb+KVqlNBRgAY8wiYFGg86GaR8OayLzfM0iX67ek0aPI\nXKP9PKdx/i402LdOba2JTLUTlmby+yiTfG2ZrGWae5ZGiFnoX6nP56y/itZJA4xqlRqz4rHPGkor\nnAAYCFaGKVtpRmvsemWq5dIAo1olK03zns5V9bF4Oi9ez7d3TVXDs/Q7dLyIBvvWqdF9MCLSDTgD\n6AUUA5uBVGNMZWPvrZQnQUGeayG+Nxzz3q5fVcOxtlqwDntuGOfv0Er/itZgWqcGBxgRORt4AIgD\nfgSygQjgMmCAiHwAPGOMyW+KjCpVna8gAV4mUvo676OGU12lgeB2Gl8szYPxNpTZlcbKfSxlSbUw\njanBXAzMNMbsr31CREKAS7EvOvlhI15DKbe8NXP5Gqbsqw/GVwCqrtIYgtvZ6mVBIlQa4zW4W1oP\nzkIa5zmdaNk6NSbAPGOMOeTuhDGmHPhfI+6tlFfelnF3LpDoqUyqWkDRw73rsVhmWx0+663pzx6g\nvQcYKzPwq9JYyI+3c230d9AWNKaTf4OILBGRm0WkU5PlSCkLvBVOzhpKhccmMu/zL3wNAqiurZZt\nlmbge0kT7GzC9NITK5a6+e3aaiBv6xoTYHoDTwETgO0i8omIzBCRDk2TNaU8szIBz+M5HwWk1KOT\nv60WfN7elZXPvqofq3HLyTjPtdGPuc1rcIAxxlQYYxYbY36BfYHJV7HvvbJHRN5uqgwq5ZaVWobP\nYcjeazDtufPZSvNXhZcP30ozY1NNtGyrv4O2oEnmwRhjSrFv6rUVyAeGNsV9lfKkqnGl/jUYXwHE\n2XRjpXbSZmswXlc4tr4KcqXXIOR4LQtNZNrP0jo1KsCISB8R+a2IrAcWOu431RiT0iS5U8qDIAtT\nxT124rvOu0/gLPCslGltdTJmY5u/gl1zXDy/RlUg95YT678L1fI0Zh7MSuz9MO9hH668rslypZQP\n3rbSdR7yPZHS+2s0dp5HW+WcIOm1icxKLbCpRpH5vlwFSGOGKT8AfGfa41+YCjgr3359deL7at6y\n1snvM0mr5L0PxkLtxEonv+OnNpG1XQ0OMMaYbwFEJAm4C+hX/X7GmKmNzZxSnlhZTNFT4WZ1tWXt\ng3HPyhyXYC9L+VTdx3ca1ygyz0lUC9YU+8H8D/gv8Cmg64+pZuFtJJjzG7HHAFMrXZ3rXU1svvPR\nVgOMlVn6nuYZge9+Lvt97D8bu5qy1m5arqYIMDZjzH+a4D5KWWZlQUpfG4757INpxxuVeJ8HY//p\nbRKlldqJtS2THa+lQaRVaooA828R+TPwJVDiPGiMWd8E91bKLbFQyHmuwVhbCsZawWchUSvkbS10\naxMtfddyLDWR1XmgWpOmCDAjgOuBc6hqIjOO50r5hasfwOs8GPfHrddgfOejrX6zbuw2xlb6aape\ny0p+Gne9CoymCDDTgf6OyZZKNQvxMpLJ1zBlXxP8nEfbcye/tRFintNYayLTJXnauqaYyb8Z0MUu\nVbOysqS+51PeC7/6dBq31XLP2jBlL2kcJYuVgQBWloFpq59zW9cUNZhOwDYRWUvNPhgdpqz8xlsN\nxsnTMiW+Opfr06/SVr9ZN3aYspXfj1iJMK4kDfkioQKtKQLMn5vgHkp55Wl/Enc7lvhsIqudsO4d\nvJ+upjV28lvZ5tn7Pi7Omfyer7cUhOq15pvPJKoFasxSMWLslvtK09DXUMrJmGrfeKkquBrS+eur\nBlM1ua9tzjCv/Vm6TePlnJVZ+vUapux1FJnvmZZWfk8qMBrTB7NURO4SkcTqB0UkTETOEZE3gBsb\nlz3V3ngqsGsXZo2ZOyG++mBcebHyGhYy0sJ46xdxanQfjKuWY6UPxspIs1b4QatGNZFNBn4JzHMs\nF3Mc6IA9aH0J/MsY82Pjs6jaE0/frmuXU147910z+d2f9zWDvD4dy62xBtPYJilXgPG2W6WFWo6T\nt6Y2K/lRLVdj1iKzAc8Dz4tIKNAFKDbGHG+qzKn2p9IYgtz0rNQuqBqyyKWTr77l+gSN1ljwWXl7\nFRWNq+U4T3nr63FeXeElUjlPeasJtcIY32401YZjZcaYg/4OLiLysIhkicgGx7+Lq517UETSRWS7\niFzoz3wo//HUfFP7sLdCxVthBNXnX7g/X5/yytdreROo2o+VPJd6qVY4A4uV2kmQl74eZz68jgTU\nOTKtWlOMImtu/zTGPF39gIgMA2YAw4FewBIRGWSMqQhEBlXDeSor6tZgPBcqziDlox/b84ZjjsNW\nCuLGFG6WFtP0QxXJSp7LLLRbea/BWGka8x2oyi28fytpVGA0SQ2mBZgGzDfGlBhj9gDpwLgA50k1\ngKdC3VOAcVeQOQtlT60zzuOemm+c97bSGe7tm76Tp8K23FsnhoOVPFh5rRr3tFKDKfecN+fHVual\nGU0sdPKXWwgwVvJqpTlPBUajA4yI3CcivZsiMxbdJSKbRORVEensONYbyKiWJtNxTLUynr6N1i5o\nqpppPKcVH3UYT7WDknJ7xddb847zXJmXgtjJ03uyEF/q3QTnrdB3shIUvdVgnJ381tJ4Cx6Vjp+N\nCzBag2m5mqIGEwN8KSLficidItK9MTcTkSUistnNv2nAC0B/4GTgIPBMA+4/S0RSRSQ1JyenMVlV\nfuDpm3PtgqqkzJ4u2E0UcBagIR4ihDPweCponfcO8tJB7SzTPBWg1WsSngpib7UEV14spKkeKK00\nbVl5XW9ByFk7sVLL8ZbG+dl5iw/ljQyGKrAa3QdjjPkL8BcRGQlcBSwXkUxjzHkNvJ+l60TkZWCh\n42kW0Kfa6QTHMXf3nwPMARgzZox+9WlhPBUWtY8XldprGe4CTKGtHIDw0GC39yqtqPD6Ws5C3VuA\n8ZXf6oGhrNxAWN00BSVlrseeZtcXlpS7HldUGrfv90RpVZqmCjDeah7OGp63+xQ58uT8rN0pcPye\nOnj4PQHk28o8nqt9H9XyNGUfTDZwCMgFujXhfV1EpGe1p5djX2gTY
AEwQ0TCHXNyBgJr/JEH1fSq\nf0utXmhVbx4pr1Xg5RXbC56wkLr/hZ2FUribcwDHTtjPeyqMjxTal9SLifD9/cvTN/2cAteyfJR4\nKGSPnqhagNxTgX6sRhr3r1X9Pp7yc6JaoPKUpkaty0vwcAZwb8Est9CeJ29B6FiRPY27oOl01PG7\nigj1XFTlnijxeE4FVqNrMCJyB3Al0BV4H5hpjElr7H09+LuInIx9JOle4FYAY8wWEXkPSAPKgV/p\nCLLW42CezfW4eqGVXVB1vHahmHW82P7ATbm8L7cIgLgoN9UGYG/uCcdruS/U9xyxn4/w8M26eq3C\nUyG723EPb6+Tnl1Y4z7uguXO7ALX49KKSrd52nm46j6eCvSd1V+r3H1+DudXFdTeapLZjuDprfnO\n+XreakLbDxV4fS2AHYftaeKjwj2/VrX3r1qWphim3Af4tTFmQxPcyytjzPVezj0OPO7vPKimV72g\nrV5obcrMcz2uXQhtyLBPuXL3bXzdvmOA+w7i40WlroLNXWFsjGHlrlzA8yivb3dU9d15KhyXbsuu\nSuOhIP56a1Wa2jU0pyVpvtN8mXaoWn48pNlSlcZTs9XiGml8v3dPabYfKnAFaU9B6HhRKat22z9n\nT0GxvKKSr9IO2x97GRHx+eaDgOcaqwqcRv9GjDEPNkdwUW3Xyl1HXI+rF9jLtlcVZtUL1105ha6g\nVLuAzzhaxNq9RwH3BeC7azNco47cBYevt2a7Ckd33/QrKg0vLt/lGkXmrnA8cLyYd9dmEBrseTDB\nlgN5fL75IJFhwR7TbMo8zqLNB4l1NNW5y++OwwV8tD6LzpGhHtNkHS/mjZV7XWncFfrHi0p5bmk6\nXaLDPL4vW1kFTy3eTs+OEfbXcvP5VFYaHl+0lejwECJCgzwGj799sZ3yikq6RId7DNKvrthD1vFi\nenWM8Bg4F285xA+7jxIeEqSjyVogDfkqoMorKln006GqwthRIBXYyliwIYtuMfamkeoF8Osr9hIW\nHES/+Mg6hdPspemEBAVxalKcqzPaKbvAxnNL05k4sAs9O0bUuTbfVsZDn2xmQNcoJg/v4bbgm700\nnU2Zefzh4qGOfNUa3VZewd3zfiRI4PeTh9R4T07Hi0q54+31dI0J5+5zBwJ1A8ORwhJuf2s9PWIj\nXGlq3+fYiVJmzU2lU2Qov73Q/WsVlpQz841UgkT485ThbtPYyiq47a115BWV8dfLRzjyU/N9VVQa\nfvvBJrYdKuCxy04iOEjq1ISMMTzx+Va+3ZHD7ycPpmfHDm4/wzdW7mXemv3cMrE/I3rHug0eS9IO\n8+Tn27hweHcmn9TTbS3wp8w87nt3A8N7xXLLxCQqKk2rXBuuLdMAowLqy7TDZB0v5pdnJAFV367n\nfLubE6UVzJzYH6gqgNOzC5m3Zj9XjE6wF2DVvkWv2pXL/LUZXH9aX/rGR9YoSMsrKrln3gZKyyv5\n85ThhAYH1SjYyioq+dXb68kuKOGp6aOIDA+uU6v4cF0m//hqB5ed3Iufj04AahbWpeWV/OrtH0nd\nd4wnrxjJgK7RNd4T2IPCNS+v5mCejdnXpNA91h5AbWVVhfXhfBtXvbSK3BMlvHT9aLrFRtRJk51v\n46o5qziQZ+PF60bTu3OHOmmOnijlmpd/YMfhAp695hQG94gBqkbggT2Q/+K1tfyw+yh/+/kITk2K\nd6Sp6mdyBs1PNx7ggYuGcO7Q7kSFBZNfXHOE28MLtvDyd3u44bS+XDe+L9HhIa7BGGAPQM8vS+fP\nC7Zw3tDu/H7yECLDQyioNVLsfz9mcdtb6zipd0f+ceXJRIcHc6K0vEawWpl+hGte/oFOkWG8etNY\n10g0K/OAVPPRAKMC6rUVe+gT14GrxtpHmR/Ot5FxtIiXv9vN1FG9SOlr343bVlaBMfZCrENYMPdf\nMIiwkCBsjlpKvq2M3324kX7xkfzmgkF0CA2uUZA+9eV2Vu3O5fHLR5DcLZqoaoWfMYY/fbyZ73Ye\n4a8/G0FKYmc6dgjl6IlS1zfiL7cc4ncfbuKM5HievGIkMRGhhAaLa7RYWUUl98z/kSVbD/PotOFM\nGdWLro7aV3a+fbBCbmEJ17yymvScQl6+YQxj+sXR3RE8Dhy3p8k4WsSVL63iUJ6NN34xjpEJnejl\naJLKdAxsyDhaxPSXVpF1rJjXfzGWMf3iqtIcK3bcr5grX1rF9kMFzLlhNGcN7uYKQhlH7YMgcgpK\nuObl1azde5R/XXUyl5+SQGyHEGLCQ9jvSJNXXMYvX1/LZz8d5E+XDOW2MwcA0Ccu0pXGVmYPQG+s\n2sctE5J4eMpwRITEamnKKyp5eMEW/v7FdqaO6sXz16YQHCT0jYsk81gx5RWVGGN4Ydkufv3uBsb0\n68xbt5xKVHgIifFRVJqqfH/8YyY3vraGnp0ieP+20+geG0FIsL0os7I6gmo+rXEtMtVG/JSZx9q9\nx/i/S4eR0DmSILH3KbyXmkFoUBC/v2iIaxLhoTwbb6zcy/fpR3j0spOIjw6ne2w4aQfzqaw03Dt/\nAweP23j31vFEhoXQs1MHCmzl5NvK+GTDAV5avptrT0101Tz6dO7A7iMnMMbwyMI03k3N4O5zkrly\njD3QJcZFUlRaQU5BCav3HOXedzdwUu+OvHT9GNdIrj6dI9mdU0hhSTm3v7WO73Ye4f8uHcb1p/UD\noG98JAA7DhcysHshv3x9LYfzbbxywxgmDeoKQFKXKAC2HswnIjSI295aR2l5JW/dciqnJHaukWZT\nRh5hwUHc+c56Kg010iTGRxIWHMS6fcfoFhPO3fN/pKSskrm/HMep/e21ktiIUHp1jGDlrlxGJnTi\n3nc3cLy4lJdvGMPZQ+wzC0SEIT1jWLkrlzV7jvK7DzaSeayYf1w5ip+lJLh+d8N6xvLF5kOsSD/C\nY59tZevBfP5w8RBmTRrgSjO8dyyf/XSQz386yOsr97J6z1FumZDEHy4eSpCjE2tkQkfKKw1v/rCP\n1buP8sWWQ1w6sifPXDmK8BD75zwqoSMAb/2wn+KyCuat2c+pSXHMuWEMHTvY+5XCHAGmpKySSPeD\nB1UAaIBRAfPaij1EhQUzfUwCYSFBjO0Xx8vf7QHg3zNOpncnext+SJAwd9U+0rMLOXtwV6471b7H\nXd/4KHIKMrn1rXV8vS2bv0wdzui+cQAM7GZvnrrv3Y18ve0w5w7pxl+mDne99vBeHflq62Gu++9q\nVqTncsuEJO49f5Dr/BjHfW54dQ3bDhUwLimOV24cQ3R41Z/M6cnxzFuTwbnPLONIYSl/v2IkV46t\nmu8bExHKuKQ4XliezvPL0okOD2HerPGkJHZ2penZsQOj+nTi74u3UV5pSIyLZP6sMSR3i3GliY8O\nZ+LALvxzyQ4ABnSN4pUbx7oCD0B4SDBTRvXizR/28eYP++jfJYo5s0bXuA/A1eMSeearHSzfkUOf\nuA58dPsZDOsVWyPNdeP7
cs/8DVz50iq6xYQzf9Z4xvSLq5Hm+tP68vGPWVz7ymo6RYby2k1jXUHK\n6ecpCcz5dje3v72eyLDgOkEK4Owh3UjuFs1fPk0jNFj4w8VDmDmxf41JpwO7x3De0O68umIPQQK3\nTurP/RcOJjS4qgGmi6O2eKSwhM4ehqer5iftuVNszJgxJjU1NdDZaJey822c8bdvuPbUvjzsKPh3\n5RTy/NJdnDW4K1NG9XKlve/dDXz0YxYn9Y7l7ZvH09ExGmp/bhGT//0tRaUV3HVOMvedP6jGMibT\nZq9g68F8Lh3Zk6enj6oxhySnoIQrX1rFwbxifn3eIG6dVLNQM8bw2Gdb+WTDAS4d2ZMHLhpSZw5K\nbmEJv3l/I8WlFfzmgsGMS6pZCIN9Ts3jn6URHxXOvecPooejKau6/blF/GvJDnp16sCsM/sTGxFa\nJ01OQQkvLNtF99hwrj+tL5Fhdb8bnigptwft8BBmjE2kQ1jdOTMVlYYP12VSVlnJz05JcJsG4Out\nhzmcX8KUUT2JcZMfgM1ZeWw9mM/5w7rTyUO1IfNYEev2HeOM5C50iXY/lyWvqIwVu44wqk8nenfq\n4DZNaXklK3YdISk+in7VAqvTmj1HufKlVbx58zgmDuzq9h6q6YjIOmPMGJ/pNMBogAmEZ77cznNL\n01n6m7PcFhjVlVVUsu1gAYN7xNSZjJidb+NEaUWNb/PVr8stLHVbqIN9SG2FMTW+CavW6VCejfFP\nfM2fpwzjF44BI8p/rAYY/ctSzc5WVsHbq/dz7pDuPoMLQGhwECMSOrqd6d4tNsJtcHFe5ym4AAQF\niQaXNqJHxwh6dowg1THJVrUM+telmt0327I5eqKU60/rG+isqDbktP7xfL/ziK6u3IJogFHN7qP1\n9gmUE5K7BDorqg2ZfFIP8orL+MGxBI0KPA0wqlnlFpawbHs2l5/S2+squkrV16RBXYkKC+aTDQcC\nnRXloAFGNatFmw9RXmm4PEU3HFVNKyI0mKkn9+bTjQc4XlTq+wLldxpgVLNaknaYpC5RDO4e4zux\nUvV0w2l9KSmv5L3UDN+Jld9pgFHNprCknFW7cjlvaDe3uzcq1VhDe8Yyvr99wm5xqW4JFWgaYBqg\notLwwbpMHa1ST9/tyKG0opLzhnYPdFZUG3bf+YPJKShh7qq9gc5Ku6cBpgF+2J3L/e9v5KnF23V5\n8NsXPRAAAAy0SURBVHr4Zls2HTuEMrpvZ9+JlWqgcUlxnDmoK88tTXctNKoCQwNMA5yR3IVrT01k\nzre7+euirW53TlR1/bAnl/H941wr3yrlL3+eMoyS8kr+vGBLoLPSrulfegM9Ou0kbjitLy9/t4db\n30wlv9aeFqqmzGNFZBwtZrxjZV+l/Kl/12h+fd5APt98iPlr9gc6O+2WBpgGCgoS/jJ1OH+ZOpyl\n23O48J/fsmx7tu8L26nVu+3bGGuAUc3l1kkDmDiwCw99soV1uoRMQGiAaQQR4cbT+/Hh7acTFR7C\nTa+t5cZX17A5Ky/QWWtx1u0/RmxEiA5PVs0mOEj4z4xT6NkpgpteW8OmzOOBzlK7owGmCZzcpxML\n75rAAxcNYUPGcS599nt+/sJKPlqfqU1nDmkH8hnWK9a10ZRSzaFzVBjvzBxPxw6hXPvyar7eejjQ\nWWpXdLn+Jl6uP6+4jPfWZvD26n3szS0iJEgY3z+eM5K7cEpiJ0YmdHS7l0dbd9KfF3NFSm/+Mu2k\nQGdFtUMHjhcz681UthzIZ+bE/tx73iCPe+Eo36wu19/+Sjo/69ghlJmT+nPzhCR+zDjGV2nZLNl6\nmL99sQ2AILHvZ96/SxT9u0aT0LkDXWPC6RodTrfYCOIiw4gKD25TI61KyyspLCl37VGvVHPr1akD\n7996Oo8s3MKcb3ez6KeD3HVOMpefkuB2GwjVNLQG00wbjh07UcqPGcfYkJHHrpxCduecYM+RQmxl\n7idrhocEER0eQpTjX1iwfe+SEMfP0OAgQoKE0JAgQoOEIBEQEAQReyBzPrZPmndzHFwz6v05sb6k\nvJJ3Vu/nkWnDucGxX71SgfLD7lwe/2wrP2Xl0S0mnMtO6c3UUb0Y1lObcK3SHS0tCPSOlpWVhuPF\nZeQUlJBTUEJ2gY1jRWWcKCnnREk5ha6fFZRVVFJeWUlZuaGsstL+vMJQVlFJWYWh0hicv0pjDAZc\nxww4ztmfVzrOG1OV1t/CgoOYc8MYnWSpWgRjDMt25PD2D/tYtj2H8kpD58hQxvePZ3ivWAb3iCW5\nWzQ9O0bU2SpbaYCxJNABRikVePYtJHJYtTuX1XtyyThaXON8bEQIPTpG0KmDvfk6OiKU6PBgOoSG\nEBoshAQLwUH2FoXgICE02F4Lqv7lrtJRzhrj/JIHhupfAA3llYaKSkO54wtjeWUlFc5jjp+en1dS\nWYnrmtrnK4z9vs605ZWGC4Z15+8/H9Wgz6xV98GIyHTgYWAoMM4Yk1rt3IPAzUAFcLcxZrHj+Gjg\ndaADsAi4x7Tn6KmUsiQ+OpwrRidwxegEwL4o647DBezOOcHhfBuH820cyrORbyvjSGEpe3OLKLCV\nU1xaXhUUGrmahwiEBgUR7AhSwUHiCljunwfVPR8shIeG1EgbEhREUPVrxZ4uJEg4qXfHpvj4vGqR\nAQbYDPwMeKn6QREZBswAhgO9gCUiMsgYUwG8AMwEVmMPMJOBz5sz00qp1i86PISUxM6kJFpvzjXG\nUGlwNGXbg429jxN7/6jjcVUfqbjOt+WVxVtkgDHGbAW3H/w0YL4xpgTYIyLpwDgR2QvEGmN+cFw3\nF7gMDTBKqWYgIgQLBAdpf011rW18Xm+g+k5CmY5jvR2Pax9XSikVIAGrwYjIEqCHm1N/NMZ84sfX\nnQXMAkhMTPTXyyilVLsXsABjjDmvAZdlAX2qPU9wHMtyPK593N3rzgHmAIhIjojsa0A+nLoARxpx\nvb9ovupH81U/mq/6aYv56mslUYvsg/FiAfCOiPwDeyf/QGCNMaZCRPJFZDz2Tv4bgGd93cwY07Ux\nmRGRVCtD9Zqb5qt+NF/1o/mqn/acrxbZByMil4tIJnAa8JmILAYwxmwB3gPSgC+AXzlGkAHcAbwC\npAO70A5+pZQKqBZZgzHGfAx87OHc48Djbo6nArqSolJKtRAtsgbTiswJdAY80HzVj+arfjRf9dNu\n89Wul4pRSinlP1qDUUop5RcaYBpARCaLyHYRSReRB5r5tfuIyFIRSRORLSJyj+P4wyKSJSIbHP8u\nrnbNg468bheRC/2Yt70i8pPj9VMdx+JE5CsR2en42blaer/nS0QGV/tMNjhGG/46EJ+XiLwqItki\nsrnasXp/PiIy2vE5p4vIf6SRa414yNdTIrJNRDaJyMci0slxvJ+IFFf73F5s5nzV+/fWTPl6t1qe\n9orIBsfx5vy8PJUNgfs/Zl/dU/9Z/QcEYx+l1h8IAzYCw5rx9XsCKY7HMcAOYBj2x
UHvd5N+mCOP\n4UCSI+/BfsrbXqBLrWN/5//bu9MYvaY4juPfH7XEVmuoJmjReNHQNlJrG0FQsTVCKhIEL8SS4AUV\ny0sh9hBEQ2oLQkUsIQ2xhYilSisl1fJGpq1dE2Lr34tznumdxzzTeZ6Ze27L75NM5s6d85z7n3Pv\n3PPc89z7PzA3L88Fbi4dV9u+W0W6h794ewEzgWnA0pG0D/A+cCgp3dXLwKwa4joOGJOXb67EtU+1\nXFs9JeLqer+ViKvt97cBNzTQXp3ODY0dY76C6d504MuIWBkRfwBPknKkFRERfRGxKC+vBZYxdFqc\n/vxtEfEV6Tbu6fVHOmD7D+flh0k54pqK6xhgRUQM9XBtbXFFxFvAD4Nsb9jtI2kcOe9epDPBI5XX\njFpcEbEwIv7KP77HwAeZ/6VUXENotL1a8jv9M4Enhqqjprg6nRsaO8bcwXSvUz604iTtA0wlPVwK\ncFke0niochlcMt4gZbj+SCklD8DuEdGXl1cBuzcQV8scBv7jN91e0H37NJF373wGPlc2IQ/3vClp\nRl5XMq5u9lvp9poBrI6I5ZV1xdur7dzQ2DHmDmYTJWk7YAFweUT8QpquYCIwBegjXaaXdmRETAFm\nAZdImln9ZX431Mhti5K2BE4Bns6rNob2GqDJ9ulE0rXAX8DjeVUfsFfez1eSMmvsUDCkjW6/tTmL\ngW9iirfXIOeGfqWPMXcw3euUD60YSVuQDqDHI+JZgIhYHRF/R8Q6YB7rh3WKxRsR3+Tva0gPyk4H\nVudL7tawwJrScWWzgEURsTrH2Hh7Zd22z7Dz7o2UpPOAk4Cz84mJPJzyfV7+iDRuP6lUXD3st5Lt\nNYY0j9VTlXiLttdg5wYaPMbcwXTvA2B/SRPyu+I5pBxpReQx3geBZRFxe2X9uEqx2aRJ28ixzZG0\nlaQJ5PxtNcS1raTtW8ukD4mX5u2fm4udC7QyZReJq2LAO8um26uiq/bJQx2/SDo0HwvnVF4zaiSd\nAFwFnBIRv1bW7yZp87w8Mce1smBcXe23UnFlxwKfR0T/8FLJ9up0bqDJY2wkdy38X7+AE0l3aKwg\nTS9QcttHki5xPwUW568TgUeBJXn988C4ymuuzbF+wQjvVBkiromkO1I+AT5rtQuwC/AasBx4Fdi5\nZFx5O9sC3wNjK+uKtxepg+sD/iSNa1/QS/sAB5NOrCuAe8gPTI9yXF+Sxudbx9j9uezpef8uBhYB\nJxeOq+v9ViKuvH4+cFFb2ZLt1enc0Ngx5if5zcysFh4iMzOzWriDMTOzWriDMTOzWriDMTOzWriD\nMTOzWriDMTOzWriDMeuRpB0lXVz5eU9Jz9S0rdMk3TAK9dwq6ejRiMlsQ/wcjFmPckLBFyNicoFt\nvUt6qv67EdazNzAvIo4bncjMOvMVjFnvbgL2zZlyb1GaXGoppDxekp7LEzx9LelSSVdK+ljSe5J2\nzuX2lfRKzkD9tqQD2jciaRLwe6tzkTRf0n25npWSjsqZhZdJmp/LbJ7LLVWaOOoKgEhTFewiaY8y\nTWT/Z2OaDsBsEzYXmBwpU27riqZqMill+tak1CtXR8RUSXeQ8jvdCTxASi+yXNIhwL1A+xDWEaQ0\nI1U7AYeRMkQ/n8tcCHwgaQppcrXxrasr5Rkps0W5/ILe/myz4XEHY1af1yNN/LRW0s/AC3n9EuDA\nnFb9cOBprZ+RdqtB6hkHfNu27oWICElLSPOPLAGQ9BlpFsU3gYmS7gZeAhZWXrsG2HOkf5zZhriD\nMavP75XldZWf15H+9zYDfmpdAQ3hN2Bsh7qr9fbXHRE/SjoIOB64iDTL4vm5zNa5TrNa+TMYs96t\nJc193pNIk0F9JekMSOnWc6fQbhmwXzd1S9oV2CwiFgDXkeaQb5nE+jT3ZrVxB2PWo0gTSb2TP0i/\npcdqzgYukNSa5uDUQcq8BUxVZRxtGMYDb0haDDwGXAP9E1LtB3zYY7xmw+bblM02AZLuIn3u8uoI\n65kNTIuI60cnMrPOfAVjtmm4EdhmFOoZw8Y3j739R/kKxszMauErGDMzq4U7GDMzq4U7GDMzq4U7\nGDMzq4U7GDMzq8U//v3iRjA4dNkAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from bmtk.analyzer.cell_vars import plot_report\n",
+ "\n",
+ "plot_report(config_file='simulation_config.json')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 5. Additional Information\n",
+ "\n",
+ "### Changing run-time parameters.\n",
+ "By making changes to the config file, we can change the conditions and simulation parameters without having to rebuild the network, modify paramter files, and changing our run_bionet script. In fact we can iteratively run multiple simulations without any extra coding, only a text editor to change the json file.\n",
+ "\n",
+ "The run section of the config.json contains most of the parameters unique to the simulation:\n",
+ "```json\n",
+ "\"run\": {\n",
+ " \"dL\": 20,\n",
+ " \"nsteps_block\": 5000, \n",
+ " \"spike_threshold\": -15,\n",
+ " \"tstop\": 2000.0, \n",
+ " \"dt\": 0.1\n",
+ "}\n",
+ "```\n",
+ "* dstop - simulation runtime in milliseconds.\n",
+ "* dt - the time steps of the simulation. decreasing dt should increasing accuracy of firing dynamics, but also increase time it takes to complete.\n",
+ "* spike_thresdhold - used to determine when to count a action potential\n",
+ "* save_cell_vars - indicate cell variables that will be recorded (in this case calcium diffusion and membrane potential). \n",
+ "\n",
+ "Through the conditions section we can adjust simulation temperature (C) and the initial membrane potential of the cells:\n",
+ "```json\n",
+ "\"conditions\": {\n",
+ " \"celsius\": 34.0, \n",
+ " \"v_init\": -80\n",
+ "}\n",
+ "```\n",
+ "\n",
+ "And lastly, the input section lets us control stimulus onto the network. There are a number of different options which will be explained in the following tutorials. But even with a simple current injection we can adjust amplitude, delay and stimulation duration and measure the effect on the cell.\n",
+ "```json\n",
+ "\"inputs\": {\n",
+ " \t\"current_clamp\": {\n",
+ " \"input_type\": \"current_clamp\",\n",
+ " \"module\": \"IClamp\",\n",
+ " \"node_set\": \"all\",\n",
+ " \"amp\": 0.120,\n",
+ " \"delay\": 500.0,\n",
+ " \"duration\": 1000.0\n",
+ " }\n",
+ "}\n",
+ "```\n",
+ "We can even add multiple injections\n",
+ "```json\n",
+ "\"inputs\": {\n",
+ " \t\"cclamp1\": {\n",
+ " \"input_type\": \"current_clamp\",\n",
+ " \"module\": \"IClamp\",\n",
+ " \"node_set\": \"all\",\n",
+ " \"amp\": 0.150,\n",
+ " \"delay\": 0.0,\n",
+ " \"duration\": 400.0\n",
+ " }\n",
+ " \n",
+ " \"cclamp2\": {\n",
+ " \"input_type\": \"current_clamp\",\n",
+ " \"module\": \"IClamp\",\n",
+ " \"node_set\": \"all\",\n",
+ " \"amp\": 0.300,\n",
+ " \"delay\": 500.0,\n",
+ " \"duration\": 400.0\n",
+ " }\n",
+ " \n",
+ " \"cclamp3\": {\n",
+ " \"input_type\": \"current_clamp\",\n",
+ " \"module\": \"IClamp\",\n",
+ " \"node_set\": \"all\",\n",
+ " \"amp\": 0.450,\n",
+ " \"delay\": 1000.0,\n",
+ " \"duration\": 400.0\n",
+ " }\n",
+ "}\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Changing cell models\n",
+ "When building the network we defined the cell model and morphology through the 'dynamics_params' and 'morphology_file' options. After building and saving the network, these values were saved in the node-types csv file."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "
node_type_id
\n",
+ "
dynamics_params
\n",
+ "
model_processing
\n",
+ "
model_type
\n",
+ "
model_template
\n",
+ "
morphology
\n",
+ "
potental
\n",
+ "
cell_name
\n",
+ "
\n",
+ " \n",
+ " \n",
+ "
\n",
+ "
0
\n",
+ "
100
\n",
+ "
472363762_fit.json
\n",
+ "
aibs_perisomatic
\n",
+ "
biophysical
\n",
+ "
ctdb:Biophys1.hoc
\n",
+ "
Scnn1a_473845048_m.swc
\n",
+ "
exc
\n",
+ "
Scnn1a_473845048
\n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " node_type_id dynamics_params model_processing model_type \\\n",
+ "0 100 472363762_fit.json aibs_perisomatic biophysical \n",
+ "\n",
+ " model_template morphology potental cell_name \n",
+ "0 ctdb:Biophys1.hoc Scnn1a_473845048_m.swc exc Scnn1a_473845048 "
+ ]
+ },
+ "execution_count": 2,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "import pandas as pd\n",
+ "pd.read_csv('network/mcortex_node_types.csv', sep=' ')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "If we want to run the simulation on a different cell model, all we have to do is \n",
+ "1. Download new parameters.json and morphology.swc into components/biophysical\n",
+ "2. Open mcortex_node_types.csv in a text editor and update 'morphology_file' and 'params_file' accordingly.\n",
+ "\n",
+ "In our simple one-cell example, it is likely faster to just rebuild the network. However the advantage of the use of the node types becomes clear once we start dealing with a larger network. For example we may have a network of hundreds of thousands of individual cells with tens of thousands of Scnn1a type cells. The process of adjusting/chaning the Scnn1a parameter in the csv then starting another simulation only takes seconds, whereas rebuilding the entire network may take hours."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "anaconda-cloud": {},
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.13"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/bmtk-vb/docs/tutorial/02_single_cell_syn.ipynb b/bmtk-vb/docs/tutorial/02_single_cell_syn.ipynb
new file mode 100644
index 0000000..5e95a6a
--- /dev/null
+++ b/bmtk-vb/docs/tutorial/02_single_cell_syn.ipynb
@@ -0,0 +1,736 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Chapter 2: Single cell simulation with external feedfoward input (with BioNet)\n",
+ "\n",
+ "In the previous tutorial we built a single cell and stimulated it with a current injection. In this example we will keep our single-cell network, but instead of stimulation by a step current, we'll set-up an external network that synapses onto our cell.\n",
+ "\n",
+ "**Note** - scripts and files for running this tutorial can be found in the directory [sources/chapter02/](sources/chapter02)\n",
+ "\n",
+ "**Requirements:**\n",
+ "* Python 2.7 or 3.6+\n",
+ "* bmtk\n",
+ "* NEURON 7.4+"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Step 1: Building the network.\n",
+ "\n",
+ "Similar to the previous tutorial, we want to build and save a network consisting of a single biophysically detailed cell. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "from bmtk.builder.networks import NetworkBuilder\n",
+ "\n",
+ "\n",
+ "cortex = NetworkBuilder('mcortex')\n",
+ "cortex.add_nodes(cell_name='Scnn1a_473845048',\n",
+ " potental='exc',\n",
+ " model_type='biophysical',\n",
+ " model_template='ctdb:Biophys1.hoc',\n",
+ " model_processing='aibs_perisomatic',\n",
+ " dynamics_params='472363762_fit.json',\n",
+ " morphology='Scnn1a_473845048_m.swc')\n",
+ "\n",
+ "cortex.build()\n",
+ "cortex.save_nodes(output_dir='network')\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "collapsed": true
+ },
+ "source": [
+ "But we will also want a collection of external spike-generating cells that will synapse onto our cell. To do this we create a second network which can represent thalamic input. We will call our network \"mthalamus\", and it will consist of 10 cells. These cells are not biophysical but instead \"virtual\" cells. Virtual cells don't have a morphology or the normal properties of a neuron, but rather act as spike generators."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "thalamus = NetworkBuilder('mthalamus')\n",
+ "thalamus.add_nodes(N=10,\n",
+ " pop_name='tON',\n",
+ " potential='exc',\n",
+ " model_type='virtual')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Now that we built our nodes, we want to create a connection between our 10 thalamic cells onto our cortex cell. To do so we use the add_edges function like so:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "thalamus.add_edges(source={'pop_name': 'tON'}, target=cortex.nodes(),\n",
+ " connection_rule=5,\n",
+ " syn_weight=0.001,\n",
+ " delay=2.0,\n",
+ " weight_function=None,\n",
+ " target_sections=['basal', 'apical'],\n",
+ " distance_range=[0.0, 150.0],\n",
+ " dynamics_params='AMPA_ExcToExc.json',\n",
+ " model_template='exp2syn')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Let us break down how this method call works:\n",
+ "```python\n",
+ "thalamus.add_edges(source={'pop_name': 'tON'}, target=cortex.nodes(),\n",
+ "```\n",
+ "* Here we specify which set of nodes to use as sources and targets. Our source/pre-synaptic cells are all thamalus cells with the property \"pop_name=tON\", which in this case is every thalmus cell (We could also use source=thalamus.nodes(), or source={'level_of_detail': 'filter'}). The target/post-synaptic is all cell(s) of the \"cortex\" network.\n",
+ "\n",
+ "```python\n",
+ "connection_rule=5,\n",
+ "```\n",
+ "* The connection_rule parameter determines how many synapses exists between every source/target pair. In this very trivial case we are indicating that between every thamalic --> cortical cell connection, there are 5 synapatic connections. In future tutorials we will show how we can create more complex customized rules.\n",
+ "\n",
+ "```python\n",
+ "syn_weight=0.001,\n",
+ "delay=2.0,\n",
+ "weight_function=None,\n",
+ "```\n",
+ "* Here we are specifying the connection weight. For every connection in this edge-type, there is a connection strenght of 5e-05 (units) and a connection dealy of 2 ms. The weight function is used to adjusted the weights before runtime. Later we will show how to create customized weight functions.\n",
+ "\n",
+ "```python\n",
+ " target_sections=['basal', 'apical'],\n",
+ " distance_range=[0.0, 150.0],\n",
+ "```\n",
+ "* This is used by BioNet to determine where on the post-synaptic cell to place the synapse. By default placement is random within the given section and range.\n",
+ "```python\n",
+ "dynamics_params='AMPA_ExcToExc.json', \n",
+ "model_template='exp2syn')\n",
+ "```\n",
+ "* The params_file give the parameters of the synpase, including the time constant and potential. Here we are using an AMPA type synaptic model with an Excitatory connection. The set_params_function is used by BioNet to convert the model into a valid NEURON synaptic object.\n",
+ "\n",
+ "Finally we are ready to build the model and save the thalamic nodes and edges."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "thalamus.build()\n",
+ "thalamus.save_nodes(output_dir='network')\n",
+ "thalamus.save_edges(output_dir='network')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The network/ directory will contain multiple nodes and edges files. It should have nodes (and node-types) files for both the thalamus and cortex network. And edges (and edge-types) files for the thalamus --> cortex connections. Nodes and edges for different networks and their connections are spread out across different files which allows us in the future to rebuild, edit or replace part of setup in a piecemeal and efficent manner."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Step 2: Setting up BioNet environment.\n",
+ "#### file structure.\n",
+ "\n",
+ "Before running a simulation, we will need to create the runtime environment, including parameter files, run-script and configuration files. If using the tutorial these files will already be in place. Otherwise we can use a command-line:\n",
+ "```bash\n",
+ "$ python -m bmtk.utils.sim_setup -n network --membrane_report-vars v,cai --membrane_report-sections soma --tstop 2000.0 --dt 0.1 bionet\n",
+ "```\n",
+ "\n",
+ "Also our cortex cell uses a Scnn1a model we can download from the Allen Cell-Types Database\n",
+ "```bash\n",
+ " $ wget http://celltypes.brain-map.org/neuronal_model/download/482934212\n",
+ " $ unzip 482934212\n",
+ " $ cp fit_parameters.json biophys_components/biophysical_neuron_templates/472363762_fit.json\n",
+ " $ cp reconstruction.swc biophys_components/morphologies/Scnn1a_473845048_m.swc\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Spike Trains\n",
+ "\n",
+ "We need to give our 10 thalamic cells spike trains. There are multiple ways to do this, but an easy way to use a csv file. The following function will create a file to provide the spikes for our 10 cells.\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from bmtk.utils.spike_trains import SpikesGenerator\n",
+ "\n",
+ "sg = SpikesGenerator(nodes='network/mthalamus_nodes.h5', t_max=3.0)\n",
+ "sg.set_rate(10.0)\n",
+ "sg.save_csv('thalamus_spikes.csv', in_ms=True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The spikes file consists of 10 rows with 2 columns; the gid and a list of spike times (in milliseconds). Thus you can create your own if you want."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "
gid
\n",
+ "
spike-times
\n",
+ "
\n",
+ " \n",
+ " \n",
+ "
\n",
+ "
0
\n",
+ "
0
\n",
+ "
93.1650606428,223.986147783,259.778024671,342....
\n",
+ "
\n",
+ "
\n",
+ "
1
\n",
+ "
1
\n",
+ "
95.8688010168,201.659003074,404.670082776,496....
\n",
+ "
\n",
+ "
\n",
+ "
2
\n",
+ "
2
\n",
+ "
130.657751839,131.784430622,265.553356715,305....
\n",
+ "
\n",
+ "
\n",
+ "
3
\n",
+ "
3
\n",
+ "
69.1281914227,224.055770984,374.561526606,511....
\n",
+ "
\n",
+ "
\n",
+ "
4
\n",
+ "
4
\n",
+ "
66.2944104828,220.797101783,273.598966857,489....
\n",
+ "
\n",
+ "
\n",
+ "
5
\n",
+ "
5
\n",
+ "
129.642401853,149.509142787,245.219852214,280....
\n",
+ "
\n",
+ "
\n",
+ "
6
\n",
+ "
6
\n",
+ "
69.204438027,121.595641545,175.854451607,347.9...
\n",
+ "
\n",
+ "
\n",
+ "
7
\n",
+ "
7
\n",
+ "
106.530638435,278.850185694,478.194080473,528....
\n",
+ "
\n",
+ "
\n",
+ "
8
\n",
+ "
8
\n",
+ "
20.5105506698,75.7199163271,277.170399557,307....
\n",
+ "
\n",
+ "
\n",
+ "
9
\n",
+ "
9
\n",
+ "
102.63071719,316.347302476,456.002763012,513.4...
\n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " gid spike-times\n",
+ "0 0 93.1650606428,223.986147783,259.778024671,342....\n",
+ "1 1 95.8688010168,201.659003074,404.670082776,496....\n",
+ "2 2 130.657751839,131.784430622,265.553356715,305....\n",
+ "3 3 69.1281914227,224.055770984,374.561526606,511....\n",
+ "4 4 66.2944104828,220.797101783,273.598966857,489....\n",
+ "5 5 129.642401853,149.509142787,245.219852214,280....\n",
+ "6 6 69.204438027,121.595641545,175.854451607,347.9...\n",
+ "7 7 106.530638435,278.850185694,478.194080473,528....\n",
+ "8 8 20.5105506698,75.7199163271,277.170399557,307....\n",
+ "9 9 102.63071719,316.347302476,456.002763012,513.4..."
+ ]
+ },
+ "execution_count": 6,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "import pandas as pd\n",
+ "pd.read_csv('thalamus_spikes.csv', sep=' ')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The last thing that we need to do is to update the configuration file to read \"thalamus_spikes.csv\". To do so we open simulation_config.json in a text editor and add the following to the **input** section.\n",
+ "\n",
+ "```json\n",
+ "\"inputs\": {\n",
+ " \"lgn_spikes\": {\n",
+ " \"input_type\": \"spikes\",\n",
+ " \"module\": \"csv\",\n",
+ " \"input_file\": \"${BASE_DIR}/thalamus_spikes.csv\",\n",
+ " \"node_set\": \"mthalamus\"\n",
+ " }\n",
+ "}\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 3. Running the simulation\n",
+ "\n",
+ "Once our config file is setup we can run a simulation either through the command line:\n",
+ "```bash\n",
+ "$ python run_bionet.py config.json\n",
+ "```\n",
+ "\n",
+ "or through the script"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "2018-09-20 15:39:04,870 [INFO] Created log file\n",
+ "2018-09-20 15:39:05,048 [INFO] Building cells.\n",
+ "2018-09-20 15:39:05,179 [INFO] Building recurrent connections\n",
+ "2018-09-20 15:39:05,182 [INFO] Build virtual cell stimulations for lgn_spikes\n",
+ "2018-09-20 15:39:05,216 [INFO] Running simulation for 2000.000 ms with the time step 0.100 ms\n",
+ "2018-09-20 15:39:05,217 [INFO] Starting timestep: 0 at t_sim: 0.000 ms\n",
+ "2018-09-20 15:39:05,218 [INFO] Block save every 5000 steps\n",
+ "2018-09-20 15:39:05,354 [INFO] step:5000 t_sim:500.00 ms\n",
+ "2018-09-20 15:39:05,491 [INFO] step:10000 t_sim:1000.00 ms\n",
+ "2018-09-20 15:39:05,627 [INFO] step:15000 t_sim:1500.00 ms\n",
+ "2018-09-20 15:39:05,767 [INFO] step:20000 t_sim:2000.00 ms\n",
+ "2018-09-20 15:39:05,775 [INFO] Simulation completed in 0.5596 seconds \n"
+ ]
+ }
+ ],
+ "source": [
+ "from bmtk.simulator import bionet\n",
+ "\n",
+ "\n",
+ "conf = bionet.Config.from_json('simulation_config.json')\n",
+ "conf.build_env()\n",
+ "net = bionet.BioNetwork.from_config(conf)\n",
+ "sim = bionet.BioSimulator.from_config(conf, network=net)\n",
+ "sim.run()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 4. Analyzing the run"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
"
+ ],
+ "text/plain": [
+ " edge_type_id target_query source_query syn_weight dynamics_params \\\n",
+ "0 100 * pop_name=='tON' 0.001 AMPA_ExcToExc.json \n",
+ "\n",
+ " distance_range delay target_sections model_template \n",
+ "0 [0.0, 150.0] 2.0 ['basal', 'apical'] exp2syn "
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "import pandas as pd\n",
+ "pd.read_csv('network/mthalamus_mcortex_edge_types.csv', sep=' ')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "(if in the build script we called add_edges multiple times, we'd have multiple edge-types). \n",
+ "\n",
+ "Using a simple text-editor we can modify this file directly, change parameters before a simulation run without having to rebuild the entire network (although for a network this small it may not be beneficial).\n",
+ "\n",
+ "#### weight_function\n",
+ "\n",
+ "By default BioNet uses the value in syn_weight to set a synaptic weight, which is a constant stored in the network files. Often we will want to adjust the synaptic weight between simulations, but don't want to have to regenerate the network. BioNet allows us to specify custom synaptic weight functions that will calculate synaptic weight before each simulation. \n",
+ "\n",
+ "To do so first we must set the value of 'weight_function' column. Either we can open up the file mthalamus_mcortex_edge_types.csv with a text-editor and change the column. \n",
+ "\n",
+ "\n",
+ "|edge_type_id | target_query | source_query | ... | weight_function |\n",
+ "|-------------|--------------|----------------|-----|-----------------|\n",
+ "|100 | * |pop_name=='tON' | ... |*adjusted_weight* |\n",
+ "\n",
+ "or we can rebuild the edges\n",
+ "```python\n",
+ "thalamus.add_edges(source={'pop_name': 'tON'}, target=cortex.nodes(),\n",
+ " connection_rule=5,\n",
+ " syn_weight=0.001,\n",
+ " weight_function=adjusted_weight,\n",
+ " delay=2.0,\n",
+ " target_sections=['basal', 'apical'],\n",
+ " distance_range=[0.0, 150.0],\n",
+ " dynamics_params='AMPA_ExcToExc.json',\n",
+ " model_template='exp2syn')\n",
+ "```\n",
+ "\n",
+ "Then we write a custom weight function. The weight functions will be called during the simulation when building each synapse, and requires three parameters - target_cell, source_cell, and edge_props. These three parameters are dictionaries which can be used to access properties of the source node, target node, and edge, respectively. The function must return a floating point number which will be used to set the synaptic weight\n",
+ "\n",
+ "```python\n",
+ "def adjusted_weights(target_cell, source_cell, edge_props):\n",
+ " if target_cell['cell_name'] == 'Scnn1a':\n",
+ " return edge_prop[\"weight_max\"]*0.5\n",
+ " elif target_cell['cell_name'] == 'Rorb'\n",
+ " return edge_prop[\"weight_max\"]*1.5\n",
+ " else:\n",
+ " ...\n",
+ "```\n",
+ "\n",
+ "Finally we must tell BioNet where to access the function which we can do by using the add_weight_function.\n",
+ "```python\n",
+ "from bmtk.simulator import bionet\n",
+ "\n",
+ "bionet.nrn.add_weight_function(adjusted_weights)\n",
+ "\n",
+ "conf = bionet.Config.from_json('config.json')\n",
+ "...\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Using NWB for spike trains\n",
+ "\n",
+ "Instead of using csv files to set the spike trains of our external network, we can also use nwb files. The typical setup would look like the following in the config file:\n",
+ "\n",
+ "```json\n",
+ "\"inputs\": {\n",
+ " \"LGN_spikes\": {\n",
+ " \"input_type\": \"spikes\",\n",
+ " \"module\": \"nwb\",\n",
+ " \"input_file\": \"$INPUT_DIR/lgn_spikes.nwb\",\n",
+ " \"node_set\": \"lgn\",\n",
+ " \"trial\": \"trial_0\"\n",
+ " },\n",
+ "}\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "anaconda-cloud": {},
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.13"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/bmtk-vb/docs/tutorial/03_single_pop.ipynb b/bmtk-vb/docs/tutorial/03_single_pop.ipynb
new file mode 100644
index 0000000..e86d6b1
--- /dev/null
+++ b/bmtk-vb/docs/tutorial/03_single_pop.ipynb
@@ -0,0 +1,478 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Chapter 3: Multi-cell, single population network (with BioNet)\n",
+ "\n",
+ "In this tutorial, we will create a more complex network that contains multiple biophysical cells, but all of them having the same cell-type (we will cover hetergenous networks in the next tutorial). The network will contain recurrent connections, as well as external input that provide the next with stimululation.\n",
+ "\n",
+ "**Note** - scripts and files for running this tutorial can be found in the directory [sources/chapter03/](sources/chapter03)\n",
+ "\n",
+ "requirements:\n",
+ "* bmtk\n",
+ "* NEURON 7.4"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 1. Building the Network\n",
+ "\n",
+ "First we will build our internal network, which consists of 100 different cells. All the cells are of the same type (we'll show how to build a heterogeneous network in the next tutorial), however they all have a different location and y-axis rotation.\n",
+ "\n",
+ "#### nodes "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "from bmtk.builder.networks import NetworkBuilder\n",
+ "from bmtk.builder.aux.node_params import positions_columinar, xiter_random\n",
+ "\n",
+ "cortex = NetworkBuilder('mcortex')\n",
+ "cortex.add_nodes(N=100,\n",
+ " pop_name='Scnn1a',\n",
+ " positions=positions_columinar(N=100, center=[0, 50.0, 0], max_radius=30.0, height=100.0),\n",
+ " rotation_angle_yaxis=xiter_random(N=100, min_x=0.0, max_x=2*np.pi),\n",
+ " rotation_angle_zaxis=3.646878266,\n",
+ " potental='exc',\n",
+ " model_type='biophysical',\n",
+ " model_template='ctdb:Biophys1.hoc',\n",
+ " model_processing='aibs_perisomatic',\n",
+ " dynamics_params='472363762_fit.json',\n",
+ " morphology='Scnn1a_473845048_m.swc')\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The parameter N is used to indicate the number of cells in our population. The positions of each cell is defined by the columinar built-in method, which will random place our cells in a column (users can define their own positions as shown here). The rotation_angel_yaxis is similarl defined by a built-in function that will randomly assign each cell a given y angle.\n",
+ "\n",
+ "One thing to note is that while yaxis is defined by a function which returns a lists of values, the zaxis is defined by a single value. This means that all cells will share the zaxis. we could alteratively give all cells the same y-axis rotation:\n",
+ "```python\n",
+ " rotation_angle_yaxis=rotation_value\n",
+ "```\n",
+ "or give all cells a unique z-rotation angle\n",
+ "```python\n",
+ " rotation_angle_zaxis=xiter_random(N=100, min_x=0.0, max_x=2*np.pi)\n",
+ "```\n",
+ "and in general, it is at the discretion of the modeler to choose what parameters are unqiue to each cell, and what parameters are global to the cell-type."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### edges\n",
+ "\n",
+ "Next we want to add recurrent edges. To create the connections we will use the built-in distance_connector function, which will assign the number of connections between two cells randomly (between range nsyn_min and nsysn_max) but weighted by distance. The other parameters, including the synaptic model (AMPA_ExcToExc) will be shared by all connections.\n",
+ "\n",
+ "To use this, or even customized, connection functions, we must pass in the name of our connection function using the \"connection_rule\" parameter, and the function parameters through \"connection_params\" as a dictionary, which will looks something like:\n",
+ "```python\n",
+ " connection_rule=\n",
+ " connection_params={'param_arg1': val1, 'param_arg2': val2, ...}\n",
+ "```\n",
+ "The connection_rule method isn't explicitly called by the script. Rather when the build() method is called, the connection_rule will iterate through every source/target node pair, and use the rule and build a connection matrix.\n",
+ "\n",
+ "\n",
+ "After building the connections based on our connection function, we will save the nodes and edges files into the network/ directory."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "from bmtk.builder.aux.edge_connectors import distance_connector\n",
+ "\n",
+ "cortex.add_edges(source={'pop_name': 'Scnn1a'}, target={'pop_name': 'Scnn1a'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 0.34, 'd_max': 50.0, 'nsyn_min': 0, 'nsyn_max': 10},\n",
+ " syn_weight=2.0e-04,\n",
+ " distance_range=[30.0, 150.0],\n",
+ " target_sections=['basal', 'apical', 'soma'],\n",
+ " delay=2.0,\n",
+ " dynamics_params='AMPA_ExcToExc.json',\n",
+ " model_template='exp2syn')\n",
+ "\n",
+ "cortex.build()\n",
+ "cortex.save_nodes(output_dir='network')\n",
+ "cortex.save_edges(output_dir='network')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### External network\n",
+ "\n",
+ "After building our internal network, we will build the external thalamic network which will provide input (see previous tutorial for more detail). Our thalamic network will consist of 100 \"filter\" cells, which aren't actual cells by just place holders for spike-trains."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "thalamus = NetworkBuilder('mthalamus')\n",
+ "thalamus.add_nodes(N=100,\n",
+ " pop_name='tON',\n",
+ " potential='exc',\n",
+ " model_type='virtual')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The external network doesn't have recurrent connections. Rather all the cells are feedforward onto the internal network. To do this is in a separate script which must reload the saved mcortex cell files using the import function. Then we create an edge with the thalamus nodes as the sources and the cortext nodes as the targets. This time we use the built-in connect_random connection rule, which will randomly assign each thalamus --> cortex connection between 0 and 12 synaptic connections."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from bmtk.builder.aux.edge_connectors import connect_random\n",
+ "\n",
+ "thalamus.add_edges(source=thalamus.nodes(), target=cortex.nodes(),\n",
+ " connection_rule=connect_random,\n",
+ " connection_params={'nsyn_min': 0, 'nsyn_max': 12},\n",
+ " syn_weight=1.0e-04,\n",
+ " distance_range=[0.0, 150.0],\n",
+ " target_sections=['basal', 'apical'],\n",
+ " delay=2.0,\n",
+ " dynamics_params='AMPA_ExcToExc.json',\n",
+ " model_template='exp2syn')\n",
+ "\n",
+ "thalamus.build()\n",
+ "thalamus.save_nodes(output_dir='network')\n",
+ "thalamus.save_edges(output_dir='network')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 2. Setting up BioNet\n",
+ "\n",
+ "#### file structure.\n",
+ "\n",
+ "Before running a simulation, we will need to create the runtime environment, including parameter files, run-script and configuration files. If using the tutorial these files will already be in place. Otherwise we can use a command-line:\n",
+ "```bash\n",
+ " $ python -m bmtk.utils.sim_setup -n network --membrane_report-vars v,cai --membrane_report-sections soma --tstop 3000.0 --dt 0.1 bionet\n",
+ "```\n",
+ "\n",
+ "Also our cortex cell uses a Scnn1a model we can download from the Allen Cell-Types Database\n",
+ "```bash\n",
+ " $ wget http://celltypes.brain-map.org/neuronal_model/download/482934212\n",
+ " $ unzip 482934212\n",
+ " $ cp fit_parameters.json biophys_components/biophysical_neuron_templates/472363762_fit.json\n",
+ " $ cp reconstruction.swc biophys_components/morphologies/Scnn1a_473845048_m.swc\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Spike Trains\n",
+ "\n",
+ "We next need to create a csv (or nwb) file containing spike trains for our thalamic filter cells. Then we must edit the \"input\" section of the config file to reflect where input spike are comming from."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "from bmtk.utils.spike_trains import SpikesGenerator\n",
+ "\n",
+ "sg = SpikesGenerator(nodes='network/mthalamus_nodes.h5', t_max=3.0)\n",
+ "sg.set_rate(15.0)\n",
+ "sg.save_csv('thalamus_spikes.csv', in_ms=True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "```json\n",
+ "\"inputs\": {\n",
+ " \"tc_spikes\": {\n",
+ " \"input_type\": \"spikes\",\n",
+ " \"module\": \"csv\",\n",
+ " \"input_file\": \"${BASE_DIR}/thalamus_spikes.csv\",\n",
+ " \"node_set\": \"mthalamus\"\n",
+ " }\n",
+ "}\n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 3. Running the simulation\n",
+ "\n",
+ "Once our config file is setup we can run a simulation either through the command line:\n",
+ "```bash\n",
+ "$ python run_bionet.py config.json\n",
+ "```\n",
+ "\n",
+ "or through the script"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "2018-09-20 15:59:16,953 [INFO] Created log file\n",
+ "2018-09-20 15:59:17,128 [INFO] Building cells.\n",
+ "2018-09-20 15:59:22,347 [INFO] Building recurrent connections\n",
+ "2018-09-20 15:59:22,962 [INFO] Build virtual cell stimulations for tc_spikes\n",
+ "2018-09-20 15:59:30,596 [INFO] Running simulation for 3000.000 ms with the time step 0.100 ms\n",
+ "2018-09-20 15:59:30,598 [INFO] Starting timestep: 0 at t_sim: 0.000 ms\n",
+ "2018-09-20 15:59:30,599 [INFO] Block save every 5000 steps\n",
+ "2018-09-20 15:59:52,769 [INFO] step:5000 t_sim:500.00 ms\n",
+ "2018-09-20 16:00:14,740 [INFO] step:10000 t_sim:1000.00 ms\n",
+ "2018-09-20 16:00:36,662 [INFO] step:15000 t_sim:1500.00 ms\n",
+ "2018-09-20 16:00:58,599 [INFO] step:20000 t_sim:2000.00 ms\n",
+ "2018-09-20 16:01:20,658 [INFO] step:25000 t_sim:2500.00 ms\n",
+ "2018-09-20 16:01:43,358 [INFO] step:30000 t_sim:3000.00 ms\n",
+ "2018-09-20 16:01:44,182 [INFO] Simulation completed in 2.0 minutes, 13.59 seconds \n"
+ ]
+ }
+ ],
+ "source": [
+ "from bmtk.simulator import bionet\n",
+ "\n",
+ "\n",
+ "conf = bionet.Config.from_json('simulation_config.json')\n",
+ "conf.build_env()\n",
+ "net = bionet.BioNetwork.from_config(conf)\n",
+ "sim = bionet.BioSimulator.from_config(conf, network=net)\n",
+ "sim.run()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 4. Analyzing the run.\n",
+ "\n",
+ "If successful, we should have our results in the 'output' directory. We can use the analyzer to plot a raster of the spikes over time:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEKCAYAAAAIO8L1AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztvX+UX8V1J/h5tgUmRiBLEEQgmsZ2t9hsWpBYTEhgvIDl\nhUNG3btmcpTjDmjjzSGTaYIy8ljgZIy6z5yNs3Ygq2AnG8aJw7CTmR3HcWKHzFqQSYzPzI4MTrAV\ng1DHmAXnh3GiSHgSGx9bb/+ovvnevn1v1a167/vtbnV9zvme7/f7XtWtW7fqvap69/NuNW3boqKi\noqKiQuIVK61ARUVFRcXqRB0gKioqKipU1AGioqKiokJFHSAqKioqKlTUAaKioqKiQkUdICoqKioq\nVNQBoqKioqJCRR0gKioqKipU1AGioqKiokLFq1ZaAS8uuOCCdmxsTD33/PPAV74Sfp9zDvC1rwEX\nXghs2zY4x48DS48By9Nr/2U+fozSc334Mamrds46r9WB503JtNJ583kgdQTK5Fo6ldaxBCl79523\nj/Yddb37bA/tGsux27D6hKYX0G+/zskTuzfE7ndf+cpn/qpt2wvzNF5E27Zr4vPGN76x5Th+vG1n\nZ8P34cNtOzHRtjfc0La7d7ftzEw4NjsbvmdmwvGpqfD7Qx9q28nJ8L17d8h7+PByufR/ZmZ5PnmM\n8msyOA4fHsjQ0vDyuA6zs3pZXKY8LiFlcz1jOqcgbTQxEWydK0urO8Gqo9ZesXp46kllvec9abta\n/YX6occGqTw5Olu6euwyMxPazaM7yaPrLCXXkqf1yVR6ft172icmLwbrHlMiy3uNWrrTfUyWbV3T\n/F4B4Im28L674jd+70cOELOzQfvZ2cHvycnlx7TzVjoLMfn8mAeefJZOucdj5XeREZPZl5xh6V3a\n3jnySmzQxW6e/N7zpf0510axc970XcvOQal9uupRer/gx7oMEK+cm5srWnmMGg888MDc7bff/vf/\nx8eB06eB6WngyBHgssuAV70KOOssYP9+4PLLgSefBO68M/w+cgTYuhW45hpgZgZ4+mlgwwbgzW8e\n/N65E9iyZXnZGzYszb9nT/i/Y0eQ9eyzoZzXvz5dj/Fx4MQJ4FvfGugq823YMNCdn8s9bpV/+jSw\nb9+grgsLwCc+Eepz1126DSzwvGSXyy4D2haYnMyTxdv0F34h/Kf8Vh3Jnl//uq88rf4SVNbb3ga8\n+KJu14UF4ODBoOt55y2VRzq9+CLw8MM+vajMW24BPvjBpXX3INUHUvUmnc85B7jqKl8/8PabmG68\n/TZuDO2u2VSrx9at6X6f2z8syHtA7nXivUapX1H7U779+4Fzz9XrIWXLdrn//vm/mJube6Co4qUj\ny6g/cgVhjbCxFYT2X+aLlRGTXTJbXIkVREyfvlYPpbOsmD4lq4tSdFnlyfO5K7tRz1C7yPCmX8nV\nS046j4xhrI5j6Tz3mlietu22glgzTmoNCwvAyZPA1BTw1a8CY2PA614Xju3dG77p9/PPA0ePhu+3\nvhWYmAAuvTQ0+dhYGG337QsyDx0Kv8fHQznT08Ajj4T0J08Cu3aFUXx6Opx/7LHBb6/Ou3cPZkn8\n3KFDA1lSn337BnVaWFiqn1cHLg9YWt70NHDHHUvrngLJmZ4GHnwwtEXbDuxZAq0+sTpadikFtfcF\nF4QVolaPmD7UxjfcAHzpS+l24f241HY5fSClg8eOlN6yjzcdP79rF3DffUvroF2PBE+dvXpq+Xi5\n09NhNUjl5sLbPvx6+pEfAV56KehO15dWD57njjvCN78eOqF0ZBn1R1tB0Eg5MTEY3en3zMzSkXRm\nZnkanm9mJsikdPT/+PHlvovNm5fLpfQcmmMwlp7PpCgPHaP6UH6aHXD9YrMT0oWXL/OVzrS4Dtz+\npZB1Jt29s9ASJy8H7yuyPGlH7bzWTzzlke2sMmOO4L5WbR6dc8qL9Xde7uSkbtNYu6dka2m8fcG6\n7kr7tlUPTR9uX899RiuDp8d6dFJz4+7ePbjA6Dd59cn4/CKkNMRgig0QvAMTE4HfiDw3fN4pUgOK\nvPC0GzvvULEbo6aL7PA8XymTievA7V8K7Sbc5SaZO/DxviLL0wYv7TzdaFPtwsvbvTsus2Rw9EJO\nhDx9KaduVn/g5Wo2jbV7yQDh7Qvadddl8mPVI/Y4dWJicB/z1FW7363rAYKMQvS8GO2U01k51ZTn\n1eienJ4my+L0t927fTPXFF3Tog1a+XIorla9LF294LI0fXJlS1tYNvFQXEtojjEqspcuKunWKcqo\nl9oZ07eU4urRwUrXh26cyplqzxzZXn1j5fC+WEqX1eSl+qzs97G6pu4z63qAkA4ubRYpHUyePDKv\nlo8f885QvY6mvvJ59SiVI/PGZkRe2V7dPHJL6qW1cw5S9uhLT2/ePu2U229yzvctu0SHUhle5Mjx\n9qPUNddlgFizNFcCp5/t2AFs2hRorW0bHMknTgTK14UXhrR79gBPPRWomJSW02NjlFOiznL6I1Fo\nr7gipE1R6SwKZIyGGKMT5tL4LHpmDlVWawNpIy7HQy2N1dWqo0XZ5TTBnLJlffbvD8QEq014ORxc\n3z17dMqmRWe8886gryVbQ6oP5FB7UzRbKctDr43pFus7nmvCQ6nuSgEmGUeOxOnwHuT0R2kbq3wu\n88SJ0Hd27hzQ7x96qNJczRWBtVqQ311XEDmzzb5WHd7znrTDXEGUyirRr4+ZXh8zytzzXW3Yl91X\n8kW5kn5c+jJjLnKv2T6g2cZbV24XrNcVBM0iaHVw221hZi5n+HzWTy+cnHNOOLdpU5glbtgQX0Hc\ncsvSlQe9TLR1a94LNAsLwIc/DHzjG8Cb3gTcc489s6TZAM32tRfIuI65L8qR/PFx4Oqry18oSq0g\ncpD7MmDuiiE28+dlpWaLsdUL5Y+9QCZfCuQzvptuKl/1xF7qi9U550U5Lu/qq/2rE63t+CrAu9rK\n1Vd7kSxnhUYyjhwJ17i8ZlMoKU/qTSsIXr6Uy1ffBw6Edvn614GjR997Ym7uZ37ZXzJD6cgy6o+H\n5jo2tvQ/p3Jq9FdimsSofcQGkLIli0nLH6O5pmZUnPZHZfFz3MkVm0XFHHAkz6LQpuRobCMP28JC\nrC6WXH7c44z1zCatNkrJl/3RYwNJcbXYUV45Xjad5iz1rh60fqiBypiairddqu5Wm3mZPSk6t8dJ\nbdXBA6+9JHj9tLrGVqFLqbJjL7aF990Vv/F7Px6aK93EiS7Ib7CS/iq56lbjyQFi9+6lHc5DieQd\nO5ae6sRpf7ysWGe39I/dEFMU2pQcOcB46XgWYnXxDBCem79nELHaKCVf9secAUL22dzHGCn6tKyz\ndXPx3MS8g0lqwPTW3WqznHcDYnRuzyOwrhTXEsJDaoCIMaKWPmo652hbeN9d04+YtmwBbr45vPF6\n5Ej4fs1rgNtvB/78zwdL1jvvBD796
ZDnp386PCI6diwsbV/72uCgPnQoLFUlyNFNDu3zzlsaf+nK\nK0PZ27cDN95oO4/oOMm75BLguuuWLzm3bAlvlJ4+HZbNe/YMHkU8+GB41MVjP9FS+6KLdHnWI6Ut\nWwaPZD75ybAsPXgQuPXW5emsutCxu+4KjxoOHQKuvTbod+BA/iMmqy4LC0t15HWcnBzo4InNQ30m\nttS32ijmcOVv3t54Y0hHsjxlnX12aOvPfrYsJtbGjaHumt1PnAh99Oqr7fbkjvmPfjT+OIT3Ueux\nJy9jzx69T/C633VXiGigtZ/VZrzOlmNf61NSXuyxpKzDzEx+rCyyV+4jXF6/K69c3r6yHnQ9Hzo0\neGR5773AQw/9yxfWrZO6bZc6cDTHlXTwSEdOruOnC82VyytxQsZovN5HA6m8feuXgy769OGMTMny\nHs/RpQ/beWbBOX28z77s1S23/bx5++gXw26jVPrca6A6qRnIgTQ+Dnzbt4VRltMT6TxRUWmWu39/\nmB0AYYagzQw0xw93jJGTWtJcLceUJwKmlteiXubQ/bRoqeRk1I6VOEljEVBz5PD8Xid8F6ouR6yN\nLIcnpw0Ttdq7EqB2katD0iXl4Ez1qRyaa4zaK/Xx1jPWLl0IDjyv5dgvjVQci6oas08MnnaQBABa\nddA9K0VCoDz8HrWuo7lao2bsfA5NNTUztPLnzkBTaVLySmi2XVcNXfXoW7e+VhA5s9E+bVkqq88Z\nco6N+2iXvlYQXdJ48vW5Oi0pNyeysPyP9biC4KMmPXvWYqbzGTbRU7kPgWYtNDOQz1Tl7Mf6L2cW\n1vPq1AtJ1ozH2iuh5KUuOZPxzrI8q6Lc/TGkfKvusWf/McplKaXRs3/B5OTyMnNWdLIOnOqa85Kh\nZZ+cuntXX3IfFs/MPEVz5XuJeGf63pW41hZe2m/OC4EpXb3tIK9zWpnGVpep1ev8/DpcQdAoKams\nxDaSwff48Rg7gI/Ykg3QZ7C+889PrwhSlNJY+i4yLVizqBTbwotYXg+LKVdmiR6p82Qjat8SqqvV\nTh7mVYwGGYOnH5WkjzF4uAxJ6e6jfEojmYo5dpHXSElAxJI+KOsXa18t+jM/hg4riFcUjSqrAPv2\nAbOz4ffRo8DmzcDx48Bzz3WXOzkZZBIrhf6/4x2Dsui8R0ctDv2pU0GuFtudl084dAj4wAfCby7T\nSt9FZkl9KgLI1qdO5efdvj3eTqn+punibS9PPypJf+hQSJPq60D/5VOaEyeWXrM5duHXSI5+XVF6\nXee2YxKlI8uoP5YPgqKpXn992FB8bCy81EKRGyn6Ko+u6Yn+SjM2KUNu6l4SzXViYqCjlp5HcaRj\nmp5cZm5US62eqVmqhVQ01xI5Up/SaLbeKKU5Mj3nrTaWulF7xyKaevTPtXsqAqgnbx+Rarv0nZx+\nz6/ZHMjIz11k5FwT/L5j3acs2fIYajTXwbIq5eCJOaxj8q30vHyPQzrHAWrVwZMvVpcuMlJyhyXH\nkjsMR2UqfdfzWrou9ctJF0s/yrK0c8NwUpfoq+UdJb1VK9dLotGOdRkg1qyTmjA+PnisRLGYKCKm\n5ji96aalNLCZGeDpp+24O3LTcIrh1LYDJyWVL2M5WS+XWQ5XzREWo0DG8nFYEVxzZMQg6YalMZ1i\nDtCUkz61cX3qpS6vLVLO0RLa8b59diwsb/TP3PbT5JY4qrvYU9rS86Jjbp2t9vI6juX1l4p2a8nI\nvSZkrCmL/KG9CCuv83XppI6NtnI2EpuNxlYAVlovpTWlb85sscvsfxiz7FjeLrJS+buUNYqVVpcZ\nZ59tUIJRz+C79psu/bq0rD72CCnJ06WuWM+PmPgzTv6cTtsJjvsNeLrY82JNDn+eGXvGagW4s9LH\ndmOLPe9N7Tbl0S/nuXJMt5Svw4NYjBnNPxR7LlziZ4nZ07t7W8xnZNVRq0sfOnvg9XXk2jOn/+Vc\nR23rq7N1TZX6pbRdBlMoKY/y0fXF/RFW/7Cu4XU9QNCoOTGhR3ElQ2kB8CYnB1Eap6b0huf0Mi4n\nFmFV6sZHdJKnlRejssUC6XmptvJC087lBIvT8mtRL72zJ+tGYNlYytXyl8zcrGB9KXm8/BS1UdqO\n90+rjTw6l9CLc/LH+pMEr5fUX97Y5PXqsUFpsL6cPhHr1150aRtuGy3qtFY/eY/pEqxvxW/83k9s\nBcFjKqXCZNM5MjYNKvQd64QkhzeU9yKQ8rTyYhEbYzfuWAfkOsRuqJ6ByCNbi3rpnQVbF65lY8+N\nJHf10rYDe3rbVNM/dVPQbMf7Z2zQi+k87AEi1p8kvJMnmc4z8Ht11vpOTp+I9WsvurSNvOdo78jI\n+sl7zLoP9334cHjsMzERloD0mz8WouUZLdfoMdGHPjSgyVrLW05B1B4x8fIsHUkup+V6los8Xxea\na0p+yY1Us1Hq0YpHvz6pniX1Onw40KXHxtIUSs2GsUcClpyu9uvyiEleI7mPXVKP0bTHljFbWW2W\nc03k6OnJ+573dLdvySMmee/xPJqWj8NW9QoCwD8H8HkAfwLg3wF4NYDNAB4BsLD4/dqUnNiGQXIW\nwmd/8jg/xn97ZkIyLz/ndWp6yovl85ThQUkej6w+5ObUPZYvJ68mp9QpqPU5b5mjdIJ2LbtLu5SU\n3We/8KJru3TRwVvfVLouPoih0lybprkEwK8AuKJt21+cn5/fA+BsAP8zgKfatt0zPz9/CYC3zM3N\nPRqTpdFcJS2S4i1t3z6gotKeEJwmyqNGalv5caSoeNZWhBa1dMMG4FOfCnq+8512ZEYtnyciZozm\nau3voO3/oMGiBva55ahGS/Rup9lnZFrP9pIWBTU3PhaPn8O3isyhCZfQlGVfu/PO+Na2pWV6+q4V\nC41Di4PloY+W0Ex53hTNPEeOx7a5EYK1+E28D63aaK4ALgHwAsKK4VUAfhfA/wjgGQAXL6a5GMAz\nKVldaK5yVM2Z8aZGZ++oLo/nrga6rCC6ns/RresKIncV0NeqISWz7/yeVemwV4Qlfa3vdLl1X4lV\nRB/5c+SU9mmrT6HDCuJVRaOKf/D5s6Zpfh7A8wC+BuBw27aHm6a5qG3bv1hM9pcALiotY2EBOHkS\nmJoKt93LLgv/d+0Ko+f0dEg3PQ089tjgP//9yCPA888HWePjS+Xv2xfknTwZ0lFZJ0+G9Pw8z09x\nXqangTvuCP/Hx8P/Rx4JM20ZC4bqMjOz9Jx1nCDr5j1Pu6BxG3FdJXj8p5j8mC4paLpaNo6l5998\ntzetXpYOO3fG7ZFTBwmpI6+fZefSMq3683J4Go/+qTI96Xi/pr5HaaRO8rqi87HrQisnN44Y6bBz\n59L7SS7ktRbTQ2sXq3xuI36d7N07kEGxpIpQOrJ4PgBeC+A/AbgQwAYAvw3gRwCcFOn+xsh/O4An\nADyxbds2ddQkjz1Fa6Uompx14KG5xkZnyQqwWEweqquHQinlpFhKKYaFJZfrFmNjpcDld6EDcl
le\nuqdVt9w0HGRvK+Ju28ad3yWslVi/yMmfojprkH3YS3W22sqrG28XyTz06O1p1y79musuo0TnoFQH\n2S6xazgW9RWr9T0IAD8E4FfZ/9sA/BJ6fMQkG5CMxTeO1zqiRnNNdfDdu+33IDSOMjUyv3hSFEqt\nI3ku/ljn9dBEc2+ilvwudECuk5fu6bn4ci9Q3kaWPWI3ry4DRNcbWYrqrEH2YS/V2UPBjulmTdxS\nba7l7zKIxNDHAFGqA8/HtxqQ1zA/3vcAMdRHTAiPlq5umubbEB4xvXlxRfC3APYC+LnF798pEb6w\nEL7p8dLkJNA0YbPvvXuDgwcIy7KTJ8NvWnq99FI4v2sXcN99wUlmPYbYuxf43OeAt74VePTR8Bhr\n48ZwjpZ1JE8uHcfHgfe/f6msxx/XHzGNjwMf+QgwP7/0ccrBg8GprT0C2rcvPB575plB3SRILi1x\nuQzSjR4D3Htv3uMUwo4d4UP2tHTxYu/eUGfPIwEq20pr2TVWNrXRz/983uM23ifpf8qelOeGG4Av\nfansMQb1Uc3usg9KyD68dy/wO4krkj/aAOJtZek2Ph762zveEc6NjS1vo/e/P/zWHvXx/JbN+KPe\nnMeMUvf9+8O1T3XPkcH7SsmjynvvDbbR+jm/tvftC3HhrH5QhNKRxfsBMA/gGALN9SEEFtMWAL+P\nQHN9FMDmlJwYzdUTgyn12xMtNRXR1TPzK3VSlRzvQ7YHMYdrF1l96z0Kh6anP/WRp0+dZf4ufTQ3\nrTyX66Addpv22bdzUVK2lg6reAWBtm0PAjgoDr+MsJroBD7zIef03r3LHTXT08D99wMTE7oj9bHH\nwgzhiSfijuO9e4EHH1xa3sKC7oSzHIMex54lr8SBrdmMf5fI0GSSzSU5IBeWfTSnP5DWWzrxuKyY\nDjHiQqoM6pOxVY2sm+zHubNUr8PY0p87ix98MN0XYsQBS7YmT8rR5MZWa8N2UnPCwqOPll8jXsTI\nAh4SgnWdFKN0ZBn1JxWLyUNxTa0mPCOypzxrJI8dT6UZxsx/GDKG/ULRMGe6Mk/pDLlLmSv5olzu\nLLkv+5fab7WuCLvAaovcVRQ/htX6olyfsPaDoBdt3va2sBfELbcATz21/EUr/nKafFHuySftOO/8\nJR/aY4JeyLvqKjuOvfVyjuelHfliUWwPAo+8VOz7Li8SkXxt742SF4piewfIl4c8L0qVvEDmeVFO\ne0FPvtTo3TuA22+UL8oRYvtwdC2zpL/nvBjp6bd92KbLfhCW/ta1SH2J31esfVa0e4M8tmpflOvz\nk1pBEANDY2JIVgqnYsaC57VtOlifTJMa2WPMC4sVwmcxJbRKyq+xU2SZOcH6NPldZlox21h19NY9\nRx8PC4nLtZhqXuaLbN8+dc6JRZXLvkr1S286WW7ObNmjc9dAhlyGdZ/wwHPte2noMQaZtDdWsw9i\n2Ni3LzybO3o0bEx+4kQYYYHlL4icf35I9/LLy+Vs3w685S3p565UlrYJu5aefwPxTdxpg/SZmaWb\nqvNyDx2KM1Jiepw8ObAJyZBlamly5HttoyFmm1KUvHSWK5dvbJ/bNiSDt2+fOnfVLQZvv8ztv7H6\nD6s9vfDcJyx4rn1gYB/Zx3he615B6bm9O6F0ZBn1JxZqQ4u4GIvCam0sZI36PIqiTB+L1KjNClKb\n+/D0/H9f0Vy5rpruJdFPZWTO3KiVqXrEbNx1o5wcPSSsaKW5m8uURlP16JwTtTfHlrFIrVparf1K\n+w2vQ0qP0kiqEn30s5jtU9eidu1q9daOY7W+KNfnJzcWEz8ec/jkOtFyHGop51EKXZxUXWXmoC+Z\nVt6+6t5Vjz70jckZBc11GP2oNG1pv8m5robRx4eF1L3FU28tTZcBYs07qfkm8eQ43rNneVTDa69d\nHs1Vc2hzueRMijmLpIM65rwEfA5M6ZCNOQ8XFoAPfzj83r8/Hc2V6yT1827krump2Zl0yd0gntuL\njqccdNTmpc5DDq9TMxbN9ciR8L1zp9/ZS0SLEmdqrs4yqmiMCGHJ8RIbNN1SUZJjfUargxW1V9Oz\npJ/TPaZty4kcqTJlf5K6a2QYaVvNNtVJDZt2GjvveTkutWrIWVFY+sbKTsmkc54yu8wuU+lj9epj\n9pWaPfU5w+sqy9MmWvpRviiX22+7lFky8/XILi1/Na/wYvlKV05YzysImq1t3RpmI20bZpM0o6Xf\n55wzWF3QbJwomdpsno/EjzwSju3ZE9Lt3KnvLcFHcDmTIdCsgPTRZmvWzMHS87nnwm9rBeGZXebO\nIKWNpE1jsxxepjar0o5rMridaN8Paffc2WLKXh553v0kZHmjWEFY+42UUJ29+5Dw68WigUvKqmcf\nj9J+nUt77do+OdeW7F+pvVasduP5HnpoHa8gOIWV79vKqW2pSK6SuqYFsePpOM2VyqENwvugE1oB\nuDxUXA1UhxiFtQsVkOfV5OQ+H47JsKK8poLq5QRK80ZGtZyOubbsIyCcp0/xvqTNZPumudJ5K4Cj\nzJ/bVp70Wpqc2Ty/Fq2AnH3oKdNSv0oF4mtb/Tg/hg4riFcUjSqrEOefH0bSzZsDvQsI9C9gQA+j\n35Tm2LFwbPv2pVQxopAR5XBiYpBucnJApeXUsmPHBnm6wqLDST292LdvYIu+dCwpX+ptHY/JAJbW\nYd++YCeL0pc6nwuuM+8nqx2cRlnShzR4bD87G/qtJ39uW3nSa2ly+h1di5xCX3L9eesl+1fftO9s\nlI4so/7IFYSkyF1/fZipyM3FOeWQqIS7dy/9TlHHNBmSKkoUWovCRvJStDuL7hbL02XzeC6/lGbJ\ny+9KB0zR/TT5qfrl6uSlqWoriBJbyj7koY5aOmt24X07VhdOC/eU7+nLs7NLaeUpvXPps7t3h+s+\n1ralfZLXL1aHHDm59HFJ1deowqn7BdYjzVVz3miOJOlgsr5jjk5LhtfpleNMzX0ck1t+7HwfcYD6\ncvBajrgSO4zSKVmat6sNcxyYfZXfR7/q0mb8uvekW6l+3Udey4aedu8yQKxZJ7WMWXLkCHDFFcC3\nf/vA+cs3+ibn6f79wLnnBifx5ZcDr3oVcNZZ4fhNN8U3oScnFcn4+tfDvhAxCqp0CGpUNQ5erxMn\nfHFnUk63GE2U01RjjvMYUo60HGi6ppzRluOVzpU431NEAk/eyy8Pt6+U01fSKC2HewyWEz/l6M2h\nVGs6x9J57Gj13ZjTnOtM+7xYBA1uB0ntjSFFw81BSSwo2S4UC0qzk7xfcNr55OQ6p7nGRllrBiDT\ne2cgXaiysWOpclMrlZzZVsnMPEfPrisIj66e9J5zuTrk5vWuIrqu4Lro6+1jpeV1Wd16+nuX6ykn\n7ahXECXlW3bBelxBEFIvymmzIpoN7N8fRlpgMAORNDMZaVPKm5kBnn5afylKzuBKX2qjmU9shhx7\nicea7fGZjbV6smC9SNh1BUF2k7PHGFU2NpP1zKK1u
lkyvTRX6l+XXpqeucZolF6abuksVb5oqPUx\nDd4owrG2kef5iplWjtqLnDmrndSLq976dVlBlPTBVIRbrV9YK+11vYKgkZI2mJ+aWr5vtKSiEr11\namo5Bc8auScmBk4fjdap0c+kY5A/M82hZWrHrPrHqJkWRZSO8XMpWCsyskfpHsAaHVPS/Tx16wIP\nzTVWP05L5vTN1Cya07RzKZlemivX2WrD3D6Q6stW28j8Xrp27NrM1dFbPx4Buis8hAHZ3719SrMh\n1qOTWjMmGZBfbPxmQ+fkNzeobDwpn8tLDRCyEekiiIUM1jpH7AYp9dM6sMWB19738C6DJUMmtvl8\nDrSbSuwi9/D7cxG7UcXaQub3Dr4xrr134M69uUrZnnrF9Lb6cqxtvDx/TV+ZP+d68oLntd7lKIF3\nwsD7u7dP1QFCweHD4SWjsbEBzZUoikQPI1orp6y95z2h4W+4IU07veGGkJaorpT+8GFbhkWDjFHz\nLLphjP5HOkxN2TdQjeqYoo964aHj5cpJUfkk+ozqmqK5xuxI+VP9SitzYiLQtbtSjTU9PQNNrg29\n9E0PNbmk30h6cOz6LaVwU36i0XftXx6baWmsa1zre7LvrvsBQj660ZahMQezx9GcorjmOiRT5UlH\nk3Y+dixOM/EvAAAgAElEQVT3XO4KYthycs55zpfokdumpX2C5+nTdrk2GVZ6j24l7efJU9IWlow+\n+lepzax8nr7XZYBY805qYBD7xkNz5Y5UTlu99NLl1FJOsSSqnow1ZFH5LOeiFTvJ2lIzFbXS66TV\nHKWWk1raIIXSLSu1dsxxRHsckF1iMfF+YUXcjW05GsuvyYj1ixInteYcjckrjVGUisWkOb61WEy5\n/S9ny9NSCjfJSMU7iyHXUa6RB7R4VoDtzOZ9b107qa1RU46u2m9v5NFRzOr7WAXEkFNm6Wy86yw+\nV5+cGeSwZqZ9tkmXVUAf6fpeQZSeH2WdvRjWyjiVPrUC8vQhrNcVBI8W+fTTSyO6ygiR1147oKO+\n+c1hNH7b24AXXlhKUdVmJXwPh6eeWk4pzXlJyaK6xuLWc1qfXAV4o3Dm6Jg7k9T2hSiJmW/VJfe4\nlNmVYqjVN7Zq8+z5IXWj1a7sW179vRFoYyut3D0PPC9o0sry4EHgt34r3X45LzZ6+2kJBRiwqdwl\nNNecSLk8Pe2vYkWSjdFhT58O+j/++HtPzM39zC/nab2I0pFl1B9tBUGjJTE/gEFETKK0klefaKCc\nkcBZTDH6GMkg2TzqZi5LIof1xP/HykmxP7gszoywHJe5Mx5evlZODqz8uce7oC8WE/W5lB1jLCYv\nYnbwtKfs4x62jqff8XSxGTD1nRw6qbftvXpa8vl9hO4TucjVQd7bJHXfkssp/QOG19iLbeF9d01H\nc6UoiSdOhGiuHBRdFQhpTp0KvycmBtElt28fRGo9fnwQUTIn2mWfERdl2X1GdtQiSlrRSHNtkCqn\nj/y5x4eFnHY4dcqXbtgRQ7u0Z5+YmLAj+gKhL1KEZQ+G3faa3eg+MWzwe9vkJHDvvXl1PXaMR7H+\n8peLFSkdWUb90VYQnDJK9Fb5TRFGiSLGaa9EebUoolQGp8lyuqykh1q0Vn4sRkm18qSocV56oqQ9\ncplevr0mS9qgC811dlan8nopsFY9cupHbbR7d5kcTxvL9LLP5urttbslT0Ys9rSfVzerf2p9SFKL\nY2V0odl6wfukJ3KsJSPnmuBlpujjsm5rjuYKYBOA3wRwDMDTAL4fwGYAjwBYWPx+bUpO7BGTXIJJ\np07M6ZN6XJBKr8mOORlLHKBdnYF9lBHLn7JBqU6ec9565OjF+9UonZJe0kSXMj326cOp21eZJX2i\nNF0qb6mcPq6tM9ZJPT8//ysA/lPbtm+fn5//1wBOAXgXgKfatt0zPz9/CYC3zM3NPRqTozmpOf3s\nttuWUlZ59EMeH+imm3TaquVQ5LQxHgW2VRzhWhTO1Ebksdg8kkprRbZMOfVScVs4zdAb8ZLXizs+\nhxWzJhb102PfHGd1ioqc2iLVG6dIOo7JsSgj2XZ1Usu6WdGCtfb09icNMZqr1MXqNzGattf564kM\nm4qTRQSVEgp3LlFCi6kUi+aauo5XLc0VwPkAvgigEcefAXDx4u+LATyTkpXacpQcguTE4c4vKzYT\nPQbgDmsZN4aP4JSPOxJjTkvrkVMqveao6+Kk9czOqOwuISt43az4OyXItbFW35xHTFZ5lq01B2TK\nKamd18r16p0bpoLXJ7csrd9oefg1GJOb4/hP2StXvnd13jXURu4jXNnXrPrLa1Y7htX6iAnAlQA+\nDeDXAfwxgA8CeA2AkyxNw/+L/LcDeALAE9u2bVMNyRkS8sbNDaXFZuINv3u3foOUNz052MRu0LFH\nTrH0Wme0OrnnIvE8l/dezDFog2kfjylybewdNHLLs2zd1wChlevVO1Wex07esrR+o+XxTjo8kxyv\nvXLlp27cdH737m4DRO5jJtnXYvXn16x2bDUPEDsBfBPA9y3+PwTgX8kBAcDfpGRZKwge90ZuIUrn\nY05ruZWg1mFiW2rmboEZc5hRPksny3lb4jyT9ezLucydapquqcHHWnVZulnpS8omxIgHHtmeNvHm\n6eII5nm9ts91pvbhJE4RPXhdZHyyUTqpS68PLicnL6+fVr5Wf62uq3mA2ArgOfb/HwF4uM9HTHx2\noDlz5PnZ2aWjuWdkj6WPrRI0mbnleeXlztZz9fbK6rvuJbqV1kPLXyKrpE1K21HTOXasRE7XtKl0\nOfbOuTZK6tRFt2EgVb7HHl0GiFcVOS6caNv2L5umeaFpmu1t2z4D4M0Anlr87AXwc4vfv1NaxvQ0\n8Nhj4U3D3/iNwff0dDi/b1/YlvCll8JboidPArt2BYcOpXnssfB7YWHwLsT4eDi3sBDyTE2F7717\nw/fJk+Hc9DTwyCPA88+H/+PjS99jsPSlsgm8bMo3PQ3cccfSukgd9+0LZT/zzHKZMZBdqB4pvTWQ\nLjt36vbUyozJJ1vPzCxP47EbtVluPSR4WWNjS2VZfYQfoz5x+eU+HajeN9wAfOlLee3I80u7aW1c\nIseC1SYSlh5kN9o6lK4vypMq06uvV09Ld5Lx4IN+20ho/caTnttG3rekfvxeQf87v/tSOrJ4Pwh+\niCcAfA7AbwN4LYAtAH4fgeb6KIDNKTmlK4hUGu8IncrTddblnQEOa+ZfgpRtSuXlrD6GMavL1aNk\nVqvJLI042nXVVpK2D9kls/OueUrR56o0N72nf8T6IFbrI6Y+P9aLcvQMju/xIJ/ByZeA6EU3+Vwv\n9lye+wXkM9OcZ8fWS1ipZ9uWH0I+p9TKjOnSZQ8FzWeSehbukaels+R2eS5sIWYXT/1K9lXgdrT2\noYjl9/poYv89euemj+lXKitXX+3ZfY5927a//VJK2pTuU9q+F/K+IL+H7qQG8HEAH7M+pQXnfmIv\nyk1OLqer
EmOBswE4u0b+1hqO5Ev2kqSh5jAsOINKzgRiHddiqdBxHntFyrXy8llGyUWjyY/ZIjV7\niunA24LrG2s/j9xYfSieF8/nqUuKUWTpJ/twzmzVW6ZMx+vjkZGbnl9/Wn1k3TVZVvt52l9Lkzub\nl/eQ3GvE2095edImlgx5DWu2HvYA8T8sfg4B+L8B7F78/AaAXygtOPdjrSDIIJyuqj3yIJorv9nz\n37HOK6mykoaaw9Hm72BYHV7TJTVAyIHRk5dfeF2W0KkBwnuDjukg2yJ2QeTIjdVHu1F76pI7QJB+\nknKdcyMqHSB4fbw3/Jz0qZujrLsmy2o/T/traXInDPIeknuNePupLI/bxJKhXcPS1iN5xKQV0qXg\n3I8cICT9jLYE5LFk+CMkvnyX52PbEcrHTHJbyNTy3kNDi6WX9dSWmNbykzCMxyJcRuoRU4681KOI\nXL1LlvdWXCBLfu4jP6s87fGoF554R/K//D0Miqv3+pDUbqsOuTp7+lRMd37txR7xeNvY07bWYyPN\nRh57jGqAeBrA69j/ywA8XVpw7kcOEHxWQb9jDlM+upY6f3g5lDY1O5Xnu6S38ubKLNErJrcvJ7VX\n11T6nPNd85S0lyWj1Ekd09lru1w7dbFRl/Jz85ReM96ySvTORax/eK7jLgNEDs31nwP4w6ZpnkV4\n+/kfAPjxjPy9QtLPiCJINNedO4FHHw20NE5R27ULuO++wfmpqXDL37HDR0t95JEQ64nTXGMUOkm5\n1GixBEnb4/9jNLuUDtp5rpdWroeSRzJ27gw2TdFcPdAokbH65drfg5hMjWZM8q324pRDzZ6cim31\nw1KdY/WX53IosTkUWks3eTzWjpz2SeU/91y6r2l6eumxPK9GMaU0/NvSPYc+bOWlexfXQdqwC6VX\nRc5oAuBsAFcsfs4uHZVKPrk0V+9KIjVr865CcmdentmId7bUdRbddTXRZSbaVZ8+ysuRWTIj7zLT\n7UPnYcnoOnvOsVHuyjKWt2T1M4zVXW5ejw36XkEko7k2TXPD3NzcF5umeSuACYRQ3ZsBTMzPz/93\nc3NzT/c0VkVhbTlKkUwpKittzcc37ZYRV0+fXh7NlaIkyuiOPEomRYXlsnOjl27YAHzqU8CmTcA7\n3xmPXMm3Gj1yZHkkSb7lqhbpkcvVollq0TZp68ucqJVcb03PHGjRaWPRMOU5LTqnN/ooIRYZNVYe\nj6TJ7eCJjpq73adH5z7rraHLdreUP9ZvUluleqPm5vQnrY6nT9vRVD3I3XJUK9+ykbZt7Y4d4T43\nkmiuAOYXvz+kfH6tdGTK/ZTQXLmTUGPacGqoDL4nR2SLFhtjcmgOLIvmGmMUpWiqqUiTFpPDc8wD\nyTAqoQLGys9hnXSZaRJiTDOpC+8fnj4R01kLNplLzeVl5uiR2/YaqyxHN00GpzC3rW7bXJ1z20LT\nz8vYisnJva5k+6fKl2Xw9FgNL8oB2NuXLO1TQnO1DCZpjLELk5eh0WJjDafdmKybD0/r7RyULhVp\nUrvQvMc8oHwp2rBXTozrnYJ2Yy292XoeA2oXf+7NhMuwJipenUsHiNy2522esm1qgkMy5GCTurF6\ndO5yY+dt0IecnOvKO0m0yuhrgOhtw6D5+fkPzc3N/UovwhRoj5i2bAHe8IawfKUNg97+duBv/zYs\n1zdtGmzus2tXWCIeOABceWXIc+BA2ATooosG6a67Drj11sES7sSJsL/rZZcBZ58d8vBNXa67LqQB\nli8ftaXsxo1hqbh9O3DjjcuXiuTc5A5N2ptWlrFlC3DzzcAFFwzqYz1iog1pHnxwsGQ/cSLU/brr\nlm50w495QI8QDhwIdeq6YRDXUz4S4JvcaI8ixseDTbjuZCdvfTZuTD8GJF0OHQr68kd0n/zkoL94\nHinwfnzvvaHc3Md8Gzcu7wOTk0FX/rgh9Rjo3nvDo9cUqK8AQceYk1rTjZd54EDYyOuTn1xa5y1b\nwnWb2kQqpjOVPTOjbwiWqiNdk5/9bF6b5uqplc0fS42N6TaUj4nvuy/k5feMo0ffe2Ju7md+2a8x\nQ+nIIj8A/rgvWdqn1Emd67S2Zowxx3SJ0ys1K0rNXL3nYun6eBwj83RxyHl06EtnT/kpmTFdcldS\nXW3YtX/0XaYn3TDrLNP0tbItkVHaPz328bQ7MPZiuwpWED8+6hUEMBidyTnNndS0Rah0RpOzmbYR\npNXGVVctn7FJ+XIL09On7dmetdWntp0lwXJoxRyIHudiyllHM3Nre8gYNEd+ibM0tUUl2SdX5xJn\n7ZEjwDXXAPfck7eVJR371reAs87S21jTjzsXSxz9Vh/wbElr1cWrc0rPmJNakkd4+al2y9m+lxNL\ntFVoDFQO3UtKthzNdVJL+27dGvrFhg2BlGJtpas59h9//Ke/WFcQzpWCtmrwPudMybXy5oz61vlh\nzxC7zMJT9ug60+yaflhyrbTeFYgmo4/ZZp91GXbaWN2H1a+HuVrqK79lixJ7YJX4ICbn5uZ+rxdh\nCqwVhJyxkS9Czma133KVoc0M+Azk3HPDLODaawerkZtusmcHmg/CQ2nUZskxKmtqBWHN9iwaYekM\nS9qGdPHSClN0XKmPTG/p3WXT+NSMT0vrXYFoMjT7eWD1gdjKQtoqh+ba1Ub8nEXjtK4f0ttDybZW\neTn9Idc2lg269EFaQVCfsvq6puf8/HBprvtjn9KRKfdjrSCOHx+wkYC2HRsL3zE6K1Fip6bSFFG+\ngqB8vAyNgRIDyYsF/tIorzGmlZfhIPPyummsLe9sR2OTlbA9rLqQnhpjRlIkPTbNqY/HBlJn3u+8\nZVIe3rdykGIKeVYWuUwdb/qYPVNUVyudzNOHnpbus7PxaLM5cnLYgRrzksrPuaYx5FAbG4tGnhHh\n0CHg+PF4mmPHQprNm4GjR4GXX156PIZ9+8Kr6zyfLP/o0YHsQ4eA97/fJ29yUn/1/tAh4AMfsNPz\n87GyeJn0bckmvak+lm4aeJ6xsbRNc8FDQcTqHQt7kGOzEhtoOHYM+NjHfGUOC5ZNuu66l4OYPald\nZmaA2Vm7jWW6ffuA+fnR6P6BDwATE/3IAfrpCznXdCeUjiyj/sQ2DNq9u21vuCHMXnlETBpRZRRW\nvrnQxETIa0WE1CJOyjJS0VSlvNSG9jLyoxXdkcryRDSl9Np7Hlx/bwRSqVduVM6YzFj0T5lf2ibV\nht56xdJrNtTaw7vxj9bHciO65vSBUhkl6VO2lJtv5bR9TvmxiM2eOvIo0CUvgZZsIsXLi0XrlXrJ\ntBjyfhC/GPuUFpz7ib1JzR1csYiHXZzUsTJKHHYlDuNSR6T3fBfHXcz+Xj1y03nzjFpeF4dkqTO0\nD526OlK76jXM8vugqJa2jVfXWPqca18e6zJAeB4xfabjImVo4MssiuxI0VxlxEPaRJ5H2dy7N/yO\nRdGUG9jLqJtWpEZrc3vP5vRaRMZYRMjciKZaRNKTJweyrMijsUimB
BnZlefxPtaw6q/Z0yM793FK\nzNaeyJy5ETVlH8vR1VumJ0Itj5TqgceuqXK5La3orFZ0YY+dZaTcVGTdmP6lbVMSzZVHf77jjnBd\nadFkU5GaOyN3RAHwbaWjUZeP5aSWo2YqZrp3ZLZG4z5G9pwIsrFjnnNe+X2Vk2tfr365NumCvm3Q\npTwvuqwghmXH3HL7Xjlr6VZj23jypl7kjcnGKPaDaJrm+wH8KoBzAWxrmuYKAD/etu0/62Gc6oSF\nhbC/wsTE0hUEnxlRXPe9ewe/H3kkfE9N2XHtU/HWU3H4+YzF2j9BQsr0zGhjdeBp+UyR6wakZ5HW\nzITP8KTufc3erZmSd08C7x4XVlmec1J/72xV1gPw65rSy7M60PpqX7aMtZE8Z6XlfUiuGlPtb/Wn\n0pl8TpsQcleU2spYu2dodctpGxe8IwmAIwC+E+yFOAB/Ujoy5X4sJ/Xs7ICCxumgMvoqp6nJYH3n\nnz84x+VqtDGLbpbac5fT93JoqW3rCwjo2S/XomNqNLoc9CUnRoe05PLjMUdsqp28aXOCpsngczF4\n6bqe/Dn6lqTV+rSFFMWVn5OBMTXZsT4c0zU3AGUulTyGXB209PyeJYNrcltpxzCiHeXQtu0LTdPw\nQ9/qYYwqhqSgnX9+eIGEKKc7doTndhZFdfv2EHzr6FFdLrCcNiaRopVpFM0UcmbdHtrsWkFXemmM\nSphD/+tCFeR5d+wYUDJzMEoKagm8tGMgTXGV9G36D/RDCy1tyz5pqbk6xNIfPz44prVDyf0mCu9I\nAuA3AfwAgD8CsAHAvwDw70tHptyPtoKQFLTrr19OddU2hJf5iCIraaVEC6WZMdE5OWXOS6HkdMwY\nzVWjpHIaaYoqGINFx+SU4K4Uvlw6n9SP19OyBdeTp4mVnUN19dItLZt5ytL6WCkVs23jNEjPLN/T\nz2QeT9qcNpHXmmZn69rsk76r6dalX5fooJU/NhY+Vhtzm/HyMIr9IABcAODfAvgygBcB/F8ANpcW\nnPvx0lylQ0dz8shjXWiupU6vnPJS5ZQ6wfpyTnZxAubKKrVDjl4lzs9cGal+lYs+HKE57ddXOnk+\np8371jWWr89+3aX83D4VvkfziOleAHe0bfs3ANA0zWsXj729h4VMEbgD6f77Aw3tda8DXvOaAaWN\nKK204ffOncCjjwanbtsOYiJddpnuJNQ2VpdU2hwH1L59waH+zDNBF+n4itHqLIcc0Xiffz7urJPO\nK+1RRq6Ty0tV9MKqv+VsjTnIOXIe23gooRa1ls5btERLH6Ji8n7odTKW0CilLpzEIeukwet4jTmS\nNYc+J5RQestJ7dGhi2247rt2xdszhlwdrD5EVH2LJMKvDX4ddnrU5B1JoERr1Y4ZeV8J4I8B/O7i\n/80AHgGwsPj92pSMGM01tXqIrRq6RIEtnVnEZowls9Lc2YVHt9wVUWwGmIPc2WKfs7xUWalz/Hzp\nfhAlq4g+6t213UvSdl0te3Toqz+OcnWXe59JrbowohXEK5qmeW07WEFsBtz59wF4GsB5i//vBvD7\nbdv+XNM0dy/+vytDlyWYngYefjj8js3uH3ssRGV99NHBbO2rXw1O7v37gSee0KlwJ0+G2TkwWHnw\nFYqHYkrgtFSSw2fDsZfuYi/LWLMLWZcYHW6YL/R4Yc0KY3Ti2Oy3hPbnXYlYq7KTJ4E///PQr3Jf\nltNe9Ewhh+4roVHBPTK8Zcb6NF890Eqf/9b6Kre/Z7XalaYqaabaqt8jI9e2vMzYkwppX201MaoV\nxG0AjgH4V4ufYwBudeS7FMDvA7gBgxXEMwAuXvx9MYBnUnJyaa6zszr9kkd25VFgPRujc/mcxmhR\nTDUHIeUhai3PI6l0lJ/TdTVdPRQ8qYucdcToiDFIHTVbeB3glg45VEmJkhkk7ytSb40CLWVT/li/\nkvIs+3ltp/UBT16qw8xMXvt7+wufDXM9+HHevzk1WNqGH4vZX+oZk5HKp9FMS66RLhGS5b2Mw0P7\nxSic1KEcfBeAOxY/3+XM85sA3gjgOjZAnGTnG/5f5L0dwBMAnti2bdsyI1IHke8zaO8syAaenAxs\nIutmrZVBeeki5heVdiPQOnDs5mG9Y0GcZmImpAYWDdqAoA0YuXxx7QZj3Qy8j7diNxPLxrEB3nOD\n5YjdDHhdLNmxSYBVZ8t+XttpfaDkBtqFq2/JTw36vO4pTr/H/pqeloxUPkpD9tm9O78/lVxbMs+a\nGCCyhQP/GMAvLf5WB4jF/3+TkuWhue7evZyCSjdWopbyqIxE6yN6rEZB0yicGmXWE72UGlrSarWy\ntP85x2L10PQrpblKaqJGO8xZQVB7aXbT6Iyl9MOYTrForJaOsfypSLOURqujt36p9o0hhwKcmz6W\nlp+TUWxzKa855ZdcL13p27nXluxnmn20vqX13dU8QLwHwJcAPAfgLwH8HQI9tpdHTHyU1xw7Ml3M\nIeZ5RKGVkfMII6ajLEv7n3MsVo+cfB5oNuoiM0fX0vJi+UrtaZ3rIi939VVi99y8JX3eSmtdE54+\nVWqbkvbo61rJhcc+qfSrdoBYUtDSFcT7ANy9+PtuAO9N5Y/5IGL7EWizdjlzmZgIKw9tpqfFg+dl\nlKwgrL0j5OzIOxv37AUgZeXOGq2yZfldXyiy9LLklr7s51lBpPYZsFYHsg4pvVKzbE+dSl7ESvW1\nVN7YyihVLykrNkPusnq0+n6O3sePp/dw6RueFYR1f5D3mLU4QGxZdFwvAHgUjhfuPDRXbbaRGoFT\nq4FUutyZ6DBmhN5ZWtfVg2cm13WmlTtb7Hv1EiurD327lteXjD5myCX9LlfWMFd63jz0f5QrCE/7\neG3WZYDIisXUBW3b/iGAP1z8/dcA3tyHXBnNlFMdOQWMXpSjKK+cakovJ2k0SaKo8ZeY+EsztEeE\nFa2S6+KlE+ZEafREc7X04Oc8dNBYVE063uVFOQ8lUtJvOY1R0iNL4X1Rju+lwW2fSxfm5ZXQclPR\nfjV5kvZcQpX17kPioZha5acoxymdcyOpanpPTw9ebi2lbxO87SvbR6uHh/pNL/4Wo3RkGfXHWkHQ\naMnpp5IBIql0RIuVrCRtdsDZFpSes1sslgblpTSkS4pOqMmLlSHrb8mUenDZs7NLaYAecDlcvodR\nlZLpZTFp9cplYWmI0Vw9M7lcG/D0VvvE6hSzT4peWWpDj2ytfjnprGuBPxpL6dClX/C26NKvLZk5\niNmGM7S09FgLj5i6fqwBghtK45BrVDq6we/eHaeptu3SDka0WE51i3VA7SaaohNq8mJl8DqmZMqb\nOT+ndbIYrEGny4VkXfDWcVmvLm+7csRortoNWx7rMkBY7ROrU8o+sZtjqQ1zbrylA0TsWuCPFGM6\ndOkXfU18LJk5iNlGm9zVAYLBcnTROUmX498pB5bMn6LMyRmO7AwlDuUuUUpTDtOUvh6kbJQDjzOa\nl2vVJaZrKk2M5uqRWeL0
tdrI2yal9imxYW66VP8kOZ5rSVJePU7wkv4o69eVfOEtx0LMNhpRg9tp\n3Q8QBK/TJnY8dUyeT/336hhLE8uTW14XWTHEbFQqpy/dcuXkluXtR14ZJXUdRr36QE7/LOmbfVxv\nnnzDsp1Xrtc2WrouA8Qr5+bmOnoxRoMHHnhg7vbbb1fP0XaFL74Y4hG1bXAinzgBHDwYnDinTwNf\n/zqwcSPwC78AbNgAHDkyiOZKx6engfPOC/K2bAmyP/GJQbprrwWefRa4807g9a8Pcp58cvB/fDyU\nxfMfPBiOb9kyKHfDhhBrZcuW5XX5xCdCjKa77grnx8dDXb7+9VAvnid2js6TPidOLJdNZZKdeN29\n4Drv2RPqJ8vwQtozpZu0r3VMs4elm6eNuHytzXmf0dqlb/tJu2n6pto3ZjdL71h62ZfpeuTpud6X\nX27XQWs3j501u3jqKcuL2dcDq0yrP2r3Da18aeOrrw7ypqfD/Wx8HLj//vm/mJubeyBfa5wZKwga\nNemZnDaa8md2sW9rhmKlK53B9zlT6joL6To76mv1oOlSMkPsS4euq7bc5/ld7DfM/lGa3tOWXeru\nsXNf/aPvfp2bPvfa5cexnlcQNIJeeGG4pC+5JIymbRtmY+edN4h2CgAHDgDnnguccw5w1VWBGvj0\n08DWrcA11yyftdEMnad/9lnglluAD35w6eokNYOn2eZzz4Vz+/cvn41YM4XYDCY1443JkLNXmnXk\nzPz5KkausHJgrZ7kjMiagfEVY8lKiED2vOKK8FtbtcVWIZTf6lMSfdgv1j+6zlJLyuTlUttp7UIy\nbrkFeOqp+MrJ0nf/fuDSS/X20FbYnlWkLDdnRVhiq1j606ft8mX9tGtg3a4gOEOBR2blbCaehrz6\n3MNPv2Oju5aeB2LLYTikWBV8VmTR1ix51nmyQ2w2ZbEhvKC8Y2N+W0hYdefnpG6p+pU43mMsJglN\nvqdPSRkWE88LzXapunN7pWiTGkrZSdo5y9ax9o31lxI9LVA5dM2XyMnVQbsurb6h3Z94OVgLL8oN\nA3yT87GxsKH3+eeHUVRuhH7++cCpU7asiYm8F6xOnRqUMT9fprNW3r59eRucp+rHN1/XdkDj5e3Y\nAczOdn/RrBSxuls7wqXq12Xz+e3bgbe8JW6PmHxvn6I23Lx5ad/NgWa7VN25vXi/BMptVgrL1rH2\nzQG3m0oAACAASURBVL1WSsHLGRXkdTk5Gb9vDAtr+hETLRXvvBP49KfDsZ/8yeCs3r8f+OhHwyOX\nZ58FfuInwvEDB4ArrwzLN3pcdNll4XPddfrScePGpekvvDAsAQ8dAjZtAj75yZD/7LPTy09aEl50\n0fLy6C1LWlbyZTEtHwF9ibl1a3gEdvCg/ThkfBy4+eal57dsCW+Gk+y77srbUIXb6MgRYNu20KE9\ntpDgushHdidOBPlXX71UJl+KX3XV8vrlPk6gujz5JPDud4f+lGpPKZ9ssX07cOON6XKpDV/zGuCs\ns0IfuOoqn66EEyeAY8f0x3NW3bdsGdiL98u9e32P6MhOBw7EH5tY6RYWwrWzYwfwsz8L3Hrrckc2\nf0z14INL30DesgV4wxvSj268emqga/KWW8Kj6GuuCbrmPmKyrl8LvG4HDoR+dORIOCb7FJe9d2+w\nFy9nfn6dPmKSyzGvEzrleI4t91KyPI8GPI6lHOdaVwdaHzK0NujToTfMunvKL8lf4pQsLXcl+kBX\n53cf7ezRoYttSq7vvvTQ7gceZ7wsB+vRSc0dVlu3htH1mmuAt789zPLJcXXnnWE2dMstA+cyOcMO\nHAijLTmgNSoed5xq6bduTTvKODRHrHb86qt155o8ZsmTZaZofblONIu+y9sg19Eao2NazurULDmX\nuiltcfq0nj8ml9vittt8jv8UeSFH5xw6J69Pqh/xtAcPDlbnqXa2+laMYOBpZ2/f99ZLAyepcAp9\nLg28hGbM68zvcffcY1PUp6eX06TX5QrCGjFTM40UTbXr/xS6zqZKyh/GLCumR+mMbRR65uYpmbmW\n2qKPma5nNt617NxZdUm7rlS/jskoXUV0XWnFznnshvW4guAzSqKw7tkTVgecDkYzE5rt7N8faK40\nC6eRmSii9KKJfEHGorXSCkK+2GXN2ixKqnc2JfWjGc6LLwIPP+x7Wc56YSf2wp2E9QJelxflYpTW\n2Cys64txEnzGe9NN+ouPXA9p0xzqJqErlVKbpefUPaf9JfU7VbfYS14HDw6uCf5CXeyFTj7DTuks\nr99cCjdvl02bfPWV8NrWuk7JHhbNVaMS1xflFsEphTLaKqeI0bFcmquWn88krCBaFlUwVZ5Gg41R\nXGdnB9FpY7MUSmtRWbsENdOikebQLSXkLCimW0rvLjRXjZKo1U/qm0OTlXUopbmWUjk1GqmH5qrZ\nIVc3kiHrbMnO6ReybG9bEOT1kgqImYI2s4/1Te2a4vcgqWvsyQjWcywm3gF4tFXJ6aZz1gAxMRFv\nKIrgKqOxamF4YzfiVHnaxZG6wPjAmOK8Wxd/zgVv2YgCouW+bS4hL5yYbim9Sx4xxG62Wv2kvrLP\npOwpb5Rd2yAH3D6pvsuRM6HwvMMjJxRWVF9vv5Ble9uCIK8XCohX+rhKGwxifVNeU/IepulqTcrW\n5QAhoxbyKKIyoqiM6BjbIjMVxdGzmXksgmROdMtUHm071NKoll03ZY9Fc/XKtmZUsZmWJzpu7gqi\nr+i5qS05ZXmedszVOVV/fl6LBFpSZqwMecwqy1MfT/meKLcxnbl+pddISb/OKT+l17ocIGLOmZTj\npjSt5791LKa7B6lyPDJjaUp00vKn9m3wyOhql67oYqfcNinN49UrR15u2V1sX3qN9NHnS23Sd9vk\n5i1t4y4DxJp3UnMnqRZxVYtjQo5lciLyc6VOau4EkzK4c+3ECeDDHw51sOiMmtM15oTUqG2avJiD\nt2v8ohgd0OssTUXC1WzjccCX1sWKnqvRLalcTk+mWGApWiPvU3v2xGN7WfBQSVPUSkm4KC3Tkp+K\nobVhg03GIFkpKrhE11hMGhkml4ZcQpTQqMRWtNsUxXldO6n5COp9kS2WPiVbjuSxmbOUwdPnzp76\nmtXm5s2BxxYl8vqyQ9eyvWm7rhxGuZLqUm7XVatM431hNXc10Ee/8Fy3fSJ139HSWnbBelxBECQN\n7fLLB3Q0TrmkMBl8JH7b2wZhOTj1VQvlQGk5dZHkpyhw8oWXT30qpH/nO+0omHLWk4rmGpvJeSJ9\nclqcdyZu0Q699EePPKkbn1la9NLYngh9vTQmVw3WatZLa+Sz4dyIrjF9U/X2rkI1eFYQ1mpMs58W\nYoantfZqSemcS+HW7MZffuQvqnnQ5WVNegHXah9rzw2+8njooXW8gqCRkhgg9M3pqBY1lSKPTk3Z\nsxdJk+PfqSiLqUifMcaJnBV4ImJa8rgsyzlIaaStYtDkSpZXDjR5nH5p0R7peF+ri
xSLidfTms3l\n0lW7UF0t+/BzJMtyjMZkWOgSzZX3N9LHYi9529+ClOtxUudcfymU2Nbq295rYOnTkXOOtoX33RW/\n8Xs/1gChXVj8/Qf5m1NTaYDg703IBpQ0OaKayZupN0S1d4CwqJMlA4R1s9VuHDk3eE1uzgDjkUf/\nLdqj98LPYTN53oOw6MIWdTMFObDk5I1RTi1qqJYut926DBDaQGvdBL3tbyFnIiH181x/KZTYVuvb\nOdcAp+UCYy+2hffdNf2ISUY/veii8Pho27YQFfTZZ4F77x1Eb7333vDog6I7vutd4dHSxo2DfDLC\nKr2NffbZYQm3fftgqfmzPxseM1nRWTWn68c/DnzjG8Cb3mRHhaR6UeRKHvVSRmulc7FosjwSqrWc\np0ivk5Ph+H33paO6UjRQ/ubr3r3AX/5letmvLbs1eRRdVovaSdFfycaAHvGVy/Ys8Xn0Xvn2LT2u\nAPTItzwKJ0UU9jxaoEcKc3OhT2r9yUIsqqmst/VYSIsGm4I3QqkWTZX0Ihn80Rpdp6n6eXWmOpNc\nj9NY2q1LRNgS28o8gC5DS3fkSIgAQISHxx//6S/Ozf3ML+dpvYjSkWXUH20FoTnYYo5qGnlj+VKP\nJ0odZTxdrlPN8+jEq7/neC5S9knl8ZzzyO27PpZNU+V42iSme0k9utq91HZ9tEtpH+6jr3nRRUYf\n7eltNy0d1quTmschOnVqqYNac0oTFZUcOJzmajmZZYwcSaFNRbWUzrjnngsriFe/2t4eVIuvYlEf\nPdsu5kTTjM12LWebd+N5rY7aNpjS8eh1wsZomjmOwphNLaegRkn2RvgttaGVvzSyao4jNydCaSqu\nkhYR2doXQm5XmtpqtzSaa9ftWDly8mrUZy3mm1U3WdbCAnD//Q9umJv7qfflab2I0pHF8wHwnQD+\nAMBTAD4PYN/i8c0AHgGwsPj92pSslJM6tmqwVhclNNfcvSCsET535tNl5jfsNF1nv6kyvKuN3FVJ\njg7WuS4z25J6lOjsTdeXjTzpUvXN6Y+e1UnXldGoVhCWXbw20vV+Y9uW3sNLM7qEAxcD+N7F3xsB\nHAfwXQDeC+DuxeN3A/jfU7KsAeLw4eBkvuGG4EDWwj3wUAazs+GbhzagfDzcAI9pwtNQ3sOHB2n5\nMQ4tLAPpOzWVDn/gec0+FfrBkqfV0zon5cTCInQNR6CFXojpI0OqWHm94SNS9tRCs5SGFvHUw+tc\n7yOcSU6YD295qZAZWmgWrz6ea6mvUBvWNe5BTn+w2t4b0kfqffhw23ZxUg91gFhWGPA7AN4C4BkA\nF7eDQeSZVF4vzXVychDdVAaPkyymqanlERK1UZkzGLTgfBZrJTbCx1gqlIYzZbqwmDS5sYBzubOl\nmH280OrMoV3Umi29rJ2UHqngf1xPWUauDXh/9NQxJqMPlk0ODTSlV649+UCeajtPnT3tFYN2f/Gy\ny6SMkrwpdiGvC283bhushVhMAMYAPA/gPAAn2fGG/xd5bgfwBIAntm3bZhpQ0lz5TV82MDc2pZM3\nfHlxxG6AmnzZuB7KXqxT8N9dBohYZ7M6mgd9DBBanTm0i1qzpUyXW5dU+2h6dqVEyv6YqmNMRh88\nfc8N1KtXrj1zBipPnT3tFYN2f8l9zFRCcyXEJnW8frLd+hogXlXkuMhE0zTnAvgIgJ9q2/alpmn+\n/lzbtm3TNK2Wr23bBwA8AAA7d+5U0wDBSUNO5KYJ31ddFSiXDz4YHNa7dgXq5s6dwG/9FjAxAfzo\njwIf+lBwjAHBoTM+Drz//QPZCwvhe2oqfO/du/QcxWi55BLgO75jQLeUVFVN35MnB2Vq2Ls3OM9J\n5qZNobw77lgqd+9e4HOfW6qbhvHxkG9+PtiEnHBAkPvYY+Fb2iCFgwcHv3ft8umi6fb+9wdbfu5z\nQQ/CwkKw1czMwBaU5957gXe8Y5CezvN0uXpImRp425DuCwuhbWQfSdGFqf3e9jbgN35jebk7doRP\nrE5aH4j1QQ6SOz0d0ktbahgfBz7ykdCXYv04x54A8NJLg7pq/ZDXife7WPm8z1tyPfV86aWBQz4X\nnjaU4P2e7mUkg9uB6sKv4bExLumcs/M1XkTpyOL9ANgA4BMA9rNjvT9iStFbZTorT0q+JdfrZNPk\nWWV6z5UsmT0z9Bx46pUjx1u/lN4l9SotT+sXOW3SdyTc0sdrfdnKkybXZjJN1/K96CKjNG/MNp72\nHtybVqkPAuHx0b8B8H+I4+/DUif1e1OytAGCnrVNTdlOZHrswR3Tu3eH5fx73rPUea0tOaWTWnOA\na46ymEN3YqJtr79ed8zNzi53qKacVR4nmOa8ssrwwHJylzrzYs7kWP34uZiTOufZr7c8S3fL8Rqr\nu+xjpHMXJ3XMIa3ZKUfnnHxee+b0Y7KNh6BRWi9uoy77pZTmTe1bozmoNbLDqg21AeBaAC2AzwF4\ncvFzM4AtAH4fgeb6KIDNKVmxF+U8L8Xl0lO1crwjuUdWziw+Ncsa1szLU4++ViKjnrX3qUsftu1a\nj5IVS5fVRk7Zw2q/Ya4e+rxehrGCsORqx7AWnNRdP9YKgmattOVoagXBR2PPCqJtl+/4NTW1nELr\npbDNzARK7sSEf2eo1Cy5dAVhUeS8s15tltuF5mrN9GLnUrbJrVdKpmeVUmIDWlnu3u2n5Fo6p1Da\nh6z8qVm6l6KtzZI912Wq35esIKTs1bCCoP4h+6K24uTH1u0AQUbifoXzz2//fnXA6a4a04aC9VEe\nbYQ/fnx5JFeLEZXqgDS6U3mSfcFXNlwWP67dPGI0SUsHrrtkZOXS8Xg+bvNckByN6moxVmJ5tDp7\nwMsqmf3l9Im2XdrHSmepMUZP6kbLr6GYHQmazWN2SrWdpJ/HKOfeOmt1y+nXso599Ovc60q7Z2n2\nkHaQrKkuA8RIWEzDxKFDwNGjwPnnh3Abp04BmzeHYy+/7JNx6lR4nV1jGBw6BBw/Hn5v3x4C4h09\nOkg/Px/OHT8e0sbYEfv2BZbB0aPp81wWP/6Odwzyy7KOHQM+9jH9HC+Dvkn3VPkp8Hxem8d0O3kS\n+MAHwu+UDp48XZhNXfJ6+gSwtI9NTJQzsGLyY/aka2hyMvxP2V6zeYmdZH+zyuliD6ob3RO8/VrW\ncWKiXIfS60qDp3/w9ty3b9BGRSgdWUb9ib1JTY+LYm9UW4+YPG9iklzuQORvWKZkSHljY+HjfSua\nH485cVPOds9jhdhsM/YIR7NzLkod1aWPESwd5GPEXHje8E2l7/JYTCL16C3XuR5rJ61Mj25Udszu\nuY+2eBoeLSEHlP/66/XHwjkyujzm8j6m08rCen7ExJdvmuNZnucOH4+T2kqbI0OTV+qkjsnMddb1\n5RTs4mj01qP0XKkOXei6fdi1r7bJyeOVkdvfctLn0r9L+34OYtdsTv7cvB4betqwywCxpqO5Aku3\nBH3hhRD18JprQix0ioR63nkhcqu25egL
L8SjQcptNGNyeaTG2Gbtzz0XfvPNz2NRJ7tsKSrroW3e\nzrf0jEW7tMrpGok0Vf9URM6+thoFBnb61reAs87K36Ce9ElFGdXKfPFF4OGHQ/vwbUxz9M7ZVlNG\n8/VGPfWUJbfZ1dqH2obstWNH2G/BilxsRXRN9bcukVip742Nhb7dtn77cr1z20bqbV1Xsm7aNX3/\n/XXL0SWOam3E1UZjmadkJC+Z9efOhPpYQZTqmpu/j9lSzrmueWPyus4Y+1pZ5pa7Uque3LSp1XkX\n2aW6xvKO0r7esj3HUVcQYTXwhS+E8AfvfGfYUYlG7Y0bBzOUPXvCb9oD4tJLg4zYTJHPCG+7bekM\nh282z2cHqf0O5Kb2sdl8aqbvnSFZ+z/IOlh7Q1h16mMFEatjlxWUZ/cwTd6RI8AVV4TfObM+at8L\nL0z3K63Ma64B7rknb4bKy7VWAKmVVO4sV9stUZMf0yu1Ok+12cIC8OEPh9+p67d0BcHtwveCGeae\nEMByu8XuM9pxXt5DD63jFQRRvIg6yumFMgja5OTgGFFVrSiN3CHGKWZTU8tnPDmB0iTFVuaxZEnq\nmiePBK8Tn71JimHpbJLbtyRyJZelUSg1mTG7lEL2qZJZn0Vl1lBKxdR0tsrztGlOPya9ZRvl9k2N\nyqn1fSpHyxurVx/9o0t/KNVB9nnv9U+24tdhlzep1zzNlXDq1IDqSvRCAqen7tgRRlqivp04oVPg\nODWQ49ixIN+ixQ4LkrpWKoPTEi2KYS69kMvi9i2h9FkUSosm2IddLMTozxZSVGYNpVTMHPRBGZWQ\n/alv+Vo5ml1i1M8++0dJfyjVQbs+PTLIVjMzg+sw7HBeiNKRZdSf2ItyPLYSpwtKCp18Y1PbMIjL\npVkLpyFqVMAcimGKdmdR4nLjA2k6WPGXcmMHafI5Ddei6mqzQK+uMZofpculhlrIpal2zS/f7B/l\n27ocuXRMT5k59OQUHVceLy2/hEKs3Ve8+UtprjGqsNXvtSgAqzYWU58fa4BoW9vZJc/HHNY5Ti6v\no9o6V+J0HaYTu4vzTebPdaZ5dRm2M7JPOV0e0Y3SCdpVTtc2GYWdS66/lIyuDucSeHXQjmM9O6m5\n03fTJp2Kxh02p08vdf4QBe+WW4APfjDtZJN0wBQ9UHMM79wZp8aSk447bGNUwZSjLuWAzKW7WvrG\nKI0eZ3EXqm/p5vQaPA7FmNNX2zg+5SA+fVqnTHtpuprOuRRfrovVTlxmioqbahPZLy1yhFW+1ddi\nZXBCiMcJTv1bElNyiA999E3Zvh7KOR2fn1/HTmo5+0rF1LdG4pLVR2wk9+jax0qHjvcxu+46syqR\n4S1/2KugXFk5unZZPY0ibw6GucIYRvpcnbuW1Wf5MRl1BSHgobm++OLgm1YLcgawZ8/S2QDNQng+\nbUSmFYY2m/DSA6WuOS+9EE1Xm1l/6lMDem/u7NqaEaZmc5rMrjOl2Mw9NUP2zGa9s2nPi3cxKmZs\nxRrTTVtZeme7ms4WrdkzO7fS5MiUdtDS8jScmm5dS7npLdt4sFIUV2B5W8iVkHWtavejdb2CkLRR\n+p6YWEr/4vtUa9Q6i8JGIzLRYaUsrkMqkqaUZUVt5TpIept0kPEVRAnFMRVp0ztj74OqyWVa9eSy\n6VhOBFLPLC7WnqkypR1IlmdWnEONzdE5V2/vTDenzBRFkyIRp+wly/RQc3Ppu1od5DVfIqMkmizP\nY9VD0lylrbGeo7laOH48vHwzOxsokxQt04JFYeN0M4p2mZLFYVFLNUqjRhWUFDlJ+du3L+zjHNOp\nNNJmLI883wdVk8vU6imprjnRX/uiYabKlHbYsSP0wVi5JdTYYendJ12Yl5mSTZGIZ2bS9holSI/n\nn8+77qWM3GiupRFge2/H0pFl1J8YzZVoo7T1KP3mtFaNpnr4cEg7NpaO6MppsXLDH2+kRSnLmvXG\n6Kcaxc5LcSyNhpmanXMdulA1NeqethmKRtP16OiFtjlLTFeZl/cVD7WR2sHaSMqrcwnVWeqQQ8X0\n9jvP9SGjAHspr556d6WY5tTVK8+T1qLnSx1SUV+xnqO5tq3u/LUcTNpv7yOAVMRYj8MoxxFXsiVq\nTrquztguOnvLipU/DCcslzsqx+awbJdz3pumJL3n+ijpm550pX2kVJ8+4Ll3edITugwQa95JDQzi\n2GzdGpb0bbvUGc0dTBQr6JZbgKeeGsTM2bMnTnMlGu1VV4VlMMk7fdqmmXaNPrl/f4gVlXII5sRi\n8ji6PPGXOLhzmtumS9wbTkm0HJEeJ2gJYo7/nJhGe/b4YgrFyAtdaK5Sry6UVA0p+rRFE9XiCEny\nCPXNFPXaU++SSKqS+m1R04cBi5zCab28v/PrWKP+rmsnNR9BrVVE7FiMHstlp1Yn3tlF6Uyur5l0\nzmrHi5JZc45+pTOnUsTadJgz8RRF2yOjT7t3zdfH7HclVkZa3r7jfeWUra1icq5j1BXEIPrmt3/7\n4GU5mpWcc459TFsVyFUARZwkGXxFctNN+v4OFjwzGm1WlEsBlYjRM72zR2s220c011hdLJt5Z+u5\nL4zFIqsOY/+B2B4UXWiuHH1Hc/XQfb007RhtNVX/Ye0HIetn7fviRW4f1K5JaSuL5iqPP/74e0/M\nzf3ML+drjTNjBSGprlr0Vo3mqqXTaIAyPdFUaZTOodFxGppFOdRmAbEyulL9+CyphCpKx2N0Ri94\nGRpFODZjshyB3voRPLbqszxepqyj17nZNZprLs01J4KwJVejcOfo4ElfQt9t2zI6bQxdVpVE0LDo\n7lI2HRv0qbEX28L77hlJc5XRW2dndZqali5GmeTpS2lkng3ihxkZU4OXUmfplUNn9OhC355ooTFq\nLE/T16bxwy5PRiZNRTIt0VvDsKLixuRqFO4cHTzphxntNwddIiRr0Vxj1waP6Do7C3zgA1/+cqne\na/4R08IC8PGPh98/9mNhC9ELLgDe/e6w1ALCEm379rDM3bgR+MEfDMvFd78buPHGwfF77gGuu275\nkpY2HLrgAuDtbw959+8HPvrRsKwbGwtLvwMH0stPenzxhjcAt9+uL89paX3o0GBJSrFqgOWPAEi/\njRvtLS5j+bdsAXbtSj9i2LIFuPnm5ecoHtVLL4XHdQcOAA8+WOYwPnEi1OXqq5cul4HBcctW1uMO\nqp83fs7GjaE9Z2aWExe4DazHHydOBF5/jsOX2nD79tAnUxtPWTpbfdBqOwI9YrroonANpHSO9Sev\nXHmOHKz33hv6ESH1aFOm71Ivq35jY+lrLAberz155TVJj7WprrxfAAPZPDbcwYPArbcC8/P/8oV1\n66TWHNQpZ7LmcPY4wiyndomTuE9HdReZOedTcvtwGJc40ft0UHN5pY/LSvTxtmGfZXaV0YcDvdTx\n2lf53nwr1T6evpiyIdazk1o6qPkWouTImZ4Obxt/4xvAm940WAWQU5U7JDW6J6c9vutdwLnnLt0m\
<remainder of base64-encoded raster-plot PNG omitted>\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from bmtk.analyzer.spike_trains import raster_plot\n",
+ "\n",
+ "raster_plot(cells_file='network/mcortex_nodes.h5', cell_models_file='network/mcortex_node_types.csv', spikes_file='output/spikes.h5')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "In our config file we used the cell_vars and node_id_selections parameters to save the calcium influx and membrane potential of selected cells. We can also use the analyzer to display these traces:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZgAAAEKCAYAAAAvlUMdAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXd4VFXawH8nPYGQ0EsChI4goFRpVlRUBHuvnysqtl3d\ndXV1FV1d69qxoNgVBEV6UZDeey9JaGmEFNIzaXO+P6ZkZjIzuZPMnZkk5/c8PGTuPeW995573vO+\n7znnCiklCoVCoVB4myB/C6BQKBSKxolSMAqFQqHQBaVgFAqFQqELSsEoFAqFQheUglEoFAqFLigF\no1AoFApdUApGoVAoFLqgFIxCoVAodEEpGIVCoVDoQoi/BfAnbdq0kQkJCf4WQ6FQKBoUO3bsyJZS\ntq0tXZNWMAkJCWzfvt3fYigUCkWDQghxUks6XV1kQojxQogjQogkIcSzTs4LIcSH5vN7hRCDa8sr\nhGglhPhDCJFo/r+l+fhwIcRu8789Qojr9bw2hUKhULhHNwUjhAgGpgFXAf2A24UQ/RySXQX0Mv+b\nDHyqIe+zwEopZS9gpfk3wH5gqJTyPGA88LkQoklbaAqFQuFP9LRghgNJUspjUspyYBYwySHNJOA7\naWIzECuE6FhL3knAt+a/vwWuA5BSlkgpK83HIwC1TbTO5BSVMWnaBh7+fgfpeaX+FkehUAQYeiqY\nOCDF5neq+ZiWNO7ytpdSZpj/Pg20tyQSQowQQhwA9gEP2ygchQ6sT8pmT0oeyw6cZuLH69lxMldT\nPqNR8uqigzw5axfHsop0llKhUPiLBj1NWZo+ZiNtfm+RUvYHhgHPCSEiHPMIISYLIbYLIbZnZWX5\nUNqGT1peKXd+uZmnft5NZoGBkzklACx6fAzNw0O4ffoWft2R6jRvfmkFxWUmfb8r5Sxfrj/O/N3p\nXPPhepbuy3CaR6FQNGz0VDBpQGeb3/HmY1rSuMubaXajYf7/jGPFUspDQBFwrpNz06WUQ6WUQ9u2\nrXWWXZPmUEYBn6xOIiXXpEh+2Z7KhqQcFu3LYPz7a/l5WwodYyI4Ny6GeY+OZkjXljw9Zw9vLD1M\nlbHaQ7nr1FmGvbaC4a+t4NuNJ0g6Y7JaZj80knM6RvPIjzv5YEUi6uN3CkXjQk8Fsw3oJYToJoQI\nA24DFjikWQDcY55NdgGQb3Z/ucu7ALjX/Pe9wHwAc9oQ899dgb7ACd2urpFjqKjiri+38NayI1zx\n3lrm704jKauIzq0iWfLEWFo3Dyctr5Tm4aZ5FLFRYXz3wHDuHNGFz9Yk89D32ykyWyzzd6djNEoG\nd23JSwsO8M9f9wFwfpdYZk6+gBsGx/HeiqM8PnMXhooqj2RcsCfdqgAVCkVgoZuCMcc/HgOWA4eA\n2VLKA0KIh4UQD5uTLQGOAUnAF8AUd3nNed4ALhdCJALjzL8BxgB7hBC7gd+AKVLKbL2ur7FzMqeE\nnOJy/jm+LwPiYnhy1m4W7kmnbfNwerZrzm9TRnH78M48cnEPa57Q4CBeve5cXp7Yn1VHsrjp042k\n5JaQeraUHm2b8+39w3lmfB+79OEhwfzv5kE8e1VfFu/L4ObPNmmeMPDuH0d5YuYuLnt3Dd9tOqEs\nIIWiFs4Wl1NRZfRZfaIpv5RDhw6VaqGlPTlFZVRJyYH0Au7/ehu/PjKSgfGxvLn0MF+uP85lfdsx\n475htZazLjGLKT/uJCw4iJzici7u05Zv7h8OwJ6UPIrLKxnVo41dnhUHM/nrz7uJCA3i07uGMCyh\nlds6bvhkAznF5XRv04xVR7KYMLAjb944kGbhana6QnGmwEClUdIpNhKADUnZ3PPVVtpHh/PmTQMZ\n26vuIQIhxA4p5dDa0jXoIL/CuySdKWTMm6sY+fqf/GPOXgDaRUcQGhzECxP68cvDI3nluhphLaeM\n7dWWeY+OpkVkKAAx5v8BBnWOraFcAMb1a8+8R0cRHRHKHV9s5sctNRcLSymZufUU01YlcTSziKFd\nWzHj3mH848o+LNmXwXXTNlhjPN5i1tZTXP/JBr7ffBKjsekOyJoaOUVldrHEhsSR04WMfWsVo9/8\nk5fm78dQUcXqI2eoMkoiwoK5e8ZW3l9xVHc5lIJRWJmzI5WKKiNXnduB7KIyAFo1C7OeH5rQijjz\naEgLPdo2Z96U0dw2rDPXn+84Q905PdtFM+/R0Yzu2Ybnf9vPv37bR3lltUm/4+RZnpu7j7eXH6Go\nrJLYqFCCggSPXtKT7x8YQW5xOZM+Xs+SOs5Myykqs3MhlFcaeXnhQfam5vPvefv5y3fbOVtcXqey\n/cXGpGzmbE+hpFzN2tfK60sPMeTVFVz09io2Jnnmac8uKuPXHalkFZbpJF3trEvMoqzSyKRBnfh2\n00lu/mwT6xKz6dshmsWPj+XmIfFEhQXrLodSME2QKqPkszXJvPfHUasiATiRXUy3Ns34+I7BfHrn\nYJ68rFe93U0xUaG8ceNALu7TTnueyFBm3DuMRy7uwU9bTnHnl5utL+v+tHwAvr5vGBf1bstl51SX\nO7pnGxY9MYZe7aOZ8uNOXlt8kEoX/uYl+zL4YfNJu05324lchr62gpGv/8nyA6cByCwwUFpRxevX\nD2Dqtf1Yl5jFhI/Ws/PUWY/vhT84kV3MnTO28I9f9jL+/XXsONkw5PYnxWWVfLH2GGN6tiEsJIg7\nZ2zhneVHXLYlRx75YQdPz9nDpf9bzbxdjhNn9WP2thSu/Wg97yw/wtHMQqIjQnjv1vP44p6hnMgp\n5vDpQpqFhxAZFsxbNw3kwbHddZdJKZgmyOojZ3hj6WE+WJnIuHfX8Lu5Mz2db6Cj2UK5akBH/nZ5\nb7/JGBwk+Of4vnx0+/nsS8vn2o9MCzlPF5QRGiy4qHdbvv2/4TVcbR1jIpn90EjuGdmVL9Yd544v\nt3Cm0GCXJulMIVN+3MkL8/Zz9QfVne6GpGykhJjIEB76fgcvLzzAKfMMtY6xEdw3uhu/PDwKIeCW\nzzbx5bpjAT+xYMfJs0gJU6/th1FKbv18E5+vSfbI1Sel5ImZuxj66h98/GeiT4PE/uBETjFGCXeM\n6MLix8dyy5DOfLwqiTu+3EJmgcFt3vJKI7tO5XFFv/b07RDNX3/ezfO/7fNodmRdeef3Ixw+XcDH\nq5KYvT2V0OAghBBc3q89ix8fy+ierblmQEcAhBAIIXSXSSmYJsjx7GIA5jw8ks4to5j8/Q5eXXSQ\n7KJyYm1iJYHAtYM68esjowgLCeLWzzfz2Zpk2jQPJyjI9csRFhLEK5PO5f1bz2Nvah4TPlzPthPV\nuwwknTFd/7+u7kulUXKLudNNziomLjaSJU+O5b5RCXy94QR3frkFgLbR4YApfrT48bFc0rcdry4+\nxEPf7yC/pELHO1B3SsorWbrfNHi4bXgXFj8xlsv7tef1pYf5y3fbyXXh6iuvNLLtRK51mnlGvoEF\ne9KREt75/Si3Td/coLcGWn3kDIv2p
ru0SCxKpGNMBJFhwbx500DevWUQ+1LzufqDdaw56nqBdqY5\nsD7unPbMfPACHr6oBz9uOcVNn23UdTp9oaGCM4Vl/O3y3vz4lxHERoUyqkdr6/kuraP48S8X8H9j\nuukmgzOUgmlEbDuRy9cbjtfq+03LKyUqLJihXVvyyyOm0f6X64+TlldKeEjgNYn+nWJY+PgYLu1r\ncodpHQ1ed34c8x4dTVRYMLdP38yM9ceRUpJm7hxvHtKZJU+O5Qpzp7twTzqdYiMIDwlm6sT+fHqn\ndXNvWkVVx6JiokKZfvcQXrjmHP48fIZrPlrH3tQ8L16xd7jryy2sOJQJQERoMDGRoXxy52BemdSf\n9YnZXPPhOjvFa+Ffv+3j5s82ceFbq1i6L4PUs6b79d6t5/HBbedxOKOAqz9cx0pz2Q2JA+n53Pf1\nNh77aRcTP95AYmZhjTRrjpgUiG388YbB8Sx8fDRtmodz71dbeXv54RoKKjmriKdn7wGgZbMwQoKD\nePaqvnxxz1BO5pRw9YfrWLbfdWywqKySf/6ylxfm7avVUnLEMrGle5tmjO7Zhu3Pj+PdW87zqAw9\nCLzeRFEnjmYWcuvnm3h54UEueWc1c3emunTfpJ4tJS42EiEE4SHBvDLpXD6+43yCgwQD4mN8LLk2\nYiJD+fzuIbxxwwBev2GA5nx9O7RggVk5/WfRQR6fuYvEzEKiwoKJjQqlRYSp0315Yn8ARnSrHvVd\nNaAj6565hM/uGkK7Fva7Dgkh+MvY7sx+eCRSwk2fbuLbjfqvxdmYlM1t0zfx9vLD1q13nJFfWsHO\nU3lc2rcd39xfPa1cCME9IxOYO8VkFd42fTOfrE6yusyklCzbf5rhCa2IbxnJIz/u5K4ZJisurmUk\nk86LY+HjY+gUE8kD327nlYUHKausVvipZ0u45J3VjH9/rduRvrfJL6lg2f6MGu5QyzWVlptkPJBW\nAMA/ruxDZoGBaz9ez+xtKdbnVmCo4IctpxjRrRWdW0bZlWOZgHLbsM5MW5XMHV9s4XR+dX0vzT/A\nVrPCtp0MY3FRdWvTjId/2Gmd1eXIj5tP8vP2FGZuTeGK99ayYE+65uv/duMJgoME/TuZ3t+Q4CDC\nAmCw6H8JFF5h+4mzGCXMuHco/Tq24KnZe3hi1m7yS+3dNym5Jaw5kkWv9s3tjk8Y2InEV6/i7gu6\n+lJsjxBCcNvwLow/t6NH+VpEmJTTs1f1Zcm+DGZtSyE8JMjqgxZCcO+oBHb++3KeuKyXXd7OraIY\nf24Hl2UP7tKSxU+MYWyvNry04ACP/bSLAoN+LrP3Vhxl87FcPlmdzBXvrWXrcecbjKaeNbljbh4S\n73SCxblxMSx6fAzjz+3AW8uOcP8328gpKqPAUElRWSWX92vPr4+MYsrFPayz+NqblWz3ts2ZO2UU\n941K4KsNx7nhk43WTUtXHcnieHYx2UVl3PvVVv71274as9dOZBdz3bQNPPjddqcWRF3475JDPPzD\nTi56azVfrT9uF2N6bu4+znlxGfd8tZW1iVkECZh8YXeWPDmW8zu35Jlf9/LXn3dTaKgg7WwpVUbJ\nPSMTnLphI8OCeePGgbx36yD2p+dz9YfrWH3EtFvVsawirj8/jnXPXEK/Ti3s8nVpHcUvD4/igTHd\n+HbTSW78dKPVVW1hb2o+3do0Y8VTF9G9bTOemLmLp2bvtroqbSkuq+SnLaeslvPW47lcPaAjnVtF\n1UjrT5SCaSSknC0hNFhwSZ92zJx8AX+/ojdL9mVw9Qfr2HIsx5puzdEsyquMPOUkgB8U5JvAnz8Q\nQvDwRT344YERAFzat32NNK2ahdVp1BcbFcYX9wzl2av6suzAaa79aL11tptWdp06y6XvrOauL7ew\nL9V13iOnC7nrgi7MeWgkIcHCas3YBt53p+RxzYfrAWhjjh05IzoilI9vP5/Xrj+XTcdyuPrDdSw0\nj5rbx5jWPz0zvi+zHxrJfyb1t24LBCaX29SJ/fnynqGk55Uy4aP1zNmeQurZEsKCg1j3zKU8dGF3\nZm49xQSH+/H1huPsTslj87EcrvloPTMcFIJWVh7K5JcdqRgqqjh0uoC42EhG9mjNK4sOct832zhT\nYMBolMzbnUa3Ns3Ydeosi/ZmYJSmXSTat4jgh7+M4OnLe7NwTzoTPlpvnT3YIabGPrl2XH9+PAse\nG0O76HDu+3obry0+SHq+gc6tolx28mEhQfx7Qj++vGcoaXmlTPhwHfN3V88yyyoqo210ON3aNGPO\nQyN54rJezNuVxtUfrGOXw6zFVxcf4l+/7WPixxv466xdpOcb6Nm2uWOVfkcpmEZAZZWRlYcyiY0K\nIyhIEBwkeOzSXvz6yChCggW3f7GZd5YfoaLKyJkCA0ECElo387fYfmFUzzYcffUq/nuDtgWjWgkK\nMimwWZMvoKzCyA2fbOTLdcdqdJw7TuZy5Xtr+cecPWTkVwfKP1uTTEa+gSOZhVz/yQY+WGE/Wyun\nqIwJH62jwFBJXGwUQxNasfiJsdw0JJ5pq5K58dNqK+K3naYdrYd3a0XvdtFu5RZCcOeIrsybMppm\nYSG8MG8/AC2jqid7DO/WirtHJjjNP65fe5Y+eSED42P4xy97+XzNMVpEmqbCPnf1Ofz4wAhKyqq4\n/pMN1tlryVnFDOocy6q/X8yFvdrwn0UHueerrXbuJgtfbzjOw9/vYF2ivbttf1o+D3y7nb/P2cO4\nd9ewNzWfK/q3Z8a9Q3n1unPZejyHK99fy3ebTmCoMPLAmG6sfOoixvfvwE1D4q3lBAcJHr+sFz8/\nNJLKKsn7KxIBaN/CtWK20LNdc+Y9Oprbh3fmi3XHAYhvWfs6sXH92rPkibH069SCJ2ft5p+/7KW0\nvIqcojLaNDfFfUKCg3jq8t78/NBIqoySmz7bxEcrE60LPw+m5zMwPoaHL+rBvN2mQUE7DTL7muCp\nU6f6Wwa/MX369KmTJ0/2txj1Zv6eNH7aksLYXm24dlAn6/EOMRHcPLQzWYVlfL3xBGsTs8ksMFBe\naeShi3q4KbFxExwkCAnSZ2wVFxvJDYPjSTxTxDcbT7DtRC4je7QmOsLUYb+0wOSnT8oqYuaWU7Rr\nEcE5HaP5cGUSAzvH8sMDI0jPK+WbjSdYczSLoV1b0rp5OIv2ZvDjllN0bhXJ5At70CEmgrCQIC7v\n14E+7aP5dWca3286SevmYWw+nkOrqDDmThlNRKi2xXRto8O5cUg8RzOLOJZdzJOX9SLWZmKDO5pH\nhHD9+fGEBQs2JucwKD7W2ol3bhXFjYPjOZZVbL0fe1LzObdTC24Z2plrB3WifYsIZm1LYeZW0/X1\nbm9SiqXlVdwyfRNJZ4r4bVcap3JKuKB7ayJCg/nz8BlWHDrDf68fwIakbIrLq7h1WGcGxMcyMD6W\n8ed2ZGNyNrO3m5TtnSO6MCA+lgmDOnFF/5ouz7jYSG4aEk96XilFZZVMuaSHpjYSEhzEuHPak9
C6\nGSdzS3jkoh7ERNU+EzM6IpQbzo/DKOGbTSdYduA0yVnFjOjeys66tsiVetbUJjYlZzOqR2u+WHec\nEd1aMXVify7t247isiruGNHFunOG3rz88ssZU6dOnV5bOrUXWSPYi+zVRQf5fvNJDr4ynmAX03cX\n783gubl7KTBU0r5FOFv+Nc7HUjYtpJTM3p7CywsPEhIkePX6AUwc1InL/reanu2a8/zV/fj7nD1s\nPZHLlf3bs/xAJveNSmCqebLB4r0ZvDBvH8XlVTxzZR/ySir4ZHUSR169itDgmh3f6XwDT8/ZzYYk\nkzv0uvM68f5t59dJ7uLyKjt3mCckZxURFRZMxxj7kbyUkjnbU5m68AAl5VXcPCSet28eZD1/LKuI\nv83ew56UPK47rxMvTzqXMwUGLn9vLW/dNJCU3BI+WZ1Mm+ZhvHnjQHadyuPDPxM5+upVVFZJlu7P\n4OoBHe0UanmlkfdXHGXOjlTmPjJKc3xCSulTV/H6xGyenrObzIIyHr+0J09f0adGGilNrr5/zztA\nRZWRskojT1/em8cdYoa+QuteZErBNGAFs3BPOmdLyllx6AypZ0v48+mL3aZPzyvlxfn7Ob9LSx69\npKdvhGzinMgu5m+zd7PrVB6TzuvE/N3pPDCmG/+e0I8qo2TG+mO8uewIVUbJ38b15slx1R1GVmEZ\n//ptH38cNE0HDgkSJP33apd1GY2SGeuP89qSQ7x+wwBuH95F9+vzlOPZxfx3ySHuGN6FS/raTz6o\nrDIybVUyH/6ZSLvocK4Z0JEv1x/n10dGMaRrS/al5vPU7N0kmqfkNgsL5sAr4/1xGV4nv6SCGRuO\nc8P5cSS0ce2+PpVTwgPfbiPxTBEf3n4+E208Fr5EKRgNNGQFszEpmzvMiwABzunYgqVPjvWjRApX\nVFYZ+WR1Mu/+Ydpc8J/j+9p95uDw6QKmrUrmkYt61Jh9JKXkt11pPDV7Dxd0b8WsySNrrc9QUWU3\nS66hsSclj7/9vJtj5llW254fZ13oaqio4t0/jjJ97TESWkex+h+X+FNUv1BZZWRDcg5jerZx6bHQ\nG6VgNNCQFcz0tcn8d8lhfnhgBDPWH+OC7q2bdFylIbAnJY+P/kzkqcv71FAktZFfUoFEao6LNHRK\ny6t4c9lhTuQU8/V9w2ooy/1p+YQEC/p28Ow+KryDVgWjPpzRQMnINxAVFszonq0Z06vm1veKwGNQ\n51i+vLf2b+k4Q0vguDERGRZsjUc549y4wFwQrLBHKZgGxqK96Xy+5hj70vJpGRXaYN0gCoWi8aMU\nTANj2qpkDmWYtrvo08H9GgeFQqHwJ0rBNCCklJzMKeb+0QlMvrC73wJ8CoVCoYVaFYwQIh64DRgL\ndAJKgf3AYmCplLJxfxwigMgqKqOkvIpubZrVWGegUCgUgYZbBSOE+BqIAxYBbwJngAigNzAeeF4I\n8ayUcq3egipgjnlVctcmus2LQqFoWNRmwfxPSrnfyfH9wFwhRBgQeKu5GiHHsop45/cjdGgRwbke\nTnFVKBQKf+BWwbhQLrbny4Ekr0qkcMqhjEKkhBn3DaV188Db1E6hUCgcqc1FttfVKUBKKQd6XyTF\nlmM5TPlxJ2EhQdw3KoF7RyVYP6SkYi8KhaKhUJuLzAhI4CdgIaYAv0JnPluTjMS0HfjrSw/z3aaT\nSCkJDhJ226grFApFION2P2op5XnA7UBzTErmNaA/kCalPKm/eE2Tk7klXNC9Fd8/MIKfHhxBy2ah\npOcbqDL6dpdXhUKhqA+1TlOWUh4GXgJeEkLcCnyHaUbZ2zrL1mTJyDNwifkzt6N6tGHBo2NYsj+D\nIKVcFApFA0LLOpg4TOtgrgfOAn8DftNZribJ6XwDL87fT2lFFa2aVW9qGBQkmDDQP9tyKxQKRV2p\nLci/BogGZgP3A5aPu4cJIVpJKXN1lq9J8fGqRH4/mEmXVlEM7tLS3+IoFApFvajNgumKKcj/EGD5\ntrDFTyOB7jrJ1SQ5llXM+V1i+W3KaH+LolAoFPWmtnUwCT6SQwGknC1RlotCoWg0aN7sUggxEEiw\nzSOlnKuDTE2Siioj6XkGJg3S9t1whUKhCHQ0KRghxFfAQOAAprUxYHKRKQXjJdYlZlFllG6/x61Q\nKBQNCa0WzAVSyn66StLEmbcrnTbNw7hmQEd/i6JQKBRewe1CSxs2CSGUgtGRU7kl9OkQTWRYsL9F\nUSgUCq+g1YL5DpOSOQ2UofYi8zpZhWV0b9vK32IoFAqF19BqwcwA7sb0DZhrgQnm/90ihBgvhDgi\nhEgSQjzr5LwQQnxoPr9XCDG4trxCiFZCiD+EEInm/1uaj18uhNghhNhn/v9SjdfmVyqqjLy17DBp\neaW0jAqrPYNCoVA0ELQqmCwp5QIp5XEp5UnLP3cZhBDBwDTgKqAfcLsTN9tVQC/zv8nApxryPgus\nlFL2AlaafwNkA9dKKQcA9wLfa7w2vzJ/dzqfrE6mW5tmjO3Vxt/iKBQKhdfQ6iLbJYSw7KhcZjlY\nyzTl4UCSlPIYgBBiFjAJOGiTZhLwnZRSApuFELFCiI6YpkO7yjsJuNic/1tgNfBPKeUum3IPAJFC\niHApZRkBzJHTBUSEBvHn0xepjSwVCkWjQquCicSkWK6wOVbbNOU4IMXmdyowQkOauFrytpdSZpj/\nPg20d1L3jcBOZ8pFCDEZ864EXbr4/2OcmQVltIuOUMpFoVA0OjQpGCnl/XoLUheklFIIIW2PCSH6\nY9rt+QoXeaYD0wGGDh0qnaXxJVmFZbSNVl+oVCgUjQ+3MRghxAtCCJdTm4QQlwohJrg4nQZ0tvkd\nbz6mJY27vJlmNxrm/8/YyBOPaafne6SUya7kDgTOFBr466xdbDqWQ2yk+oiYQqFofNRmwewDFgoh\nDMBOIAuIwBSUPw9YAfzXRd5tQC8hRDdMyuE24A6HNAuAx8wxlhFAvpQyQwiR5SbvAkxB/DfM/88H\nEELEAouBZ6WUGzRcu1+ZuuAAfxzMpHvbZozqqYL7CoWi8VHbZpfzgflCiF7AaKAjUAD8AEyWUrr8\nhLKUslII8RiwHAgGvpJSHhBCPGw+/xmwBLgaSAJKMH0SwGVec9FvALOFEA8AJ4FbzMcfA3oCLwoh\nXjQfu0JKabVwAon1idncNCSe129QS4kUCkXjRGsMJhFI9LRwKeUSTErE9thnNn9L4FGtec3Hc4DL\nnBx/FXjVUxn9gaGiigJDJfEt1caWCoWi8aJ1HYzCi2QVmia3tW2ugvsKhaLxohSMHziRUwxATJQK\n7isUisaLUjB+4LXFhwgPCeKcDi38LYpCoVDohtsYjBDiGSnlW0KIjzAtrLRDSvmEbpI1UorLKjl8\nupCnL+9Nl9YqBqNQKBovtQX5D5n/3663IE2FjHzTxLvOrZRyUSgUjZvapikvNP//rW/EafycKTAF\n+NuoAL9CoWjkaP1kclvgn5h2No6wHJdSNogt8QOFX
3ak8vc5ewBo3Vxtza9QKBo3WoP8P2Jyl3UD\nXgZOYFqpr/CA6WuT6d62GW/eOIC+HaL9LY5CoVDoilYF01pKOQOokFKukVL+H6CsFw+QUpKSW8ol\nfdpx67AuavdkhULR6NG6XX+F+f8MIcQ1QDqgvu/rAfmlFZRWVNExJqL2xAqFQtEI0KpgXhVCxABP\nAx8BLYC/6SZVIyOzwMC8XabNoFXsRaFQNBW07kW2yPxnPnCJfuI0PjILDFz+7hoKDJUAdIqJ9LNE\nCoVC4Rs0xWCEEN+at8O3/G4phPhKP7EaD8sPnKbAUMkX9wxl8RNjGNG9tb9FUigUCp+g1UU2UEqZ\nZ/khpTwrhDhfJ5kaFRn5BkKDBZf1bUdQkArsKxSKpoPWWWRBQoiWlh/mr1xqVU5NmqzCMto0D1fK\nRaFQNDm0Kon/AZuEEHPMv28GXtNHpMbF2eJyWkapwL5CoWh6aLJgpJTfATcAmeZ/N0gpv9dTsMbA\nrlNnWXn4DC0ilbGnUCiaHpp7PinlQeCgjrI0KoxGyWM/7aJ9i3AeHNvd3+IoFAqFz1Hfg9GJo2cK\nScsr5R9X9uWyc9r7WxyFQqHwOUrB6ERGvgGAbm3UtvwKhaJpooIDOpFVaNqWv21ztTWMQtFYqKio\nIDU1FYPaSJlEAAAgAElEQVTB4G9RfEJERATx8fGEhtbt8+5KwehEQalp+7bYZnV7MAqFIvBITU0l\nOjqahISERr9hrZSSnJwcUlNT6datW53KUC4ynSg0bw3TLEzpcIWisWAwGGjdunWjVy4AQghat25d\nL2tNKRgdkFKyZF8GcbGRBKsFlgpFo6IpKBcL9b1WpWB04EhmIYlninjs0p7+FkWhUCj8hlIwOpCe\nVwqgvlqpUCh8xosvvsiKFStqHF+9ejUTJkxwmuf111+nZ8+e9OnTh+XLl3tdJhUg0IHsonIA2jQP\n97MkCoWiqfDKK694lP7gwYPMmjWLAwcOkJ6ezrhx4zh69CjBwcFek0lZMDqQW2xSMK2aqT3IFAqF\nd/nPf/5Dnz59GDNmDLfffjvvvPMOAPfddx+//PILAMuWLaNv374MHjyYuXPnOi1n/vz53HbbbYSH\nh9OtWzd69uzJ1q1bvSqrsmB0oNBQQXCQICrMeyMBhUIRWLy88AAH0wu8Wma/Ti146dr+Ls9v27aN\nX3/9lT179lBRUcHgwYMZMmSIXRqDwcCDDz7In3/+Sc+ePbn11ludlpWWlsYFF1xg/R0fH09aWpp3\nLsSMsmB0ID3PQPPwkCY120ShUOjPhg0bmDRpEhEREURHR3PttdfWSHP48GG6detGr169EEJw1113\n+UFSE8qC8TLJWUXM253GpEGd/C2KQqHQEXeWRkMgLi6OlJQU6+/U1FTi4uK8WoeyYLzMykOZSAn/\nuvocf4uiUCgaGaNHj2bhwoUYDAaKiopYtGhRjTR9+/blxIkTJCcnAzBz5kynZU2cOJFZs2ZRVlbG\n8ePHSUxMZPjw4V6VV1kwXia7qJyI0CDatVB7kCkUCu8ybNgwJk6cyMCBA2nfvj0DBgwgJibGLk1E\nRATTp0/nmmuuISoqirFjx1JYWFijrP79+3PLLbfQr18/QkJCmDZtmldnkAEIKaVXC2xIDB06VG7f\nvt2rZT7zyx7WJWaz6bnLvFquQqHwP4cOHeKcc/zrnSgqKqJ58+aUlJRw4YUXMn36dAYPHqxbfc6u\nWQixQ0o5tLa8yoLxMnklFcREqg0uFQqFPkyePJmDBw9iMBi49957dVUu9UVXBSOEGA98AAQDX0op\n33A4L8znrwZKgPuklDvd5RVCtAJ+BhKAE8AtUsqzQojWwC/AMOAbKeVjel6bK/JKKoiNUgpGoVDo\nw08//eRvETSjW5BfCBEMTAOuAvoBtwsh+jkkuwroZf43GfhUQ95ngZVSyl7ASvNvAAPwb+Dvel2T\nFvJKy4mNVAssFYrGSlMKK9T3WvWcRTYcSJJSHpNSlgOzgEkOaSYB30kTm4FYIUTHWvJOAr41//0t\ncB2AlLJYSrkek6LRFaNRUlxWSX5JBdlFZWQWGEg9W8KJ7GKOZhYpC0ahaKRERESQk5PTJJSM5Xsw\nERF1n7Ckp4ssDkix+Z0KjNCQJq6WvO2llBnmv08DPv/g/d60fK6btsHleTWDTKFonMTHx5OamkpW\nVpa/RfEJli9a1pUGHeSXUkohhEdDCSHEZEzuOLp06VKnejvFRvDcVX0JCQ4iNFgQEhRESJAgJFgQ\nFhLEhb3b1qlchUIR2ISGhtb5645NET0VTBrQ2eZ3vPmYljShbvJmCiE6SikzzO60M54IJaWcDkwH\n0zRlT/JaaBcdwUMX9ahLVoVCoWgy6BmD2Qb0EkJ0E0KEAbcBCxzSLADuESYuAPLN7i93eRcA95r/\nvheYr+M1KBQKhaKO6GbBSCkrhRCPAcsxTTX+Skp5QAjxsPn8Z8ASTFOUkzBNU77fXV5z0W8As4UQ\nDwAngVssdQohTgAtgDAhxHXAFVLKg3pdo0KhUChc06RX8gshsjApqbrSBsj2kjj+pLFcB6hrCUQa\ny3WAuhYLXaWUtQabm7SCqS9CiO1atksIdBrLdYC6lkCksVwHqGvxFLWbskKhUCh0QSkYhUKhUOiC\nUjD1Y7q/BfASjeU6QF1LINJYrgPUtXiEisEoFAqFQheUBaNQKBQKXVAKRqFQKBS6oBSMQqFQKHRB\nKRiFQqFQ6IJSMAqFQqHQBaVgFAqFQqELSsEoFAqFQheUglEoFAqFLigFo1AoFApdUApGoVAoFLqg\nFIxCoVAodEEpGIVCoVDoglIwCoVCodAFpWAUCoVCoQsh/hbAn7Rp00YmJCT4WwyFQqFoUOzYsSNb\nStm2tnRNWsEkJCSwfft2f4uhUCgUDQohxEkt6ZSLTKFQKBS6oBSMnziaWViv/KXlVZSWV3lJmrpR\nWWWkyqi+iKpQKJyjFIwfWLovgyveW8vivRl1LuPcqcs558VlXpTKc3o+v5TrP9ngVxkUgUVWYRl5\nJeX+FkMRIDTpGIyeLNt/mt8PnObdW8+rce6I2Xo5klnINXSsU/l6WA4Jzy7mvlEJTJ3YX3Oevan5\nXpdD0XAZ9toKAE68cY2fJdGHiooKUlNTMRgMTs+XVRrJKiyjfYtwQoMb/vg9IiKC+Ph4QkND65Rf\nKRidePiHHQA8eGF3wkOC6N62ec1EUnL5u2u4oHtr/nPduT6W0DnfbDzhkYLxNfvT8pESBsTH+FuU\nRonRKNl+8izDu7Xytyh+w2iUFJVX0iKiZqeamppKdHQ0CQkJCCFqnE87W0pQcRmdYiNp0zzcF+Lq\nhpSSnJwcUlNT6datW53KaPgqNsC56oN1XPq/NdbfhYYKZqw/bv2deKaI7zfXPiHjh80nef63fS7P\nP/Xzbq7+YF2t5czZnsKivem1pgtUJny0nms/Xu9vMXxOcVklRWWVutczfd0xbvl8E+sSs3SvKxBZ\nl5hF938t
YeDU38ktrunqMxgMtG7d2qlyaWwIIWjdurVLa00LSsHogLOGaWHqgoMUGkwdha2T63+/\nHyHh2cXsPHXWab4X5u3nxy2nXJY7d1caBzMKapXtH7/s5bGfdvH7gdO1plUEDoNe/p1zX1pea7qK\nKiMJzy7mxy0nySos87iepDNFAGTk171T8RZrjmbx1OzdXi3zP4sOcveMLS7PPz5zl/Xv3GLn968p\nKBcL9b1WpWB04OK3V7k8l19a4fT4R38mAfDZ6mRdZHJk8vc73J7/dHUyCc8u9oksdWVjcjb5Jc7v\nZ104mllYI0BdZZQBEbSu1BhzKzIPXp7/bT/DXlvBIQ2DDqfoODnwrWWH2XYit9Z09361lbk70zSV\nuS4xi4oqY63pZqw/zrrEbE1l1gk/6p4XX3yRFStW1Di+evVqJkyY4DTP66+/Ts+ePenTpw/Ll9c+\ngPEUpWC8zJ1fbqbAUHdXhsRkzaTllXpPqDrw5rLDXivrj4OZnMgu9lp5YHIZ3fHFFv7y3TavlXnF\ne2u5bpr9rLi3lh3mvFf+cDkw8AZSSt794yinckq8XvaxLM/uuy/6x09WJ3PzZ5u8Vt7W47ncPWMr\n//v9aL3Lsr1+6aGS9feygVdeeYVx48YBUF5prFXhHjx4kFmzZnHgwAGWLVvGlClTqKry7jUoBeNl\nNiTluD1va3E6a8CHTxfw0Z9JPPrjTqf5G6Jr68HvtnPxO6u9WmZllenmHTldv/VEjpxw6OQX7zNN\nJS/QUcGk5Jby4cpE/u/b+itLR4+GrKMpUtd8/iCnyOTK8vYgxhMKSitIPFNInhv3eG1UVhmRGrTa\n1JdfJqFHL0aOGs3tt9/O22+/TWpuCXfefQ+//PILADNmzqV3n74MHjyYuXPnOi1n/vz53HbbbYSH\nh9OtWzd69uzJ1q1b6yy/M9QssgDDaB50lFU6H31M/n4Hcx4e6UOJAhtfdYOejmadUWCoYODU33nv\n1kFcf358ddnmqyh38czrQ+rZUnaczGVIV22zwhpKeOHI6UIqqoycG1c9m9CZUiwqq+TT1Un8dVxv\nTdOGbWMOtT3ylxce4GC6yQVZUWW0e35hIUEeT1MuLqvknI4tePvmQS7TbNu2jV9/ncuc5euICoHr\nLh9LvwGDyC0pp8hQSVllFQaDgZf/+SRf/LyACWMGc+uttzotKy0tjQsuuMD6Oz4+nrQ0bS5JrSgL\nxoeczjd4xQXhGHdIyfW+a8WClhGVX7DcyDqIl55XqjkA7s0O1+IC+2LtcafntVgNJ3OKPVoD9cbS\nw9z4qffcUYHCle+vZcJHptmE7p7Re38cZdqqZH7dkeojyeqG5YmW1+LW2rBhA1dPuJbwiAiaR0dz\n7bXX2p2vMkoOHz5MXOeudO3WAyEEd911F2BqO2cKfTt5Q1kwPmJ9YjZ3uZm94oi7jt3xzNi3XE8q\naKzUp+Mf9cafgGeLAb3pMnIsSXgw7Ljo7dVMubgHz4zv6/S8J2VppbS8itMFBrq1aeb1sr2Js1fG\nUGGKKWiZAOApL11bvV4sq9BgN/PO03UwlVVGDmYUEByknwmZX1pBfmkF7aIjAIiLiyMlJcV6PjU1\nlbi4OK/WqSwYH7EvreaK9zr7x31oVdhW1e/FZS5jQ/5C7zvhzQ7bW9bQluO1z8CqL7bPffL327nE\nyzE07+L6xjYUl59WRo8ezbIliykzGCguKmLRokU10vTt25f01FOknDBZyjNnznRa1sSJE5k1axZl\nZWUcP36cxMREhg8f7lV5lQWj0ExJeRWL92UwrZ7lrD2axbGsIu4bXbfVwU0dPQcYzhSqrtN6vULt\n90P/IZlvNNmwYcO46poJ3HTFGNq3b8eAAQNoEWOzq4U0be/y4hvv89h9t/LvFs0ZO3YshYU1J8P0\n79+fW265hX79+hESEsK0adMIDg72qrxKwfgIZyOpuvYTvoyK6FHXPV+ZZqooBWOP1vbgi+fvrA4p\nZaNfZFifacp26PiQHnvyb9zxyNOEyQruuv4qzjtvMAD/ee8TuraKAmD0JeOYf8k4BsbHWvPtTc2r\nUdbzzz/P888/r5usSsEoGiS+7ua8aTQ4WiBe7bN1vDFSBqrLKSCF0o2/PjaF/QcOUFFexl/+734G\nnX8+p3Sc6FMflILxEc5egQCdn2WHqTMM3BdY73iUpUP1Ri16BOAV1Th7RvW554G6FmjGN99xMreE\nmMhQurZuFhA7TbhCBfkbIIE6c9iX+MpV09BUQn1vi7v8gdrstFyzr98Zj6traA1NI0rB+JGG0KYC\ntVOxEOjyeUIgDRwCSxZtwjhLZrVANZahTVkF0M1xwNuS1fdaG52CEUKMF0IcEUIkCSGe9bc87nD3\n6Nw/18Bt4ArXeMvo0rN/c2vBBGjH6suBWkREBDk5OQF7L7yJ5XswERERdS6jUcVghBDBwDTgciAV\n2CaEWCClPOhfyQI1OFo7TeA90oQ3O5SGcE9t4w9CmGT2l9iBNLkgPj6e1NRUsrLsv5dTaKi02xC1\nLDKU7Ajt3atRSjLzDAQJCMqPdJu2tLyKnOJyCkKDKTkTZv0NUJkTRmRYMJlnTZvlHiqsLsvZsdqw\nfNGyrjQqBQMMB5KklMcAhBCzgEmA3xWMM+o8TdkHb7qlUwlUrDvF6CxjIE7LdXfJekgrzHUGcntw\nRX3uh7PrDQ0Ndfp1xy/XHePVxYesv1+45hz+cn53zXUVGCq4ZurvRIeHsO/lK92mXbovg0cW7OTK\n/u35/O5BLN6bwaMLTAugP77jfCac04mrzJ/asN2twtkxvWlsLrI4IMXmd6r5WIMjUGawBIocjvi6\n3w/Mu+AbLErWX21Be63at1dyTd0alrcGIlrkrI4r2f8ORBqbgqkVIcRkIcR2IcR2RzPX1zh7YTUF\nGXWQxRFfWQj1Re9Oz6tLVFwU5s0Oov4dXc38gd4W3F2zmm3oXxqbgkkDOtv8jjcfsyKlnC6lHCql\nHNq2bVufCebpfHx/v8yB6BpqLLhSiprjPD5oHLZV+Lsp+DKgXtv3mgIRr+0+oAONTcFsA3oJIboJ\nIcKA24AFfpZJoQOB9iJpwRcLLettv7idRVbPwnUm0OVzR7WVqMlJ5vJMoN2CRhXkl1JWCiEeA5YD\nwcBXUsoDfhYL8O4osCG/SA2VhnLP9RBTmMP8gR+DcVOGrxdaelhfXTwGDaFJNioFAyClXAIs8bcc\nmnDSQgKtIws0eRzRXT4djA5HmQPeHekQVG6I6N5MvPapas/rCmQam4us0eB2EWaj2Xy8IVH/e15b\nx6C1Br+tR/FXvbVUrNdeofV5zzxVxvWJMwWywlEKxo843ZwvwBpLoE5TthDY0mnD00fuC0vCtgrP\n4gP+w5/S+fe1ra490J6RUjANEF8ttFQ0PZw9dm/uKF0XahvkuGurnrZjr23n451i3NcRWLrEKbrG\nYIQQ7YDRQCegFNgPbJdSev8D2U0In35wLEAbsa/E0kPPBugttcfJg
w/UtmDB3ei9LiP7+lyvnrt0\n2Ngrpt8BPBjURcEIIS4BngVaAbuAM0AEcB3QQwjxC/A/KWWBHvUHIs4Cuc4aveWQv01dYd0gJMDx\nkYg+cUsFwO121llZp1f7Sb5aYzDuLBgfOa8c329PXcuepA74SSE26GXBXA08KKU85XhCCBECTMC0\nIeWvOtWv8BIB0Of5leptUvSsw3tl6TEwqXaRBWZrCATFrHCOXgrmf1LK085OSCkrgXk61dtoUO+M\nNhrUVjHm/+urBPy2HqUJNMq6Wjw1pik3gXulBb2C/LuFECuEEA8IIWJ1qqPJ4kv3mb9ddY2JBuTZ\ncD6LzB+CaMCbQX5/48ngwbrZpZNjgYJeCiYOeBsYAxwRQswXQtwmhND+IYJGhrN2HmiNwY4G8mIG\n9D30EK2di64fHHO22aXFTeinm615izYvlOEt6nqvPAvyBz66KBgpZZWUcrmU8n5Mm09+hem7LMeF\nED/qUWeg4/FIKkA6zgARowa+7ux83an7G7vNLi3H/CJJ7VQrQO+W68smVpe6LFlsg/6BFifTfR2M\nlLIc0we/DgEFwDl61+kv3HV6Ti0YD8uoTqNdJq041ht4XZ5/0MPF4urxBYI1FoibXda6Dsb8v9EL\nAtpevyflOd42/33WWr9664JuCkYI0VkI8Q8hxE5gkbmuiVLKwXrVGchonVooHf73N4HWYP2Fd0aG\nXvoola+fSYDPInP7PZh6lFufqw3MO+V79FoHsxFTHGY2punKO/SopyHh7B1QnXf9UbfQHn12U9ax\ncA14Yy8yrcqxzgrJcR1MXRdaepLWXEkgexv0mqb8LLBOqilIVrQ2goC7Y4EmTyMkEGc6OV0E7GMZ\nhNAY9DbfP2cuLcfPC3tCfVxuelp7gdheXKGLgpFSrgUQQnQDHgcSbOuSUk7Uo96AxkmrqGtD8clu\nyg2kEes9htElAO9C5EDQ5c73ItMniK6V2qoNxEkSHlOvIL/NsUBoRDbo/T2YecAMYCHQ6Pcfc/dw\nPZ2mHCjGX6D63X0tlTceh7c2jfTFdFy7dTB+isEECUGVlLW+C/WxUtzhSXm+DPI3JPRWMAYp5Yc6\n19Eg0GoRaDHLfbKbMv4dtQYKgWjJBfp6FG8RJKAKMGqMwTiTz9N9u+zT18dF5uuMXsnudfRWMB8I\nIV4CfgfKLAellDt1rjfgcGbGOxsRamkg+kxTtv8diB2rM3z1QjVlReuvdTDWTzVrvPnuLKy6yO6R\nBeNDE6YhuQT1VjADgLuBS6l2kUnz7yaFp7PI3LpB6i2NdgK9X21IHX9t3YJXrqWeZTgb8fttJb9W\n15ebdL6apuzY6Xt6pzxyP1onNZh/BrC+0VvB3Ax0Ny+2bNI4j8E4mfViPee6LG8sKKshi0s5GlAP\nriPeiD9YOmrH5xeIt9jpSn7/6Jda27vVnesmTZ1WytfHgqkjWtpZkIPCD2SLRu+V/PuBJrPZpbum\n4awBuvMt+zu4Xt0Z+lUMvxPkxRlUtXfU9a+kvu3G31+vtEU4jNRdEeTGh1f9uQWN62DsZmT5fiW/\nJ3uRSScWjB6Dz/qgtwUTCxwWQmzDPgbT5KYpa43BWDC6m3OnSwzGvlD1DRATQUHeq6+2e6rnNUkp\nNQW8HUfH4N1ZWp512tqUgzslEuSh7EE298iTyw1yuLd1fW+0KAjHdmRXd4C9rnormJd0Lj+gcPvy\naIzBaLEYfDFK8ebI3RVaOz1/EuTCrVUfXA0etNZRV3ePllvt3C3lmRVQmxxa8YZisz4/jaZ4kJ0F\no70eb30PRouYju9mk7NghBBCmlhTWxo96vcXbl1kmtObjvq6oTjWZnnR9JTDKCE4sPWLVzdStOCq\n2Wt1R7qTxdUpo5QEafDVB9JGilpjP+4UkVY3W3X6uu1MXNOC8Yy6KDNLO6ir1eUL9IrBrBJCPC6E\n6GJ7UAgRJoS4VAjxLXCvTnUHJJo3u5T2/ztN4wV5XNVrwRcxmAYxvvDiJ5Mtl+t4T6WHg4q6KDut\nOZw9d2/GZeridqo9yO+6bI9jMLY/PBA2SPd96W3qcnhGtjI3CQsGGA/8HzDTvF1MHhCJSaH9Drwv\npdylU91+wxsr+S2H3DUUXzQi68hdRw0TWK+C73D5/DTeELezpTyt0wFnVoM3ZxR6VIZGK7ragnEd\ng9FuwWgVziGf4zRlHRt3jecRwF4AvfYiMwCfAJ8IIUKBNkCplDJPj/oCBXejJOcNt2Z6S6Nx24no\nEeT3Q3cfaKMtZ+gxRddVZ6d3DEYT7lxknldbL7Tfe9dWprNJC+6oq7upRgxG180u7WMwdjIH2Cul\nd5AfKWUFkKF3PYGO1oWW0nrOjZ/dOyK5xd0Otd6iXkX76EWqfm4NIwZT3zzOZm55dRaZB2m1brLp\nTj5PY4m2nXW92r6O7dM6M8782zub2+iDDz2HjR/3LjIn05SdKRgXfnrNFdWRmkUG9joYX1lc3rRg\nXLk/rXE3jdfkPsjvYuqIhy4iexeZ9zaLqU8w22U6t2V41o79NamxLorXcl/s4rsBZsIoBeMjnFow\n7lxkyoLRLa8nCC8G+S04llVttWrMXwdhPI/B6GPBeIKnas0b4gkvuZs8zerJe+YYVwrgZTD6Khgh\nxFNCiDg962hsaLFgfPGi+2KrmPooL1/Fb/SIwVQ5PFwtcTf79G7OuTju6Qjenfu2PtRl6q/mZ12H\n6duO1NXd5DhL1NP3xjPLznGrmLqV4wv0tmCigd+FEOuEEI8JIdrrXJ9fcesic2LCOEtfZTV73dXj\nu1ak60JLP+X1BHczlDyl2jp1PO5ZOY4KypO6a8PZvl5edRPWwUWmfTdl12XUhfpsFaNnXUYHxWKb\nM9Cm/uuqYKSUL0sp+wOPAh2BNUKIFXrW6U/cziJzmr4mRhedUG356kvNdTAWeXSozFpn3Qv39ntU\nmyzeGb27qtuLldRWRy24t2B83XlpC/JrcTE6yq6l7XlmwTjk9fBWeZK8enpyzfsTWOrFdzGYM8Bp\nIAdoV9dChBBThRBpQojd5n9X25x7TgiRJIQ4IoS40gsyexWts8i0dOhuXyQv9bwCD90TLtArluTt\nkZqr4rz54TVXa4osnV9dNmSsUZaLIjyOwdjIYhHb14NjT9ewOLt/rnYa1vaeeWLB1G8lvyfvWY2F\nujZ5A8yA0XeashBiCnAL0BaYAzwopTxYz2Lfk1K+41BPP+A2oD/QCVghhOgtpayqZ10e4b7jd3bM\njcVTx/UIUtbNLeD4cnoryO/2ntTjI9refo9cXqcXXWTOVl6byvasHHeP19V1aLZEnYyKLW5bX7vI\nLElra4NSiwJ0OGeUkuBaHFuO27+4wzFpcJBnL6En98XiInXqIvOoVv3Rex1MZ+CvUsrdOtczCZgl\npSwDjgshkoDhwCad67XD3cP1xV5Wlnq07DlVG97yu7udUluP18HbQX5XHbCo5bwnuIqdeHot7rYd\nclWH9hiMOb3NMYvl5ZXN
Lj0ow1XMyhFne3JZcHWrXI4nbNJ7omC01usKzywYR3efZ3X5Er1jMM/p\noFweF0LsFUJ8JYRoaT4WB6TYpEk1H/MpVVXuO35H6touQtyMjupaZqWXOj9HarO26lyul18qV9fp\nzena9bYuNOBaiWnLL5yMLLxpwXgyQaF6RqX7PJVVFgXjpiyH367K9Fa78lQ5eVKvZTduZ59/CAuw\n3WMDbh2MEGKFEGK/k3+TgE+B7sB5mHYH+F8dyp8shNguhNielZXlVdnLq1z7fJydctYULC+guwYa\nHOz6sdX1BTFU2HsTvdWpVLi5J/XptOsyk8odrkSx1KOngikprwQgIiRYUznu+i6XHafWRZyW9mfT\nW3tzP7pKN4OwGrJobIOV5h7XmVvK1fuk5XF68swdB2iedvOetGfHWWS2eUPc9A3+QPetYjxFSjlO\nSzohxBfAIvPPNEzuOAvx5mPOyp8OTAcYOnSoV3upSjdfCSuvrHkuLKRmY7A0lqgw152NewumbpdU\nVmEvn6UjsDTmunbohgrX96Q+N99RIdYXV4OD8irvjd7LzG2gsKySval5DIw3fey1qMykYCLdPHNb\n3D3/+lowlo7Stg6DWW5v3AN3Aw5HrJMLamkpFpmdKRhLfY7vmmtr0iZgrllSqHS4Lk8tGEOl9vZs\naUcWZWLbt7iK/fhr+nJgqbtaEEJ0tPl5PaZPMgMsAG4TQoSbd2/uBWz1tXwVla4fYn5pRY1joW5G\nG83DXet+t66AOrajMocGbnkxLS95oaGm/HUp15b6tPnSOioYVy9aabnz8ioqLfeh/i9oTpH1o648\nPrN6M/HiMlPd7gYVtkSGuW4bLl2dGjVMlbWzrm6blg7M3QBKKxUeDFSqrUf36SyDIWexKauCcXAd\nuSrSbiDowSN3HEB6GOO3DvC06KWzJeUAxESGAtqUdpmTAa4vCDgLphbeEkKch+nRnwAeApBSHhBC\nzAYOApXAo76eQQbuXWR5peU1jrmbaeKus7Ht6xwbdl2nMDtaGhXWkbvpf0cFqXUk6mgZzdtVbVjW\nJ2hsUQjOrEB3lLhQJBY3lSOW6/TGADCrqLoNZBYYrH8XWy0Yba9jZKjra7aU5YhW+aucWDAWXCkv\nT6jwoKOzWHa1KXfLICbcSVuwKG9H15GrMm07Yk8GFY4duKeuKkv+UA0flslzUDDlNm5HKaXT99xV\nu+7DQuMAACAASURBVNCbBqVgpJR3uzn3GvCaD8WpgbuHmFtUU8G46/CbubFgbN9z244K3Hfa7kb9\njpaGJa2lrmyb0Tdod1E5mv5//bl6zkd9Ou2SCtedijtO29yvUzkldG4ViRDCjeKx3AfvWjCWzgGq\nO1LHQYWrT0q3i46ocUxKSXmV0eV1aFXmlthbkDN3kxdGwZ64yCw43nrHtmcZ/LSICMURx3ZrLdOF\nGLbvgdZHXlll5IfNJ+2ORYZqs0Yt5Bab5IyJqnkNNdOarjcqLJiyyireWHLIes4oqweHtpwpdH4f\n9KZBucgCnQI3bqTTDooA3Jv+bi0Ym84i8Uyh5jKL3ChAiwXj2K9YlGByVjFQbcK7i63YkltcrVgd\n74+zuJQ7bDsnQ7lFwXj2Ip/Or34OF769iv8sMr2czlyYUO0a9Eac27azs3XJFTtRMPklFXR7bgkJ\nzy6u0aGGO7Fg3luRSJ8XllFocP6Mtcrv2A5sO1x3Fjpo8/O7e0e0lnsqt8T6919n7SKvxKxgImsO\nyiz33HGAoMWC0frIVx3JIvFMkd0xTy3rDHO7bBcdXmtaiwUjJSzZl0G6TZs2Sum0LTvrf3yBUjBe\nxLYzdSQjv+YDdvey2ZrYjv5z298H0grszrl7yUvKardgagZDTf8fOW1SZB1amEbPWi2YZ3/dZ/07\nyfwStm9heoksHVZ+aUWNIKkzbEfnFmXpqQXjaPFZRp6W447XX2iux3Jfpy44wNvLD3tUp4XsQltl\nW2m95xYFE2Ez6j2WXd1hvbHUvj5nyuLDlYlA9XNyRKsFZumQLckPZVSX52xAsPJQJt9sOA5o8/Nb\nOj9PYuA3fbaJE9nF1t9peaXWv+ftTuenLacACHPilsopqu6MbdvYT1tPOa3Lto1pvWeObaouWBRA\nrAYLxhKDMUrJ1uNn7c5JWXPQCfYDq5LySt794yjHsopqpPM2SsF4kaQzrh9Y6tmSGsfOFLgxW23a\ndnaxfTrbEcrOUw4NzI18WiyYsOAguxfR0rEeSM8Hqv3w7oL3tnkto81OMREkm+/PvaMSAFOHJaVk\n3LtreOj7HbWWZxsnsbxkLSJrfyFtyXJwFVisQYsLoW1z+xFk9VoM2J2SxzcbTzBtVTL5JZ6PxLOL\nyrigeyteu/5coLrzK3bi1rJ0WkECvtl4wu6cu45v+YHTTo9bnuOivemsT8x2K6NtHbYxM0fXy6bk\nHB74djtTFx5ESsnJnJpt3BHL/Y8KDWZDUjY7T511OgHBcTbc6iNnrH+n2ygYqFY4lhzllUYqqoyk\n5JZYLQujUZJlY0G+vfyIU/ls69XqIjvtZPDo6awtSxnOshmN0u59O1tSbVWvS8xi3DnVu29JKZ0O\nMjYl5wDQIiKEZftP8+HKRB77Sf+v1isF4yWyi8r46M8kl+eduZQyC12PfGw7kfQ8+3QWHyxAksMo\nxLaBGiqq+HR1MmfMnZUr/zzYWjDBdpZYTnE513y4js3HcoHqUazt9Yz47wqSnIyaTth0OFHhIdZO\nPKF1M2tZZwrLyCosY+XhMzXyO2Ir/3+XmEb10RGehREdFYxlooWlQ4+wcT9ZXBFgUrDXTdtg/b0z\nxaTYP/4zkTVHXa+nklKycE86WYVl5BSX06Z5uDWGYunMix2sJENFFQfSTZbpE5f1clqmKw5mFDg9\nLiVsOZbDYz/t4q4ZW1zmd7RgjtlYDhYXZYGhgllbT7FgT5pNvnIen7nTZbmWfP80W7TF5VXc+eUW\nbvhkIz9sOVkzrYObZ8n+asWZkef8vTFKSW5xOb1fWEqv55faxUWMsqZiqh1tSiIj30DHmAia2bg4\n31h2mMRM59akMywDMWeDh4nT1tPnhWXWAZalDWcXlZF6tpQR3Vpb0xolHM2s7hPySspZsCedBXvS\nAZPl/O95psm3ztqWt1EKxktMX3vM4zyZTkY+FoxSWjtzxxcj18aiScm1P2fpfObuTKXvv5fx5rLD\n1hfN3SQEi8IIDwmyCwhuTM6xdnbNzEFFU/rqzj6zoIzfD2bWKPNkjqlz6tAiAmn2DUeEBlmVQnmV\nkR0nqy0wR6soxyFA68zFZ+tpOZRRYDfidoZjsNNQYcRQUcVas5Kwfb9XHqpWep+sTrbL9+yveymv\nNPLO70e59yv7GfEFhgrrKPLw6UIen7mLYa+t4Hh2MW2jw2nTPAyAmz/bRHFZpc1sKZMV3Pffy6yD\nldE921jL7dG2GZ1bRVpXcmfkl7J0X4amwLlRwq3TN1f/dhGUsVhVlo6u0FBB7/bNr
dcF8N4fR3l2\n7j5mbq3ePONUboldx+as/C3mQYojtm44C2dLyh3SFFjbdnpeKTGRobx900C7NFLCTpv2tGBPOt3a\nNEMIk3Lcl5pvl97RCnVU3FqNkIz8UjrERNitfTFUGPnLd9utv2tbR5ZiVTD28pzKKWG/2Q2+PjEb\no1FaLTaLi6un+fmY8ks7b8kHKxN5wjwl3vLeFZdXESTgin76fz1FKRgvcKbAUCcF48w1YiG/tILe\nLyzlumkbSDtrr0QsAXcLtoFBKSE5q4inZu+xHrO4CdzFfIptFvsl21hFp3Kr6xoYH2tVenO2p9rl\nT8mt6R6xvAjxLSOR0jSaiokMtfrKDRVVTPmxetSbaNNBrT2axZBXV7BsfwZgcjE++lPNEfKW47l8\nt+kEAPd+tZW//rzbrSvQ0YKx5LPc09yScmtHczy7uEbaf47vS1xsJIWGSqsCBXhp/n7KKqtIzCxk\n4NTfufL9tZwpMLA/zb5Ti4uNpEurKMAUs3h69h7rvTdKyd7UPLv0fTtEW/9e+fTFNA8PpUpKpvy4\ng5Gv/8kjP+6k1/NL7fI4m/3+k4OV4CzoW2WUVivR4mrLKiyjX8cWNA83uVa+XHfM7jld2d/USc3c\nespuavPw/660GzDkl1RY4yhdW5uuf0BcDIPiY+zaWEWVkdunb2bCR+vtZCs0VFqf3cncEs7pGM3N\nQztb72VkaDDpeaW8+8dRa56MfAOje7amdbMwcorL2XHK/t4et3l+Ukqnsa7T+QZO5hRzLKuIVxYe\ndDqdPTmriK6tomrE707mlCClZM72FPq9uMypmxxMEz6yzYp96/Fcq4t6+tpjXPj2Kmu6DUnZHMks\ntL6DljbbtVUUK566yCqzbbzX1m35t3G97a7N2UxBb6MUjBfYesI0MnuyFpPzhWvO4d8T+jk9NzA+\nxu73YfMIeHdKHq/ZTEME2OfQaU25uAc925lGMZKaI8WNyTn8tiuVJ2fZbwtnO2KzuCSahQWzy+ZF\ntDTQ68+PY0T3VhilKRby8/YUu7IsVo6F4rJKnv/NZIq3jQ6nymzBxESGWoPZU36wVxiHbNw7e1JM\nMvy2K43sojLGvLnKbvYQQPe2Jlfbi/MPkFlgsFonS/Zm4IqsojLG2FgFYFJSAOPOaU9eSYX1/jp2\nCJueu5RHLu7B/aMTKCmvsuYD+HbTSX4/kMkX66oHGm8sPUyKw+Cgd/toWjcP59K+Jr/5sgOnrWs1\njBLrLLDYqFDWPXMJ0RGh3DcqgU/uHAyYZpr9cTCTJfucx1qgeiKGLd9uslcwzpRnjo1lvPNUHhn5\npWQVltGuRQTXDurIxuQcXl18iPVJ1TGcGwbHA/DLjlS7dTLZRWVWK27biVwGvfI701Yn0apZGH3a\nm5Rmz3bN6dwqig1JOdZ7vfzAaTYdy3Hqzj2WXcy+1Hx2nDxrVSzv3TqIN28cQNfWUaw6klXDRdi1\nVTPaRkeQWWBglYMb9rppG6zxzEMZhXzuMEiUSG75fBMXvb2aK99fy1cbjvPrTnsLOSO/lMyCMs7r\nHEvLZmE1ZF5+IJPVR7IoqzRa3VSOpOfbt5HNx3KRUvK6jcIb0rUlKw6dqfGeAcS1jKSdeeKMlNLO\n42GoqCIm0tSWbhnWmQkDO9bIrydKwXiBQxkFBAl45OIe/P2K3k7TfPt/w/nL2O7cNCTe6fnZD420\nm0FimXrZTMPq7lbNw7l3ZFfA1MB+2HySVjaNPb+0wm5kN/nC7gDM2139shy3GenYdqypZ0vp2yGa\n9249j2jzOgPbQHJIkOCmIfEkZhbZuUX22IzEY6PCKC6rtCqYDjGmDtAyQ2vTc5cSERrEx6uSrNOC\nU80dc05RudPJE51iIvj6vmHExUYC8I9f9lrPPfPrXv4+Zw9SSlJyS/jDxn13PLuYHm2bMfPBC9j6\nr8usSiosOIipE03Kf+LHGyirrCI9z2BnQXSMMdXVt0MLwDRF1JbHZ+5ito1lN3dXGp+vMbnW3r5p\nICufvogLe7cF4N1bBlllt8TRyiurmLvTlH/b8+PobO5Ep07s///tnXl4VNXZwH/vZDITsieTPSF7\nAoQtGwQIRGSRfSlu4FKoe12L24etCn72U9vaVq3VunWx1trWtVbrAu7iBsgqKFtUQECQ1QAGcr4/\n7pI7ySQhCUMSPL/nyZM79965c9577jnveZd7DuP7Gh1DczGnmycWMaJnUrMpqY+eNxARo9MHI7Y0\n7b53+WJnjV+WG8DCdTs5dLiOxEhvo+d2WEECD5xbxilFyX7PmtXxA7Zr530zwLy7ppachAjbwsxL\njLAHVpYl6wxQW9mGZ5Ybs0Bt3PEt0+434mBWenpZVjxnDsgM+A4MQFK0l9yECD6q/ob9hw5z44Re\nPH5hhX3ccqlZz6vTCqtT9bERK8Hhw43fsOHr/Ty3dDNKKXswVpIZ53cfLC55bDFLzcHSL1/6NKCr\nzFIIj8wsB+CcRz7wS0gAmNw/jc27D/Duuh2EOmYlqMz34XWH2O653TW11Hx3xL53X+6qYUB2HN3j\nw4n0uvnFqf5uxWCjFUw7qT1Sx+9fX0+/jFjCQkO47OR8P1PUIstseA2t0t5p0dx7VglhoSGNUn89\nbhfXjemBx+1i3qQiv8br7PjG9k6xX8jbd/Awn3y1lxkDu/P3Cwfx4LllgBGrCQ0RfjejhB+flAcY\njcXiw41GJ7Bs0x7mr/Yf6aWaCsH6/dn/MNxv784ZwbrbxlOSGcuB2iN+6aNfOBRWXHgou2tq2V1T\nS0w3j1+mVnpsN1JjunGwto7Pd9Zw1/y1rNu+j/c2GOWp3lljK1swOrZnL6vkmcsqyfJF8Pq1w4ny\nuu0YisWTizfx6bZ9XPnEx1z46CL++M5Gnly8iSN1ivgIL4PzfCRFh3HjhF4AJMd47Q4fYHH1Lrbu\nPUihOdp2Huth3vuF63eSEh3Gm9cNb3S/7p5eDBhusFNLMzi9vDt5ifW+8thwDw+YdWMlVby/4RuW\nmXGCpqYRsjqoi81Bwt3Ti0mJDqMiJ57zhubwx1kD6JUaHfC7t07pTVVhIgOy43lxxVfUfHeYu+av\nZckXu6n61es88JahDK3BipW8kBjlpTQzjsn90+xrZfsiGGM+d/+8eBCV+Uag+fdnldqJEqu27GHv\nwVp2OpJG4sJDbZfgwBwfM4dkM7oomeWb9rBw/Q5WbdlLYXIkz15WyVvXn8yaW8dy+7S+eNwuFlXv\nsjv6hgovLbbeartgaI7thh3ZK5mC5EjbMsyIC2dIXgKzzEzG19Zs59T7F3LD0yvwuF2s/b9xLLjG\ncDftrmn82sHzy7Yw4tdvctUTS3lhxVcs+XwXHreLXqnRZDieEedg0dkuHnp7A1N+/y4L19dbgVYc\ntUdKFOP6pAD1ii80RCjuHktOgjEQmr96G7kJ9c/R3Em9gfoU7b+a8dY+aTH2ta0BHRgvcF85soC/\nXVCvZIOJVjDt5HnT7J1gjjBFhAn9Uuzj
bpcwa0i23TlHNJgOJDcxkon9jIZrdSpWZ5abEMGsyhzW\n/O9YZlXm8Nb1J/O3CyqY3D+N8mxjpYKbJhbhcbuICzdGT5a7rm96DIPzfIzslWz7hi8Ylsuk/mnE\nRXgYkufjky17OVKn2Huwlm0NUqatQDRgP6D9HW68XqnRdjnzzY7z9D8Yy+8s+WIXc542soUev7CC\nuHAPh+sUm3YZwVmXS+wGYXXOVgN6d90ORv3mLXvkuGP/ITu+894NI3j0vIEUd48l2XQDedwuLh+R\nb5drYE68vX3TsyvtEeZLK7fymZnVc8aA+s6pLDOevMQI7pjWDxEhzZR19dZ9bN1rZAc9fmEF/7pk\nsN+98Zmj1fykSLJ8EWTE1Xcucyf1Zkpxuj2irSr0d8k572FqTGN3VnOuVsuVWZEbz/rbxjOlOJ1X\nr67iYXP0C3DPjBL+cE4Zf5o1gDeuHW7f26I0Q/EM75HIZ9v2M/e5VX7W3XNLjWf5XFPBWC6dxCgv\nIsI9M0qYWpxm3oP6QUJ+UhSPnlfByz+pom9GDGtuHUe/jBj+vXQL/ea94mcpx4Z7uPikPAqTI+mX\nEYPXHcLpprI466EPePOzr+mdFkNx91i87hDCQkNwuYQcXwSvmJbzo+cNpH/3WL/7YqW+n1WRyY0T\ni3jxqmH854qhRHrd5DoUu/XMzp1UhNft4q/vf24nmgzK9SEidluykg9umljEqF5JzBiY6feblz/+\nMQ+/s5FBuT48bpcdbB/VK4k3rz3Z79wzyjNIiPRyx3/XsOzL3Zz10Af2gPLTrXuJ9LpJi+nGOYOM\ne/+SmTX36uyTeObSIXYd7jt4mPykSHuAYcnjcbsI94TYLr8+6fVtteHLt1ePLvRLHgkmWsG0EytG\nMasy296XnxTFY+cbI4QflKQzb3JvO6DmcgkDs+s7QcuaALhpQhGhIWLntfvMTt4ZjKvMT+CeGSVM\nK80gOdprZ4KkxBgN/nrTVWSNlkNcQrTpVilIqm9ohclRLNu0h7yfvmiPlpxK5cUrh9nbVoNLig6j\nMt9HhCeE5y+vtI9bjX3r3oO8t34n0+5baB8bkpdgd777Dx22y/nClUN57PwK+4XSn0813g1Z43CR\nWFbaw+8YvvH4CE/AqVP6mo2pNDOWP/9oALOGZOOL8PBRdX1G0YfV37DNVBiWqwuMqTkWXDPcbnAv\nz67CE+Li3XU7+O5wHSkxYQzJSyDNMToVEU7qYbi6rIY/Z1xPilKjWXnLGMaao1CrE2gY87EIcQll\nWcZA4ZxB9Z3XWRWZAc+HegWaFBVmp1hHhYXa7ksw6n5snxRO7plEdkIEr11zEm9cO5yyLOO7xeYs\nzv9avIlAxIV7mFZav5ySJaP1W9B4pokQl9iWHUBaTDfbBbq7ppbyrDiuGJHP9WN6ML5vKq/MPsmO\nxTk7wyN1yk4CcJKTEGFfr8CRNWVRkhnHgmtO4mYzxpmfFGlft8ShjCxLR0T8BgXj+6ZwvxnnigsP\nxeN28bH5jlnf9BgenjmAq0cXMql/GrOGZHPdmB72d63ftJ7XotRoYsJDuevMYlsBZPkibOvEwrK6\n56/eTkFyJC6X2Nd4dukW3C6jjFZZrUFZUVo014/tyeIbR/lNKXX/OWX29mhHhlhLseFgohVMO9m6\n5yCJUd5GLo2BOfFcObKAOeN6NvrOPy4eZG9bo0qAMwZ0Z9ncUxicZ3RIzU35XZoZxwc/rffTW3EB\ni0xHI3WbE+jlOxSMs5HO+tNHAJxdkWXvS3IEiocW1HeQD5xbzns/Hek300BYaIjd4KwX4vKTIm1X\nQ++0+g4kxezcC5Kj/K5bmZ/Anaf395PBilds23uIKK+7yWlhKnJ9zBnXk0dmDiDc42be5N6cVl5v\npVgun+eWbvEbeQciKiyUsqw4XjMDws7Oz0lJpqEYrFHoxH5pvHjVML9ZsC8Ylsuym0/B18xv/mRU\nIaN6JXP+0FyyfOEkRHpt66yp8x+ZWU7vtMBusECICNkOJdG3QULJvElFdjYYGFbGnaf1p296DNNK\n0/2Ua89UowMM9zYfG+we383vczdPCNec0sPvubJIjQnzi+UUN7BOAHLMWFm4JyRgEgMYijUswBxg\n3ePDuXt6caN4kSXXxH6p3Hd2md1ZixgdvTXYsbI0E6O8/G5GCfMm9+ZHldkMzIln7qQiu10NL0zi\n7unFXGQOGqeWpNPLvF8p0WGMNAeOlgLduONbFq7fwebdB+zEB1+k17533ePD7XYmIljzYJZkxhLi\nkkbP1TDHQKYoNZqPbxrNxtvH+1lwx5suNdllZ+Qrc1TcEI/bxdWjAwf8RYQZAzMDrj4X7nFTnh1H\n9/huXH5yfoBvBybC62ZYQQJvr93B1OI0v8741ql9mPvcSgqS6keYVQWJja5xYVUudy9Ya7vzrj2l\nkOqdNQzJq39wm1pGwGogTy3ZRE5ChJ02CYayu6gql/mrtzGyZ1LA74PR0D/aaARjw0JDuHR4Hodq\nj/CX9z5vNrgd4hIucViCAP3SjU7qh4OzOGdQlp1FFSgQ20iWlCje27ATl9RbRw2Z1C+VZz/e7Ge5\nBipXS5MX5idF2u6tZy6tbHF+tgivm5G92vf+QlRYKKeWZvDUkk089ePBlGXFM6syh3sWrGXL7gO2\nZfTMpUMazfh9amkGdXWKU0sDJ6tYOAcz0PR9BKM9PPhD4x7s3H8ooEK2rKiESG+zS0Y3xZTidKYU\n+y9ya1kEgRRaz5QolpvxMCtDy0m4x80/Lx7st8/lkka/UWpmf/kiPQzvkcTSm0cTFhpCxW0L+OKb\nGjsj7wqHlXHFiAKuf3J5o3LNndSbG55eQf+MxuW1fn/2qEK27zuIyyUBs9qON9JRC9F0BsrLy9Wi\nRYtaPrEZRtz5BoXJUfzh3LKWTz4ONDUDbyA2fL2fxZ/vsjOwqu+YwNY9B/G6Xa1+OL/ed4hBty/g\nSJ1ibO+UY3Y/Xl611Z5GpvqOCUf9vYO1R7j3tXX8qDIbX6SXJz78gjlPr6AoNZoXrxrW7Hetc6O8\nblbcMqZd5e/M7Pr2u6B1Qlt2H2DGQ+9zUVUuo3sl44v0Nrs8RUvs3H+IX770KWcPyrQXamsvc55a\nzhMffck/LhpERa7P79jDb2/g5y8Yrwe05rlrSM13h3n1k21M7JfmJ/+Ue98hKiyULXsOkJsQwcMz\nB9jHlFK8vGorg3MTGg1Q6urUcXl/pSVEZLFSqryl87QF0w4O1h6heue3TDjOueXN0ZrRXW5iZCPX\nXkoAa+xoSIzyUpYZx4fV31Do8MW3l8F5RsM/mkkAnYSFhnCtw08+rk8qt/93jR3APprfHNPAZ36i\nEcwRblpsN9687uSWTzxKfJFefnHasU2xvWBYLlFhbjsO5iQv6di4lcI97kZWDRgW2TvrdrCrppbx\nffz7DxFhbJ/AfUpnUC6tQSuYdvDjxxZTp/BLP+1qZMR147SyDL+gblsZmBNvKJgAQdi2Eh0Wyr1n\
nlfilaLeFmPBQlt48+qgUcJYvgldnV/nFHjQnHvlJkfxsQuAXn6sKErljWl8mF6cFPN5echIiedbM\n2msqrfxEQCuYdvCWOSvtkDxfC2d2XkSkUXC9rVxYlUtchMd+S/1YYaVxt5fWWHcFycfOCtN0PUJc\nwvSBTWfztZexfVL47Xzj5WcrEeBERCuYNnKw9ghH6hSzRxUGzIz5PhLTLZTzh+Z0dDE0mk5Pj5Qo\n7p5ezPJNe+zZxU9EtIJpI9Z0KoFy9jUajaYlAmW2nWjo92DaQPWObznjAWPq80ytYDQajSYgWsG0\nEWv+qKx2Bp81Go3mREW7yNpAdkIEU4vT2LTrQLNvaWs0Gs33Ga1g2shvzyzu6CJoNBpNp0YrmDbS\nlukqNBqN5vuEjsFoNBqNJihoBaPRaDSaoPC9nuxSRL4GPm/xxKZJAHa0eFbn50SRA7QsnZETRQ7Q\nslhkKaUaT8negO+1gmkvIrLoaGYU7eycKHKAlqUzcqLIAVqW1qJdZBqNRqMJClrBaDQajSYoaAXT\nPh7s6AIcI04UOUDL0hk5UeQALUur0DEYjUaj0QQFbcFoNBqNJihoBdMGRGSsiHwqIutEZE5Hl+do\nEJFqEVkhIktFZJG5L15EXhWRteb/OMf5N5jyfSoiHbYwvYj8UUS2i8hKx75Wl1tEykz514nIPdIB\nUzE0Ics8Edls1stSERnf2WURke4i8rqIfCIiq0TkKnN/l6uXZmTpivUSJiIfisgyU5ZbzP0dVy9K\nKf3Xij8gBFgP5AIeYBlQ1NHlOopyVwMJDfb9Ephjbs8BfmFuF5lyeYEcU96QDip3FVAKrGxPuYEP\ngUGAAP8FxnUSWeYB1wY4t9PKAqQCpeZ2FPCZWd4uVy/NyNIV60WASHM7FPjALE+H1Yu2YFrPQGCd\nUmqDUuo74AlgSgeXqa1MAf5ibv8FmOrY/4RS6pBSaiOwDkPu445S6i3gmwa7W1VuEUkFopVS7yuj\n9Tzq+M5xowlZmqLTyqKU+koptcTc3gesBtLpgvXSjCxN0ZllUUqp/ebHUPNP0YH1ohVM60kHvnR8\n3kTzD2RnQQHzRWSxiFxk7ktWSn1lbm8Fks3tzi5ja8udbm433N9ZuEJElpsuNMt90SVkEZFsoARj\ntNyl66WBLNAF60VEQkRkKbAdeFUp1aH1ohXM94ehSqliYBxwmYhUOQ+aI5Uul1LYVcvt4H4Md2sx\n8BXw644tztEjIpHAU8BPlFJ7nce6Wr0EkKVL1otS6ojZzjMwrJE+DY4f13rRCqb1bAa6Oz5nmPs6\nNUqpzeb/7cAzGC6vbaY5jPl/u3l6Z5exteXebG433N/hKKW2mZ1CHfAQ9a7ITi2LiIRidMh/U0o9\nbe7ukvUSSJauWi8WSqndwOvAWDqwXrSCaT0fAQUikiMiHmA68O8OLlOziEiEiERZ28ApwEqMcs80\nT5sJPGdu/xuYLiJeEckBCjCCfp2FVpXbdA/sFZFBZjbMDx3f6VCshm/yA4x6gU4si/m7jwCrlVK/\ncRzqcvXSlCxdtF4SRSTW3O4GjAbW0JH1cjyzHE6UP2A8RrbJeuBnHV2eoyhvLka2yDJglVVmwAcs\nANYC84F4x3d+Zsr3KR2QceUox98xXBS1GL7g89tSbqAco5NYD9yL+ZJxJ5Dlr8AKYLnZ4FM7uyzA\nUAw3y3Jgqfk3vivWSzOydMV66Qd8bJZ5JXCzub/D6kW/ya/RaDSaoKBdZBqNRqMJClrBaDQauWk1\negAAApZJREFUjSYoaAWj0Wg0mqCgFYxGo9FogoJWMBqNRqMJClrBaDQajSYoaAWj0bQREYkVkUsd\nn9NE5Mkg/dZUEbn5GFznThEZcSzKpNG0hH4PRqNpI+bkiP9RSvVp4dRj8VsLgclKqR3tvE4W8JBS\n6pRjUzKNpmm0BaPRtJ07gDxzQapfiUi2mIuJicgsEXnWXOCpWkQuF5GrReRjEXlfROLN8/JE5CVz\nluu3RaRnwx8RkULgkKVcROTPInK/eZ0NIjLcnPF3tYj82TwnxDxvpblw1GwApdTngE9EUo7PLdJ8\nn3F3dAE0mi7MHKCPMmavtSwaJ30wpn8Pw1hr43+UUiUi8luM+Z3uAh4ELlFKrRWRCuA+oKELqxJY\n0mBfHDAYmIwxlUklcAHwkYgUYyyMl25ZV9YcVSZLzPOfapvYGs3RoRWMRhM8XlfGIlb7RGQP8Ly5\nfwXQz5wifgjwL8eKtN4A10kFvm6w73mllBKRFcA2pdQKABFZBWQDbwK5IvI74AXgFcd3twNp7RVO\no2kJrWA0muBxyLFd5/hch9H2XMBuywJqhgNATBPXdl7XvrZSapeI9AfGAJcAZwDnmeeEmdfUaIKK\njsFoNG1nH8Y67m1CGQtbbRSR08GYOt5UCg1ZDeS35toikgC4lFJPATcCpY7DhdRPP6/RBA2tYDSa\nNqKU2gm8awbSf9XGy5wNnC8i1lIKUwKc8xZQIg4/2lGQDrxhLp/7GHAD2Itr5QOL2lhejeao0WnK\nGk0XQETuxoi7zG/ndX4AlCqlbjo2JdNomkZbMBpN1+A2IPwYXMdNF1lfXtP10RaMRqPRaIKCtmA0\nGo1GExS0gtFoNBpNUNAKRqPRaDRBQSsYjUaj0QQFrWA0Go1GExT+Hyvh9rNxc0qFAAAAAElFTkSu\nQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from bmtk.analyzer.cell_vars import plot_report\n",
+ "\n",
+ "plot_report(config_file='simulation_config.json', gids=[0])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 5. Modifying the network"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Customized node params\n",
+ "\n",
+ "When building our cortex nodes, we used some built-in functions to set certain parameters like positions and y-axis rotations:\n",
+ "```python\n",
+ "cortex.add_nodes(N=100,\n",
+ " pop_name='Scnn1a',\n",
+ " positions=positions_columinar(N=100, center=[0, 50.0, 0], max_radius=30.0, height=100.0),\n",
+ " rotation_angle_yaxis=xiter_random(N=100, min_x=0.0, max_x=2*np.pi),\n",
+ " ...\n",
+ "```\n",
+ "\n",
+ "These functions will assign every cell a unique value in the positions and rotation_angel_yaxis parameters, unlike the pop_name parameter which will be the same for all 100 cells. We can verify by the following code:\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "cell 0: pop_name: Scnn1a, positions: [ -1.99484437 41.49527042 22.33923077], angle_yaxis: 5.29759513272\n",
+ "cell 1: pop_name: Scnn1a, positions: [-25.72073426 36.01835631 2.43526216], angle_yaxis: 2.94311607964\n"
+ ]
+ }
+ ],
+ "source": [
+ "cortex_nodes = list(cortex.nodes())\n",
+ "n0 = cortex_nodes[0]\n",
+ "n1 = cortex_nodes[1]\n",
+ "print('cell 0: pop_name: {}, positions: {}, angle_yaxis: {}'.format(n0['pop_name'], n0['positions'], n0['rotation_angle_yaxis']))\n",
+ "print('cell 1: pop_name: {}, positions: {}, angle_yaxis: {}'.format(n1['pop_name'], n1['positions'], n1['rotation_angle_yaxis']))\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The Network Builder contains a growing number of built-in functions. However for advanced networks a modeler will probably want to assign parameters using their own functions. To do so, a modeler only needs to passes in, or alternatively create a function that returns, a list of size N. When saving the network, each individual position will be saved in the nodes.h5 file assigned to each cell by gid.\n",
+ "\n",
+ "```python\n",
+ "def cortex_positions(N):\n",
+ " # codex to create a list/numpy array of N (x, y, z) positions.\n",
+ " return [...]\n",
+ "\n",
+ "cortex.add_nodes(N=100,\n",
+ " positions=cortex_positions(100),\n",
+ " ...\n",
+ "```\n",
+ "\n",
+ "or if we wanted we could give all cells the same positions (The builder has no restrictions on this, however this may cause issues if your trying to create connections based on distance). When saving the network, the same position is assigned as a global cell-type property, and thus saved in the node_types.csv file.\n",
+ "```python\n",
+ "cortex.add_nodes(N=100,\n",
+ " positions=np.ndarray([100.23, -50.67, 89.01]),\n",
+ " ...\n",
+ "```\n",
+ "\n",
+ "We can use the same logic not just for positions and rotation_angle, but for any parameter we choose."
+ ]
+ },
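+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "For instance, here is a minimal sketch of assigning a custom per-cell parameter. The parameter name firing_gain and its values are hypothetical, chosen purely for illustration; any keyword not reserved by the builder is simply stored as a node property:\n",
+ "```python\n",
+ "import numpy as np\n",
+ "\n",
+ "# firing_gain is a hypothetical parameter; an array of size N\n",
+ "# gives each of the N cells its own value in nodes.h5\n",
+ "cortex.add_nodes(N=100,\n",
+ "                 pop_name='Scnn1a_custom',\n",
+ "                 firing_gain=np.random.uniform(0.5, 1.5, size=100))\n",
+ "```"
+ ]
+ },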
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Customized connector functions\n",
+ "\n",
+ "When creating edges, we used the built-in distance_connector function to help create the connection matrix. There are a number of built-in connection functions, but we also allow modelers to create their own. To do so, the modeler must create a function that takes in a source, target, and a variable number of parameters, and pass back an natural number representing the number of connections.\n",
+ "\n",
+ "The Builder will iterate over that function passing in every source/target node pair (filtered by the source and target parameters in add_edges()). The source and target parameters are essentially dictionaries that can be used to fetch properties of the nodes. A typical example would look like:\n",
+ "\n",
+ "```python\n",
+ "def customized_connector(source, target, param1, param2, param3):\n",
+ " if source.node_id == target.node_id:\n",
+ " # necessary if we don't want autapses\n",
+ " return 0\n",
+ " source_pot = source['potential']\n",
+ " target_pot = target['potential']\n",
+ " # some code to determine number of connections\n",
+ " return n_synapses\n",
+ " \n",
+ "...\n",
+ "cortex.add_edges(source=, target=,\n",
+ " connection_rule=customized_connector,\n",
+ " connection_params={'param1': , 'param2': , 'param3': },\n",
+ " ...\n",
+ "```"
+ ]
+ },
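+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As a concrete illustration, below is a minimal, self-contained version of the sketch above. The rule, the function name, and all parameter values are hypothetical, chosen only to show the contract (return 0 or None for no connection, otherwise a synapse count):\n",
+ "```python\n",
+ "import random\n",
+ "\n",
+ "def potential_connector(source, target, p_connect, nsyn_min, nsyn_max):\n",
+ "    if source.node_id == target.node_id:\n",
+ "        # no autapses\n",
+ "        return 0\n",
+ "    # hypothetical rule: connect cells whose 'potential' labels match,\n",
+ "    # with probability p_connect\n",
+ "    if source['potential'] != target['potential'] or random.random() > p_connect:\n",
+ "        return 0\n",
+ "    # number of synapses for this connection\n",
+ "    return random.randint(nsyn_min, nsyn_max)\n",
+ "\n",
+ "cortex.add_edges(source={'potential': 'exc'}, target={'potential': 'exc'},\n",
+ "                 connection_rule=potential_connector,\n",
+ "                 connection_params={'p_connect': 0.1, 'nsyn_min': 1, 'nsyn_max': 5},\n",
+ "                 syn_weight=5.0e-05,\n",
+ "                 delay=2.0,\n",
+ "                 dynamics_params='AMPA_ExcToExc.json',\n",
+ "                 model_template='exp2syn')\n",
+ "```"
+ ]
+ },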
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "anaconda-cloud": {},
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.13"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/bmtk-vb/docs/tutorial/04_multi_pop.ipynb b/bmtk-vb/docs/tutorial/04_multi_pop.ipynb
new file mode 100644
index 0000000..bd4f057
--- /dev/null
+++ b/bmtk-vb/docs/tutorial/04_multi_pop.ipynb
@@ -0,0 +1,735 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Chapter 4: Muti-population recurrent network (with BioNet) \n",
+ "\n",
+ "Here we will create a heterogenous yet relatively small network consisting of hundreds of cells recurrently connected. All cells will be belong to one of four \"cell-types\". Two of these cells types will be biophysically detailed cells, i.e. containing a full morphology and somatic and dendritic channels and receptors. The other two will be point-neuron models, which lack a full morphology or channels but still active to provide inhibitory and excitory dynamics.\n",
+ "\n",
+ "As input to drive the simulation, we will also create an external network of \"virtual cells\" that synapse directly onto our internal cells and provides spike trains stimulus\n",
+ "\n",
+ "**Note** - scripts and files for running this tutorial can be found in the directory [sources/chapter04/](sources/chapter04)\n",
+ "\n",
+ "requirements:\n",
+ "* bmtk\n",
+ "* NEURON 7.4+"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 1. Building the network\n",
+ "\n",
+ "#### cells\n",
+ "\n",
+ "This network will loosely resemble the mouse V1 cortical column. Along the center of the column will be a population of 50 biophysically detailed neuron: 40 excitory Scnn1a cells and 10 inhibitory PV cells, which we will get their morphologies and biophysical properties from the allen cell-types database. "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "\n",
+ "from bmtk.builder.networks import NetworkBuilder\n",
+ "from bmtk.builder.aux.node_params import positions_columinar, xiter_random\n",
+ "\n",
+ "net = NetworkBuilder(\"V1\")\n",
+ "net.add_nodes(N=80, pop_name='Scnn1a',\n",
+ " positions=positions_columinar(N=80, center=[0, 50.0, 0], max_radius=30.0, height=100.0),\n",
+ " rotation_angle_yaxis=xiter_random(N=80, min_x=0.0, max_x=2*np.pi),\n",
+ " rotation_angle_zaxis=xiter_random(N=80, min_x=0.0, max_x=2*np.pi),\n",
+ " tuning_angle=np.linspace(start=0.0, stop=360.0, num=80, endpoint=False),\n",
+ " location='VisL4',\n",
+ " ei='e',\n",
+ " model_type='biophysical',\n",
+ " model_template='ctdb:Biophys1.hoc',\n",
+ " model_processing='aibs_perisomatic',\n",
+ " dynamics_params='472363762_fit.json',\n",
+ " morphology='Scnn1a.swc')\n",
+ "\n",
+ "net.add_nodes(N=20, pop_name='PV',\n",
+ " positions=positions_columinar(N=20, center=[0, 50.0, 0], max_radius=30.0, height=100.0),\n",
+ " rotation_angle_yaxis=xiter_random(N=20, min_x=0.0, max_x=2*np.pi),\n",
+ " rotation_angle_zaxis=xiter_random(N=20, min_x=0.0, max_x=2*np.pi),\n",
+ " location='VisL4',\n",
+ " ei='i',\n",
+ " model_type='biophysical',\n",
+ " model_template='ctdb:Biophys1.hoc',\n",
+ " model_processing='aibs_perisomatic',\n",
+ " dynamics_params='472912177_fit.json',\n",
+ " morphology='Pvalb.swc')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "To set the position and rotation of each cell, we use the built in function positions_columinar and xiter_random, which returns a list of values given the parameters. A user could set the values themselves using a list (or function that returns a list) of size N. The parameters like location, ei (potential), params_file, etc. are cell-type parameters, and will be used for all N cells of that type.\n",
+ "\n",
+ "The excitory cells are also given a tuning_angle parameter. An instrinsic \"tuning angle\" is a property found in some cells in the visual cortex. In this model, we will use this property to determine number of strenght of connections between subsets of cells by using custom functions. But in general most models will not have or use a tuning angle, but they may require some other parameter. In general, users can assign whatever custom parameters they want to cells and cell-types and use them as properties for creating connections and running simulations.\n",
+ "\n",
+ "Next we continue to create our point (integrate-and-fire) neurons. Notice they don't have properities like y/z rotation or morphology, as they wouldn't apply to point neurons."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "net.add_nodes(N=200, pop_name='LIF_exc',\n",
+ " positions=positions_columinar(N=200, center=[0, 50.0, 0], min_radius=30.0, max_radius=60.0, height=100.0),\n",
+ " tuning_angle=np.linspace(start=0.0, stop=360.0, num=200, endpoint=False),\n",
+ " location='VisL4',\n",
+ " ei='e',\n",
+ " model_type='point_process',\n",
+ " model_template='nrn:IntFire1',\n",
+ " dynamics_params='IntFire1_exc_1.json')\n",
+ "\n",
+ "net.add_nodes(N=100, pop_name='LIF_inh',\n",
+ " positions=positions_columinar(N=100, center=[0, 50.0, 0], min_radius=30.0, max_radius=60.0, height=100.0),\n",
+ " location='VisL4',\n",
+ " ei='i',\n",
+ " model_type='point_process',\n",
+ " model_template='nrn:IntFire1',\n",
+ " dynamics_params='IntFire1_inh_1.json')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### connections\n",
+ "\n",
+ "Now we want to create connections between the cells. Depending on the model type, and whether or not the presynpatic \"source\" cell is excitory or inhibitory, we will have different synpatic model and parameters. Using the source and target filter parameters, we can create different connection types.\n",
+ "\n",
+ "To determine excitory-to-excitory connection matrix we want to use distance and tuning_angle property. To do this we create a customized function \"dist_tuning_connector\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "import random\n",
+ "import math\n",
+ "\n",
+ "def dist_tuning_connector(source, target, d_weight_min, d_weight_max, d_max, t_weight_min, t_weight_max, nsyn_min,\n",
+ " nsyn_max):\n",
+ " if source['node_id'] == target['node_id']:\n",
+ " # Avoid self-connections.n_nodes\n",
+ " return None\n",
+ "\n",
+ " r = np.linalg.norm(np.array(source['positions']) - np.array(target['positions']))\n",
+ " if r > d_max:\n",
+ " dw = 0.0\n",
+ " else:\n",
+ " t = r / d_max\n",
+ " dw = d_weight_max * (1.0 - t) + d_weight_min * t\n",
+ "\n",
+ " if dw <= 0:\n",
+ " # drop the connection if the weight is too low\n",
+ " return None\n",
+ "\n",
+ " # next create weights by orientation tuning [ aligned, misaligned ] --> [ 1, 0 ], Check that the orientation\n",
+ " # tuning property exists for both cells; otherwise, ignore the orientation tuning.\n",
+ " if 'tuning_angel' in source and 'tuning_angle' in target:\n",
+ "\n",
+ " # 0-180 is the same as 180-360, so just modulo by 180\n",
+ " delta_tuning = math.fmod(abs(source['tuning_angle'] - target['tuning_angle']), 180.0)\n",
+ "\n",
+ " # 90-180 needs to be flipped, then normalize to 0-1\n",
+ " delta_tuning = delta_tuning if delta_tuning < 90.0 else 180.0 - delta_tuning\n",
+ "\n",
+ " t = delta_tuning / 90.0\n",
+ " tw = t_weight_max * (1.0 - t) + t_weight_min * t\n",
+ " else:\n",
+ " tw = dw\n",
+ "\n",
+ " # drop the connection if the weight is too low\n",
+ " if tw <= 0:\n",
+ " return None\n",
+ "\n",
+ " # filter out nodes by treating the weight as a probability of connection\n",
+ " if random.random() > tw:\n",
+ " return None\n",
+ "\n",
+ " # Add the number of synapses for every connection.\n",
+ " # It is probably very useful to take this out into a separate function.\n",
+ " return random.randint(nsyn_min, nsyn_max)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "This first two parameters of this function is \"source\" and \"target\" and are required for all custom connector functions. These are node objects which gives a representation of a single source and target cell, with properties that can be accessed like a python dictionary. When The Network Builder is creating the connection matrix, it will call this function for all possible source-target pairs. The user doesn't call this function directly.\n",
+ "\n",
+ "The remaining parameters are optional. Using these parameters, plus the distance and angles between source and target cells, this function determines the number of connections between each given source and target cell. If there are none you can return either None or 0.\n",
+ "\n",
+ "To create these connections we call add_edges method of the builder. We use the source and target parameter to filter out only excitory-to-excitory connections. We must also take into consideration the model type (biophysical or integrate-and-fire) of the target when setting parameters. We pass in the function throught the connection_rule parameter, and the function parameters (except source and target) through connection_params. (If our dist_tuning_connector function didn't have any parameters other than source and target, we could just not set connection_params)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "net.add_edges(source={'ei': 'e'}, target={'pop_name': 'Scnn1a'},\n",
+ " connection_rule=dist_tuning_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 0.34, 'd_max': 300.0, 't_weight_min': 0.5,\n",
+ " 't_weight_max': 1.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=6.4e-05,\n",
+ " weight_function='gaussianLL',\n",
+ " weight_sigma=50.0,\n",
+ " distance_range=[30.0, 150.0],\n",
+ " target_sections=['basal', 'apical'],\n",
+ " delay=2.0,\n",
+ " dynamics_params='AMPA_ExcToExc.json',\n",
+ " model_template='exp2syn')\n",
+ "\n",
+ "net.add_edges(source={'ei': 'e'}, target={'pop_name': 'LIF_exc'},\n",
+ " connection_rule=dist_tuning_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 0.34, 'd_max': 300.0, 't_weight_min': 0.5,\n",
+ " 't_weight_max': 1.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=0.0019,\n",
+ " weight_function='gaussianLL',\n",
+ " weight_sigma=50.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='instanteneousExc.json',\n",
+ " model_template='exp2syn')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Similarly we create the other types of connections. But since either the source, target, or both cells will not have the tuning_angle parameter, we don't want to use dist_tuning_connector. Instead we can use the built-in distance_connector function which just creates connections determined by distance."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "from bmtk.builder.aux.edge_connectors import distance_connector\n",
+ "\n",
+ "### Generating I-to-I connections\n",
+ "net.add_edges(source={'ei': 'i'}, target={'ei': 'i', 'model_type': 'biophysical'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 1.0, 'd_max': 160.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=0.0002,\n",
+ " weight_function='wmax',\n",
+ " distance_range=[0.0, 1e+20],\n",
+ " target_sections=['somatic', 'basal'],\n",
+ " delay=2.0,\n",
+ " dynamics_params='GABA_InhToInh.json',\n",
+ " model_template='exp2syn')\n",
+ "\n",
+ "net.add_edges(source={'ei': 'i'}, target={'ei': 'i', 'model_type': 'point_process'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 1.0, 'd_max': 160.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=0.00225,\n",
+ " weight_function='wmax',\n",
+ " delay=2.0,\n",
+ " dynamics_params='instanteneousInh.json',\n",
+ " model_template='exp2syn')\n",
+ "\n",
+ "### Generating I-to-E connections\n",
+ "net.add_edges(source={'ei': 'i'}, target={'ei': 'e', 'model_type': 'biophysical'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 1.0, 'd_max': 160.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=0.00018,\n",
+ " weight_function='wmax',\n",
+ " distance_range=[0.0, 50.0],\n",
+ " target_sections=['somatic', 'basal', 'apical'],\n",
+ " delay=2.0,\n",
+ " dynamics_params='GABA_InhToExc.json',\n",
+ " model_template='exp2syn')\n",
+ "\n",
+ "net.add_edges(source={'ei': 'i'}, target={'ei': 'e', 'model_type': 'point_process'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 1.0, 'd_max': 160.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=0.009,\n",
+ " weight_function='wmax',\n",
+ " delay=2.0,\n",
+ " dynamics_params='instanteneousInh.json',\n",
+ " model_template='exp2syn')\n",
+ "\n",
+ "### Generating E-to-I connections\n",
+ "net.add_edges(source={'ei': 'e'}, target={'pop_name': 'PV'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 0.26, 'd_max': 300.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=0.00035,\n",
+ " weight_function='wmax',\n",
+ " distance_range=[0.0, 1e+20],\n",
+ " target_sections=['somatic', 'basal'],\n",
+ " delay=2.0,\n",
+ " dynamics_params='AMPA_ExcToInh.json',\n",
+ " model_template='exp2syn')\n",
+ "\n",
+ "\n",
+ "net.add_edges(source={'ei': 'e'}, target={'pop_name': 'LIF_inh'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 0.26, 'd_max': 300.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=0.0043,\n",
+ " weight_function='wmax',\n",
+ " delay=2.0,\n",
+ " dynamics_params='instanteneousExc.json',\n",
+ " model_template='exp2syn')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Finally we build the network (this may take a bit of time since it's essentially iterating over all 400x400 possible connection combinations), and save the nodes and edges."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "net.build()\n",
+ "net.save_nodes(output_dir='network')\n",
+ "net.save_edges(output_dir='network')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Building external network\n",
+ "\n",
+ "Next we want to create an external network consisting of virtual cells that form a feedforward network onto our V1, which will provide input during the simulation. We will call this LGN, since the LGN is the primary input the layer 4 cells of the V1 (if we wanted to we could also create multiple external networks and run simulations on any number of them). \n",
+ "\n",
+ "First we build our LGN nodes. Then we must import the V1 network nodes, and create connections between LGN --> V1."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "from bmtk.builder.networks import NetworkBuilder\n",
+ "\n",
+ "lgn = NetworkBuilder('LGN')\n",
+ "lgn.add_nodes(N=500,\n",
+ " pop_name='tON',\n",
+ " potential='exc',\n",
+ " model_type='virtual')\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As before, we will use a customized function to determine the number of connections between each source and target pair, however this time our connection_rule is a bit different\n",
+ "\n",
+ "In the previous example, our connection_rule function's first two arguments were the presynaptic and postsynaptic cells, which allowed us to choose how many synaptic connections between the pairs existed based on individual properties:\n",
+ "```python\n",
+ "def connection_fnc(source, target, ...):\n",
+ " source['param'] # presynaptic cell params\n",
+ " target['param'] # postsynaptic cell params\n",
+ " ...\n",
+ " return nsyns # number of connections between pair\n",
+ "```\n",
+ "\n",
+ "But for our LGN --> V1 connection, we do things a bit differently. We want to make sure that for every source cell, there are a limited number of presynaptic targets. This is a not really possible with a function that iterates on a one-to-one basis. So instead we have a connector function who's first parameter is a list of all N source cell, and the second parameter is a single target cell. We return an array of integers, size N; which each index representing the number of synaptics between sources and the target. \n",
+ "\n",
+ "To tell the builder to use this schema, we must set iterator='all_to_one' in the add_edges method. (By default this is set to 'one_to_one'. You can also use 'one_to_all' iterator which will pass in a single source and all possible targets)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def select_source_cells(sources, target, nsources_min=10, nsources_max=30, nsyns_min=3, nsyns_max=12):\n",
+ " total_sources = len(sources)\n",
+ " nsources = np.random.randint(nsources_min, nsources_max)\n",
+ " selected_sources = np.random.choice(total_sources, nsources, replace=False)\n",
+ " syns = np.zeros(total_sources)\n",
+ " syns[selected_sources] = np.random.randint(nsyns_min, nsyns_max, size=nsources)\n",
+ " return syns\n",
+ "\n",
+ "lgn.add_edges(source=lgn.nodes(), target=net.nodes(pop_name='Scnn1a'),\n",
+ " iterator='all_to_one',\n",
+ " connection_rule=select_source_cells,\n",
+ " connection_params={'nsources_min': 10, 'nsources_max': 25},\n",
+ " syn_weight=4e-03,\n",
+ " weight_function='wmax',\n",
+ " distance_range=[0.0, 150.0],\n",
+ " target_sections=['basal', 'apical'],\n",
+ " delay=2.0,\n",
+ " dynamics_params='AMPA_ExcToExc.json',\n",
+ " model_template='exp2syn')\n",
+ "\n",
+ "lgn.add_edges(source=lgn.nodes(), target=net.nodes(pop_name='PV1'),\n",
+ " connection_rule=select_source_cells,\n",
+ " connection_params={'nsources_min': 15, 'nsources_max': 30},\n",
+ " iterator='all_to_one',\n",
+ " syn_weight=0.001,\n",
+ " weight_function='wmax',\n",
+ " distance_range=[0.0, 1.0e+20],\n",
+ " target_sections=['somatic', 'basal'],\n",
+ " delay=2.0,\n",
+ " dynamics_params='AMPA_ExcToInh.json',\n",
+ " model_template='exp2syn')\n",
+ "\n",
+ "lgn.add_edges(source=lgn.nodes(), target=net.nodes(pop_name='LIF_exc'),\n",
+ " connection_rule=select_source_cells,\n",
+ " connection_params={'nsources_min': 10, 'nsources_max': 25},\n",
+ " iterator='all_to_one',\n",
+ " syn_weight= 0.045,\n",
+ " weight_function='wmax',\n",
+ " delay=2.0,\n",
+ " dynamics_params='instanteneousExc.json',\n",
+ " model_template='exp2syn')\n",
+ "\n",
+ "lgn.add_edges(source=lgn.nodes(), target=net.nodes(pop_name='LIF_inh'),\n",
+ " connection_rule=select_source_cells,\n",
+ " connection_params={'nsources_min': 15, 'nsources_max': 30},\n",
+ " iterator='all_to_one',\n",
+ " syn_weight=0.02,\n",
+ " weight_function='wmax',\n",
+ " delay=2.0,\n",
+ " dynamics_params='instanteneousExc.json',\n",
+ " model_template='exp2syn')\n",
+ "\n",
+ "\n",
+ "lgn.build()\n",
+ "lgn.save_nodes(output_dir='network')\n",
+ "lgn.save_edges(output_dir='network')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 2. Setting up BioNet\n",
+ "\n",
+ "#### file structure.\n",
+ "\n",
+ "Before running a simulation, we will need to create the runtime environment, including parameter files, run-script and configuration files. If using the tutorial these files will already be in place. Otherwise we can use a command-line:\n",
+ "```bash\n",
+ " $ python -m bmtk.utils.sim_setup -n network --membrane_report-vars v,cai --membrane_report-cells 10,80 --membrane_report-sections soma --tstop 3000.0 --dt 0.1 bionet\n",
+ "```\n",
+ "\n",
+ "#### cell models\n",
+ "\n",
+ "Also our cortex cell uses excitatory and inhbitory biophysical cell models we can download from the Allen Cell-Types Database\n",
+ "```bash\n",
+ " $ wget http://celltypes.brain-map.org/neuronal_model/download/482934212 http://celltypes.brain-map.org/neuronal_model/download/478809612\n",
+ " $ unzip 482934212 -d scnn1a\n",
+ " $ cp scnn1a/fit_parameters.json biophys_components/biophysical_neuron_templates/472363762_fit.json\n",
+ " $ cp scnn1a/reconstruction.swc biophys_components/morphologies/Scnn1a.swc\n",
+ " $ unzip 478809612 -d pvalb\n",
+ " $ cp pvalb/fit_parameters.json biophys_components/biophysical_neuron_templates/472912177_fit.json\n",
+ " $ cp pvalb/reconstruction.swc biophys_components/morphologies/Pvalb.swc\n",
+ "```\n",
+ "\n",
+ "The network also contained two intfire neuronal types which are described by parameters files IntFire1_inh_1.json and IntFire1_exc_1.json. The sim_setup script ran above will create these files and add them under components/intfire/. You can open up these files in a text editor and change their parameters if required. These are very simple models, and later on we will show how to create more complex ones.\n",
+ "\n",
+ "\n",
+ "Similarly the synaptic parameters files are stored as json files under biophys_components/synaptic_models/. The Allen cell-types models also required some addition NEURON mechanisms which were added and built under components/mechanisms.\n",
+ "\n",
+ "For most network models we have found it convient to use the directory structure as set up by the script. However the user can choose whatever structure that works best in their workflow. Just make sure that everything is described in the \"components\" sections of the config.json:\n",
+ "```json\n",
+ "\"components\": {\n",
+ " \"morphologies_dir\": \"biophys_components/morphologies\", \n",
+ " \"point_neuron_models_dir\": \"biophys_components/intfire\", \n",
+ " \"templates_dir\": \"biophys_components/hoc_templates\", \n",
+ " \"biophysical_neuron_models_dir\": \"biophys_components/biophysical_neuron_templates\", \n",
+ " \"mechanisms_dir\": \"biophys_components/mechanisms\", \n",
+ " \"synaptic_models_dir\": \"compbiophys_componentsnents/synaptic_models\"\n",
+ "}, \n",
+ "```"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### lgn input\n",
+ "\n",
+ "We need to provide our LGN external network cells with spike-trains so they can activate our recurrent network. Previously we showed how to do this by generating csv files. We can also use NWB files, which are a common format for saving electrophysiological data in neuroscience.\n",
+ "\n",
+ "We can use any NWB file generated experimentally or computationally, but for this example we will use a preexsting one. First download the file:\n",
+ "```bash\n",
+ " $ wget https://github.com/AllenInstitute/bmtk/raw/develop/docs/examples/NWB_files/lgn_spikes.nwb\n",
+ "```\n",
+ "Then we must edit the simulation_config.json file to tell the simulator to find the nwb file and which network to associate it with.\n",
+ "\n",
+ "```json\n",
+ "\n",
+ "\"inputs\": {\n",
+ " \"LGN_spikes\": {\n",
+ " \"input_type\": \"spikes\",\n",
+ " \"module\": \"nwb\",\n",
+ " \"input_file\": \"$BASE_DIR/lgn_spikes.nwb\",\n",
+ " \"node_set\": \"LGN\",\n",
+ " \"trial\": \"trial_0\"\n",
+ " }\n",
+ "},\n",
+ "```\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 3. Running the simulation\n",
+ "\n",
+ "\n",
+ "We are close to running our simulation, however unlike in previous chapters we need a little more programming to before we can being. \n",
+ "\n",
+ "For most of the connections we added the parameter weight_function='wmax'. This is a built-in function that tells the simulator when creating a connection between two cells, just use the 'weight_max' value assigned to that given edge-type. \n",
+ "\n",
+ "However, when creating excitatory-to-excitatory connections we used weight_function='gaussianLL'. This is because we want to use the tuning_angel parameter, when avaiable, to determine the synaptic strength between two connections. First we create the function which takes in target, source and connection properties (which are just the edge-type and properties set in the add_edges method). Then we must register the function with the BioNet simulator:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "import math\n",
+ "from bmtk.simulator.bionet.pyfunction_cache import add_weight_function\n",
+ "\n",
+ "def gaussianLL(edge_props, source, target):\n",
+ " src_tuning = source['tuning_angle']\n",
+ " tar_tuning = target['tuning_angle']\n",
+ " w0 = edge_props[\"syn_weight\"]\n",
+ " sigma = edge_props[\"weight_sigma\"]\n",
+ "\n",
+ " delta_tuning = abs(abs(abs(180.0 - abs(float(tar_tuning) - float(src_tuning)) % 360.0) - 90.0) - 90.0)\n",
+ " return w0 * math.exp(-(delta_tuning / sigma) ** 2)\n",
+ "\n",
+ "add_weight_function(gaussianLL)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The weights will be adjusted before each simulation, and the function can be changed between different runs.. Simply opening the edge_types.csv file with a text editor and altering the weight_function column allows users to take an existing network and readjust weights on-the-fly.\n",
+ "\n",
+ "Finally we are ready to run the simulation. Note that because this is a 400 cell simulation, this may be computationally intensive for some older computers and may take anywhere between a few minutes to half-an-hour to complete."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "2018-09-20 17:00:27,407 [INFO] Created log file\n",
+ "2018-09-20 17:00:27,604 [INFO] Building cells.\n",
+ "2018-09-20 17:00:32,418 [INFO] Building recurrent connections\n",
+ "2018-09-20 17:01:06,690 [INFO] Build virtual cell stimulations for LGN_spikes\n",
+ "2018-09-20 17:01:11,379 [INFO] Running simulation for 3000.000 ms with the time step 0.100 ms\n",
+ "2018-09-20 17:01:11,380 [INFO] Starting timestep: 0 at t_sim: 0.000 ms\n",
+ "2018-09-20 17:01:11,381 [INFO] Block save every 5000 steps\n",
+ "2018-09-20 17:01:41,576 [INFO] step:5000 t_sim:500.00 ms\n",
+ "2018-09-20 17:02:11,893 [INFO] step:10000 t_sim:1000.00 ms\n",
+ "2018-09-20 17:02:42,356 [INFO] step:15000 t_sim:1500.00 ms\n",
+ "2018-09-20 17:03:13,545 [INFO] step:20000 t_sim:2000.00 ms\n",
+ "2018-09-20 17:03:44,476 [INFO] step:25000 t_sim:2500.00 ms\n",
+ "2018-09-20 17:04:15,250 [INFO] step:30000 t_sim:3000.00 ms\n",
+ "2018-09-20 17:04:20,128 [INFO] Simulation completed in 3.0 minutes, 8.749 seconds \n"
+ ]
+ }
+ ],
+ "source": [
+ "from bmtk.simulator import bionet\n",
+ "\n",
+ "\n",
+ "conf = bionet.Config.from_json('simulation_config.json')\n",
+ "conf.build_env()\n",
+ "net = bionet.BioNetwork.from_config(conf)\n",
+ "sim = bionet.BioSimulator.from_config(conf, network=net)\n",
+ "sim.run()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 4. Analyzing results\n",
+ "\n",
+ "Results of the simulation, as specified in the config, are saved into the output directory. Using the analyzer functions, we can do things like plot the raster plot"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAELCAYAAADOeWEXAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXucVNWVL/493V1dXf2gm+JRSAMWKsWzwQhKO0YjrUYT\n7SYTzQxj62hyE3V+MGkjk1FjbtBJHHUMiSaYXL3JMMSYODHJqJiJQSLGONcH4AMEpJungCKPlqaB\n6qYf6/fHqlV7nd3nVJ2qBkXT6/M5n6o6dc5+rL32eu2113aICAMwAAMwAAMwAEGh4MNuwAAMwAAM\nwAB8tGBAcAzAAAzAAAxATjAgOAZgAAZgAAYgJxgQHAMwAAMwAAOQEwwIjgEYgAEYgAHICQYExwAM\nwAAMwADkBB+I4HAcp9BxnNccx3kq9TvqOM4zjuO0pD4Hq2dvdRxnk+M4Gx3HufiDaN8ADMAADMAA\nBIcPyuJoArBB/b4FwB+JaByAP6Z+w3GcSQDmAJgM4BIAP3Icp/ADauMADMAADMAABIDjLjgcxxkF\n4FIAP1G3ZwNYkvq+BMDn1P1HiaiTiLYC2ATgrOPdxgEYgAEYgAEIDh+ExXEfgH8G0KvuxYjo3dT3\n3QBiqe/VAHao53am7g3AAAzAAAzACQJFx7Nwx3EuA7CHiFY7jnO+1zNERI7j5JT3xHGc6wBcBwBl\nZWXTJ0yY0O+2ftSgE53Yg/cwHDGEEXbdq0IVDuCA6z8btmErWtGKKKKIY2zWsj9IyKUffpCpf5nq\nlLqONQ78yjuW9QQp61jWtwNvYy/2YhiGYTTGBKoj039+5R3rducKx5o25P0e9KAVren+Zur/sWq7\nhtWrV+8jomF5FUxEx+0CcBfYatgGtiyOAPg5gI0ATko9cxKAjanvtwK4Vb3/BwBnZ6pj+vTp9JcI\nTTSXSgg0nWqohZr73CshUBPN9X3/WmqkEgJdS43pey3UTNdSI9VQIv1fE81Nl3+soIWa0+VKnddS\nY1798AO7f3addr+kTqnL/t1f8CvvWNQj/ZE+Zyor3/q88LeclnnSR6Y6Mv3nNS523z4ImiQiWk7L\naDrVpPsobW6h5n7RJZHBge6L1zw4FpAJ3wBWUb68Pd8Xc64IOB/AU6nv9wK4JfX9FgD/lvo+GcAb\nAMIAxgLYAqAwU7l/qYLDi4CF+JfQ4jTR2+8IoeqJISBEJkw7CCPyKjvTPV1PE8111Wn3w48xBXnG\nrlsLI69+2c/7lZ9NAOWCn0z3cwEvRpRrO7z+07+9xsyPLjKVoxmkxq2mR417UWIup3pqorl0OdX3\nUXiC9C3b/0ITl1MDNdHcdL2imNl40ApbJlx79dWLhjRNBqWJbDS1nJZlFEYfRcExBBxN1QJgOYCo\neu42AJtTVslnspX7lyo4iIIxZg1aa8tkcQih5cLUvOr0a0c2iyMI5GqVaEGbi9aayRI51lZJvnAs\nhA9R5r7mKzAzlavHTr5Pp5o+/2s69aJbv3py+V/KHU9xKiHQJTQrq/IVtN+Z6NRL6Aelq2xWbLa5\n8ZEQHMfrGhAcwbVYL8ExhRJZNTQ/CyZbnZk0z1z6k+nZbFqV/WxQd0uQPgTV3o8nHrJBf6yiY2FR\nZSo3iMVxLTXS5dTgUmbs8c53LL3+E4tGLBwpMwideZXpZXF41WvTplddQccnKD76Izgcfv+jCzNm\nzKBVq1a57nV1dWHnzp3o6Oj4kFr1wUArWnEI7ShHBaKIZn2+C11ox0FUllRhz6jduDr0t2hFKy5F\nA7ZhK+7BQlyAi9LP34h5eBAPIIooWtGKyajBKqzJuZ2b0IJFuB9tOIBH8QjmoBGVqMI8NOE0jOtT\n3/WYi/uwKHD58t5k1OBR/MZVpl+5+p11WOtbp7R9HpoAIP3drsOvTddjLgC46s/Wz3zx4NVuwXl/\nyspWxzw0YRHu73eb/eqYg8tdY2Tj54u4Kk1Xi/Hzften8TYOCbSgOf0JwJd+86lHypA+Sdle45aJ\nLuzyNPwRz+BmzO8zvx3HWU1EM/LqQL4S50S5vCyOLVu20N69e6m3t9dT0n5coI3aaB29SXtpL22h\nzbSFNlOSkn2eS1KSttN2SlKSent7ae/evbR0yxNpc3aK8udq8LI48tFyxHSeQgmXtZPNhRVUu9IL\n+l5meSZryKu9WuO7nBoyuvf8QNdpryfZv+22BFnbyab1erlAgrY5m6YqoPGRjxXltc7m1w+9pmCX\nlcu4+LXJj97EAklQnC6nehcd2OscuVhstptJcKHXcPysZK/yMwWDaDegBgy4qtywfv36j73QICLa\nTttpFa2kdfQmraKV6e9aeCQpmf5/O20nIqLe3l5avX51etIuocU0kqK0hBZnrTMfv6omXttfHrQe\nmxHKhNK+6RpK9GHG+bh7tG96ioowy8SggjAJabu9EKondjbhFzS6KFc3pm7nSIoG8rELk7uE6gKP\np263CPvxFPd1AwURLrm4yfR9jW8/QWs/53dP91Hu2QvumVyloqCIi0zqz7Te4eV6lvc1jfnhsD+C\n47ju4/gwwXGcD7sJxx2GYzgAoApV2I99OIIjSCKJPdiDMak48D3YgySSiCCSft5xHCSRxDqsxVI8\nAYDdXq9iFf4e16bL9zJ/xWUjnwLyux6zsRRPYB6aXO/fg4W4GfNRj9k4DeNwHxZhE1pwI+a53vGr\nRz7bcAAP4gG8gOexDmsxB41pdxMALMUTuAAXpV0nbTiQbuNtWBDItTAPTen3rsI16bYBSLsobByJ\na+EFPI97sBA/xxK0ox0VqMBVuMbV9jloxPWYi3rMTrthJqMG92Chqy4N0h95dyfexqN4BG04kJN7\nRsoB0MfdMQ9NabxORo1nO3SfBXZiJ57Ds55l6rI1DgAggfFoQTMIwKN4BADjV8rYhBbcjPlpOhU3\ni02XQk+59HcR7vfsp7TvWTyD6TgTt2EBHsVvcCfuQBsOYBNaAAA1mIoaTHW96zc3NuIt/A5PpnGr\nn/kxFuF3eBInI54Rb35jIXAbFqRdXJpO5L1P4jwA7O7qj4stDflKnBPl8rM4/hJBu6Qy3SMielVZ\nHH4aSSZtJwhoLS6T1pZLXHzQxUTb1XA8op90H7S7TD79woxtbTJoaKeXe0ZrqPoZv7EL4lLKtjDr\nZ/3luoiuXaFTKEGXU4OrDOljjRXAkQtd5mJ52W7P/kbTZXJBavetn6WQT9+87mk86rIx4Kpyw1+q\n4LDBT2gQET2//nnX5A/KZHJxC9julZrUGoeeOOIiC8J48nE/2cwnH1+8333N+JfTsjQTuIRm0RRK\n0CVUFyjaK4hP3O9dzWj0OB4LV52fGyboWkhQ+vFrt59wzHejXKax1QLjcqp3CTK7zv7gVgvL6VTT\nh/b7QxN+YFyKs1xlDQgOC04EwVFWVtbn3oIFC+jee+8lIqJrrrmG4vE4TZs2jaZNm0b3339/xvIy\nCQE/kDUQWdvQIIJjSmpdwEtr9QI/X
7D+z2/R0M+3n8mn319N02Zu2ZirX/+86vbaXaw3GGpBGQSC\n9M/W2HNlaEEYUz4MP2h/vO75WUrSP73O1J9d237tlfGyx89rTSoTBMG/phM/ZU2PaX+tft0/e22u\nP4LjY7vG8VGAe++9F1dccUWgZ/dgD/ZiDwCk1y+ygaxpyKeGcpQhiig2oRk/xxJUogoAcCfu8PSd\ni1+5HrMBuH3V4jvWPvI7cYcrZFHWNORdAHgUv3GFumrw8k0H8fd6rTlIiO4mtKANBzAHja4Q0jYc\nSLfVyxd/Hxa56tZ1iP/dXhdZizVYh7XpEM4g4Zteddjv6PGpRBUexSOYjBrfMrPhVcrbiR0YhdGu\n8dLgdS/beHj97/dOGw7gTtyRXg+SdlSiCptS4bCrsDL9XdYbcvHV12M2XsDzaRoWaMdBAMDJiONR\n/CZ9fzVWogXNfdYL/EDjVujLHj+hk3YcdK2R2HQLuNfTstVtg6YfWf+w6apfkK/EOVGuY2JxdHcT\n3Xkn0WWX8WdPT27ve0AQi+Oxxx4LVNYf/vAHOqv2LJr8icn011f8NbW3t9OBAwcokUjQW2+9RURE\nc+bMoYceeoiIiJ74/RM0+ROTqWZqDdXV1XlaK2JxiFYsWo2fdmJrPn4uA9HCL6G6QOX4QVCT3dbA\n9ZqDrdV59UH3WVtKfiGzXj5+e91BcCL4yaaxellH2bRjqcMrMiioxaLLi1Gly+3WX/dItrq98OlF\nL15jlMs6gIYgOLWfDbL+ZLdVaEe/72dV2fVpmgnquvUaryDzGAOuKjfkLDjuvJNRIdedd+b2vgfk\n6qpas2aNZzl79+6lc889lw4dOkRERHfffTfdcccdRES0bNkyqq2tpV/+8pd08cUXExHRnj176KRR\nJ9ETWx6n7bSd9u/f7+my0mscmdwfAkFdFXJPuxc05Ouz9Zv0+r492YP0S9qUScjY//sxQL/JGtT9\nFyRkOZNAzQe3um8ShtvfJH5+4OXOk/ZqevHqUy7u1Ex9DYLTTM/m0k8d1uznItX9y3XtJpPbKwgt\nDggOC3IWHJdd5hYcl12W2/secKwsjqVLl9KQIUPSAmbixIn0pS99Kf3/V77yFYpGo/TSjpcoSUn6\n9ZO/poYrG1ybAb0sDh1VZUO+VgGR0dSDLnjrsjL564MItEyTPZsG6Vff5VSfXjSNUSVdTvWelkEu\nmmKmDYD5Lvx69SWo5SDP+y3Y5lqvH2PWmrjgVfAp/dbPeFlRQcc7FyER9N2g/0l/LqFZ6b1Ffnul\ngio+mdpp0022uSQwIDgs+KhYHEEEx5NPPklz5szx/K+np4c++clP0kmjRtCja35JW2gz/fTJn9Il\nV17suSCuQVscNhwLbStfN0Imt0uuZesJZDOkXNtUSeH0dx3WaLtSvBicDX47eb0sn1xBl5Fvf4P0\nIcj79n0tXO0FaXvcR1LUM1OBF6P16l8ubqmg7+byn03PmcZcC8JcXY9SRqb3/MoZEBwW5Cw4enpO\n2DWOPXv20OjRo6mlpYWSlKQNhzbQmo3s1vrud79LX/nKV+jR5x+lidMn0sajb9Hbe96mk0adRBu2\nbCAiov3793uWe7wER3/cCKKFLqHFgd1lmcCevLm6eSStt2RMlXUAe3evV9syTfps+2Zy8av79Tmf\ntYpc+uD3vt+46ego7doToe7HPDO1MdN4+uE4iODoj8Wh+6RxEWQHfLb++UE2S+UjZ3EAKAHwCviM\njXUA7kjdvx3ALgCvp67PqnduBZ81vhHAxdnqOFHDcR3Hoerq6vS1cOHCvBfH//jHP9KMGTNoQs0E\nOq3mNPrJEz+ht956iyZMmEAHDx6kNmqjv//a39M/feufaDttp8f/+3E6/fTTaerUqXThhRd6lvnG\n+jd8iSsTw8jmBumP0LE3Kmnmlw8TFFeTn+vHz01gpzO5nOpzduHk6hLxY7q5Qn/wfyzK8qIdLQzE\nkshUfr59sMNovWjYq+x83Fd+7+RrcQeBY42vE1lwOADKU99DAF4GUJsSHP/k8fwkuA9y2ow8DnLK\nJDjy2Q9xooBf2+2cVTpfld87Gke2lpdJO9IMNttitS4/yKSzGbWux+/QKr/JEmQC2xqqvTicaxx/\npjoyJX8MGikURBs9loIjE+RSv3Y/ZXKhBFkfy9Q/O+IsqDDwiqrLZn1lEky5RkUFtS7yFUofOYvD\nVRFQCuBVADMzCI5jcnRsJsGRaVPcRw1EKLRRW/rTTmjo11+xOGTRTmuDx9riyGXS2RPJz+LINons\nCZyt3ZlcPLm4Luw+ZFpvaCGT6M/L/WVDEEFzPDXefOvxG0sbz3Z0l5eSEIQ2gwpNKcsrdNt2aQVV\nfoLixlbW+jPnssHx2AD4QQiMwpQ76hCAe1L3bgewHcAaAP8OYHDq/iIAV6l3fwrgikzlf5wsjrPO\nOisdPZUtTJfIWyjY/fPrr6xxiN9eLh1J5OePzTRpvKJHsgmBfCDIRPZzRWUSWrmmNDdZTRv6MAPt\nevMrN4jPXdef7RTDXBicX/+8xqk/5drl2y6l5bSMLqd6GkMxGkYV9G90l6t+bRH0h3ZsZUJbOEz3\nDen1iRqfcPIg/VtCi6mGEnQJzcrqJvUKAshm7fjVq8dG9/MjKTjSFQFVAFYAmAIglhIoBQDuBPDv\nlIPgAHAdgFUAVo0ZM6YPIk+ENY4PArIJwSC5qi6hWemIoZEUdUW7+E2aTISsrZdc3gsKfhPMq+xM\nFkyuLi5bKAjIgu8USriEU6YJG6Q/Xv97/fZ7Llt/MvXdTtZoM/t8QWv4tkDQyottmXpZBP2p30+Z\n8PrMReBm64/9jt++HXvMsglpP4veLyhE4CMhOLid+JbtogIQB/Bm6vtxd1X9JYG2SGwhohfHtZXQ\nQs2em/eC+mNtiyNXP2428LMislkKtt85XwZsMxOvI0/l/0z/Zeubfc/PwvBrXz7uDXnHPj7VFoL5\njmMmjV8i18ZT3FNz7s+xsH71+1lYQY5K1mCPkTny1j8wg8g715nN6IP2K5PF4QcnrOAAMAxAVep7\nBMCfAVwG4CT1zNcAPJr6PtlaHN9yrBfH/5JACwvbrbV+/fo0gekoJj9izddasCdEfyOH7EkVRBO1\nNbB8+hNE0GRyMQSpL5PFkW3x3MZFf6w7LwarfwcVZl7ghxO/TZG2WytXS9Grb9nanKt1lasg9bJQ\nsq299cfS8mvriSw4pgJ4LbWW8SaAb6XuPwxgber+k5YguS0VTbURwGey1TEgOIKBl8WhTWq9IBnE\nZRIUvEzwTC6lXCGIMMrF4sgV9MTOtKCar9DMxXLI1cIJ0i8vN5E55rQhqzvErz+2oLUtpkxuLT+r\nMkj9QRhxf/d6ZINcXHC51JOrVXvCCo4P4jpRBcexTKv+mc98ht5///2M9X3qU5+ilStXpn9nW/94\nYf0LaT+27QrIRcvxMpP9fvsx8EyhttkmQ1Cr43iBbp928Xg942U1ZOufCVFuCIyHfCwBv355MW
yh\nDwmb1q6WoHV70UmNT74qu3/9Gef+HkV7LNriRd/HImDEb976zacBwWHBR0VwBN0AGARswZEt7Nje\nAKghFy3HJtZsv/3e92JMQVwuXu9/WKAFh5c2bTPVIAxIGLfede0FGg/92XcikImheQnCoC41u71a\nWfBLjGnXezyYaz5lHEuaOxbBB3748evzgOCw4OMmOE4++WTau3cvbd26lSZMmEBf/vKXadKkSXTR\nRRfRkSNHiIjo3E+dSzf88w00/czpNG7cOHrm+WcyWhyCoyCTMZMFkOvvIGVrZpVNQzzWpnx/QFLJ\nX0J1LuaSzX/tl5dJtzmXxWFt2eUbAh2Ewfq1N4hLzn5Xhy7naq3kAn7v5nI/aPqQIG3IdgxBLn3I\n9fkBwWFBroKjm7rpHrqTPk+X0T10J/XQB5OrKkhadSK34CgsLKTXXnuNiIi+8IUv0MMPP0xERLWf\nqqXGm66k7bSdfve739EFF1yQsX2CIy8GYRNaLhZAvuCnffe3Pi/N/3i5tHRobhBrwn7mWLZPCy79\nGbTsTMwpG+MKQlM26KwB2RSP4zGOfhq/l2XUX5eZLlPKmpJyGQdZA8un/1547I/gGDgBEMBC3IMF\nuA0A8N94CgDwz/jGca83lxMABcaOHYvTTz8dADB9+nRs27YNAFCEIlzy+UtQhSrXfYEOdGAP9mA4\nhqMEJen7XieM2acAyqllbTjgOqENgOdJdX6n12nQJwouxRPp0/SiiLpOaLPbF6RsDfapbHZfjwVI\nm76OW/EDfA/3YCEAoAZTXae82XAaxqVPQazHbPwcS9KnE/YXpAzBr3wGKTsbjr1OZ/SqW9eV7R05\npW4n3k6fyrgYP/c8kVLTY5BTAIPQjJwCKJ8C9qmBi3C/J53mAho/9ZiNr2EeEhgPAHgUj6ASVZ44\nkr7oUyz9wD7pbw4uxzqsBeCN/1yhoN8lfAzgZbyY8feJBOFwOP29sLAQ3d3dAIAudIHChP3Y57ov\nIEfP7kkdPytwGsalj7mUo11tkCM8H8UjWIon0oR3I+bhTtyBB/FA+v0v4ip8Hpel7/mBMJKbMT/N\nUCajBq1oxVI8kfW9TGVrmIemQMd++sEmtOBGzPPFDYA0DlZgOVZhDS7ARViE+9NMIIiA+zEWpY8M\nBZC1zmxtFqZxAS7CfViU/sxF2PrhOBtO5ZhZ+8jU/oyDXb7QYxA60P0JMp4afo4lWIe1+Brm4Y94\nBm04gHFI+NKpLt+vLo2fC3AR6nARfoelAOCJI12O0NVarMF2bMONqXbZ9eg+i7CbjJr08bE3Yh4Q\nQRh5woDFAWAmzk5bGvL74waZzh+3tUF9RrGArUXKO/o8ZiFqAGki9QNbI9blZtJUc7Ua9FnZN2Je\nRq3XC7Jpyn4QtJ1S/mlI9LvOY/V+trZ7nT+eDbK940VPADxpMUgb/Z71w00FBrk+bWhBM27GfKzD\nWsxBI+pwkWfdunwAgcZBty+bhTcPTXgBz2Md1qbbI791PX7nvZ+Gcel54MQQ821UNsjXx3WiXMdi\njaOHeo75GsexTKuu1zgmT56cvn/vvffSggULiIgXx5euXEpJStLevXtpzMljAi2OE/kvfufq5w66\nMBoU7PLyXRT1W2AOsnCfz0Kv332v3147o/NZ8LffP5bj4NfHYxlsELSsINFex3Ks7NMJs9FR0PmT\nqU77eb/NkUFPbPSrBxGspYHFcQMnQlTV8YZsyQy90o3oLLovrH/Bk9D8FsJzmRB+EISh6bLtBUtp\nj1dYa6aIIr/NY379E8iWpDDbxLcXMe36/PZd5MJwdBszhUHnG4lj41AvyB6LNCRSl07RkSkSTPrl\nlYTwWOz4lrbY+NTPy8bHXOjSC+w9On6hzH6L4UEj3/wSNmIgqsoNfwmCw96nYQuKLbSZttBmV6p1\n/fn0+t97EpyfgMg3ssqrjCD7EfREsndi2xPMq206kkgzAr/8W1590gkf7VTfdr1evzMJFh31lKk/\nmXB0LZlT9YTp+llnuUbi2ALJa2OfnQ4k3/BdOylgpmSDUsYlNKvPZstcBUe2MfdTNOz9Jkag1HsK\nFD+c2Ht0/DZPZrOSMoVa6/kwIDg+poIjl7TqmSwOfbDTFtqc/h7E4vCDfC0OW8vO5nrychHk4gLy\nc/9kYypeZYoWbOfyCtKO/uAzqMVhW1KZGFWu4+cXHutXpt/ehmzCWVsQXhaHX+ir1y79fK0qPysz\nF0bdQn03MAYZR21p5Tsfs4273zwaEBwWfFQFx7GCJCXT1sUmak4LDQ39xVHQSZrpOS+C97Mq+gu5\nMhVb4PW3r/2BbBpnroLLz7USpM5sZWZyp2RSFIK45TQci53WfvUFVVg0aAsyl/HI1RL0ei9fuhsQ\nHBYMCA7jqhKLw0498ur61b67X/MleD9Gls9pgbma7V7/+1kiQSZ2rtaD7Q47lgLEzwXmtUirra1s\nDNB2reQKQZIqBh2vTO31eiffxf/+MPOgFp2Al3Czn8s3MWUuOPBbXB9YHLfg4yg4cjm5MNM5HALL\n1z+TNsltyNdfbb+ntbB81kS8INvk9dqJfS01uvzodvuytStom/zWLIJCUObrh1fdDlmTCeJjz1dw\n6DWIfMHuS5D25KulZ3o3iCsvV83eC79+c0TTa9B+BX3eHid5z4ljD52IggNACYBXwGdsrANwR+p+\nFMAzAFpSn4PVO7cC2AROq35xtjr+UgRHkGNi/e57Pffc+hVUQnwCoA1BXQrZXCX6oJ58NfBc3DG2\nf9p2e0lah6AWh2h02QSBrf0FYTCZBG/QtRQ/i0Of4JdtPak/YbtB1jUy3dNtuJzq04c5ZRMc+eI3\nU5v7I4z86sx1zmSz0O0yguQfa6HmPkdBn/AWBwAHQHnqewjAywBqAfwbgFtS92+BOYt8EtwHOW3G\nR/Qgp4KCApo2bRpNnjyZrrjiCjp8+DCdf/759PTTT7ue+/73v0833HBD1vK8mH+2DLj2c+vozfT7\ncnRs0FBKr4mVbbLZ4Yb5MKdsLjHN/LTrJZubKmi9x8P1lKlfQfN15epmC+p+8cNVLgI8KK0Yl1lD\nSomp83SpBBXG2drg118/IZzrmOfiysoVf7n8r/uU6Rjcj8QaB4BSAK8CmJmyJk5K3T8JwEYy1sbH\n4uhYneTwyiuvpIULF9KDDz5I1157reu5mTNn0p/+9KfA5WoBor9L2K29CC7vyGK5CBmdVl0Tot9k\n9WIo2SaXHW5oT9Zs2qAfE/My720Gn8kl4AVeVkOu/bXLy0drDKqtS3+CCjU/X7pdtsZnJsafC5OU\nezY+pQydHDIfN44XLjNZOF79tWkzH3djpqy5/cVftv561SVCwyuUnOgEFxwACgG8DuCQsiwOqP8d\n+Q1gEYCr1H8/BXCFR5nXAVgFYNWYMWP6IHb9G28Qbd9O1NZGtHkzX8mUpp5M8n9J5d7xuhfkPz9I\nJqmstDT9zo9/8AP6h6uuov27dtGwYcOos7OTKJmkr
c8+S6NHjKDeAwf865H7e/cSvfkmbT+0ngVA\nR4vrsXXdb7BV0fU6UXMz0RtvEL31VrrvyY422t76OiXfXE20fj2tf+45olmziBIJanlhMTW930gt\ntzdS03pO79y0vYGatjekvxMRNb2fmkxPxYkaG4mWLSOaO5c/Gxv73Gu5qZ6anorT8m/OpKan4tRy\nUz3R4sXU9HAll/OHhOv56VsrmOAPxomam6npDwlTv9Qzdy6397kaatm6jFpeWEzXPllJ1649nVou\nihPV1RHV13Pd6+uo5dIE0eLF1HLdLGp6uJJaXljM+FHl0bJl6brS7aqrI0okiO66i6imhtv9XI35\nv6GBqL6er8ZGosWL+blly7g86eP21HO6rGWGsbhwqstrTjED3VbBcXMz4+AXUbp2V2q83m/0fK5P\nPevrTNs8xrHlpnq69tlquvbZah4vG+cvLGb8nl9NTb+KMc6lPzZeFy/mftfXu3As495y3SxqWlLB\n9PF+Iy3fsZjp5KK4GbMlFdRydS1//0OCWn7tj8Pp21P9eq7GlyZdOKqvp5ara6lpcRm1nF/tGuNr\nn6ykpu0NzJTtfim8UnMzl9XQYPonY9Fs3m359V3U9Iso44+IWrYyPlqum2XaWF9PFE/RsNzTfbC+\nu/ApdLNprjPUAAAgAElEQVRsWXoeL7+mmuff0/OZ9q+bxe1ZtoyopoaqUgp7PtcHaXFUAVgBYIoW\nHKn/3qccBIe+pk+fbgY2NaDrX3iBaOVKotde48+VK43weOMN/v366+be9u18b+1at5Ah8v8vmeTf\nzc3u+3v3Eq1aRWWRCNH27dTV1UUNF15IP7r5ZqLNm+nSSy+lxx9/nGjzZrpr7lya39hI9Oabpp7t\n293lr13L91etIlq5kpJrV9L291ZSct1q7sv69USvv05tG1fSusMrqW3jStNnuV5/Pf2+XOt//3se\nfoAoGmWiA6hlUoiafghqOb+aWj5Rzt8nFhHV1VHLJ2P8+7TUe5WV/FlRYcoqLnZ/2lc4TC2nwZQT\nj6fLWn4BaPoboOUNEaJ43Dw3odBdZlERf9bWmrrlnr6kfeGwu31SZ0EBf6bquvZnoGt/W2b6p6/C\nQm7PA473/1JXIkFUWWnannD64iYWIyorI4pEqKV2iBuncsXjXFZDQ1981tSk+9AyKcQM7qZ6U788\nF2E80uLFzOh/CGqZXOzGvy5b40kuwa+UK7jTV3U1349E+HdZGX+GQq6+u+qtru6LP122fldfMmbV\n1WmB3nJRnJp+CFp+AajppxEuX9Ok1CXti8W8y5Zx+1GBoc36ejO2mt4bGtIMWN5vOQ3U9GAxtVxd\na+qVOoU+w2EjgHTdetzsdut+qzFgeixw047gXs8HaQPA7Y5GiQCaAhzJl587KQb9gYDjON8CcATA\nVwCcT0TvOo5zEoDniGi84zi3AgAR3ZV6/g8Abici33S1M2bMoFUTJgCPPAI0NgILFmDD7t2YWFkJ\nHD0KOA6jrKQEKC0FWlvdBUSj6ImNxD13tOHFtWGcPfEQbrn2AAoSp/E7HR3A5s1AMsnPRyLA6NHA\njh3mHgAMHw6MGQO8/jrQ3Y3CmTNRM3EiQIRzP/EJLJw7F8VDh+KRV17BU088gV9+85s4/Yor8NMF\nCzC9vp7L2LGDyx40CHj7bWDPHkEc90GgoADo7e3HSAAb9u3DxKuuAg4eBLq6gOpq/t7ezg8MGQLs\n3+9fQJA22O22f9v1RCJunGaDwkKgp4e/FxfzeEcijL/33uOypX+Fhdxeu37H4XFOJvkZwJSZS9+m\nTQO2beOy3nuv7/PFxcCoUcCWLdlxp8uurgb27uW+AUAoZMZr1y6+F4/z57Zt3OfWVnfb5J1MIG3y\nGiMAmDWL69u5EzhyxP1fruMGuMdOQzgMdHa67xUVAVa25/Rzdt1ez9r3vJ4BmD8MHmzwqsfJa8zK\nyoDDh/uW41e+4LayEnjsMeCHPwRefZX/6+jgeaDr8cNraWnfMQgK5eXAoUNAcTFiR49ue49obD7F\nHNe06o7jDHMcpyr1PQLgIgBvAXgSSB3swJ+Sn/hJAHMcxwk7jjMWwDhwVFYwOHgQuPxy92ASMZF2\ndDChFhRI49KP3PN/SnDb/43hqZeqcNviUbj7P6qAd95h5i3MqKqKJ38yyfc1oykuZkLp6ABinHAy\nEg7j9f/8T7y+ZAl+eMstKA6FgMJCzJ49G3989lm8umYNjnR2YvrZZzOjO3CAyzxwgMscPpzLlT64\nERsYJRmhosIwlF27jNAAgLY2//cch4k7FPJ/prgYOOss/i4492JIup5B3plJfUEYTyTCkxVgHEYi\n/H3/ftO/nh5Tf1mZKYPITM6eHneZgMF1VZUZb6++tLXxJUKjQE0tEWryn4yrH+iyW1uN0AC4P5WV\nQCLhfkfOX9m/v2/bMgkNSdNfU8PlyruaxioqgK1bgeZmZlj2uBO5carfs0Ge00KjxJwPk8axvCtz\ny4/mNW4AVt40fh2H35dyHQf49re5rxpCIe6bViy1oOjtZaarQfiMPZ5aaOh2C020tQHXXQesXcvz\nbtcuozypYxP6KF0CR45kp6GiIlZmNB2GQiw0AODoUZQCHoMWDI73eRwnAVjhOM4aACsBPENETwG4\nG8BFjuO0ALgw9RtEtA7ArwCsB/A0gLlElF39u+YaJnyAByMUcg9AYSEQjRqts6iIByUSAUaOxIsr\n3cW9uGkwf9mzh62A1lYeABmEnh4m9tGjmcFHIvzMO++4J2ksZiyRaBTo6UH5nj2Y9clP4kvf+Q7+\n7rLLuKwtW5gxDR/OF8DlC/PS4DjuSVdQwJOswGcoBw3id7wmXlcX0NCQFnZpXIVC3lqbgBBtV5eZ\nTLGYmeyhEHDGGawZRSJmAupnBGxN0K4rEmHturaWP70YVEGBm6GJRl5dzZrytGmm/4mEoRWNs7Iy\nriMe5095Xj4PHzYTuaKiL/MsKeF3pd7eXsOs5LOjg5/58Y/ZOq6tNW2orATq6kz/5B0vjbOtDRg5\nksuor+f+DBnC/+l2FVmnJhQUmLGLRBgXEybw7507DV0VFJi+FhYyDemDwcJhN810dACf+xy3R2i2\nrAz4wQ/cDDoUMkJC476jw3yvqeF2TZrEZRw9atqjadhx+Lkbb3T3+d13+R3Bny1Ey8pYuXzsMYOL\nsjJDo7Yg0m0/9VT3PelrUZF7rnopGKEQ41fmy7ZtBqfV1QafWliJIhoKAVOn8r14nOfs4MHe7RTo\n7ma+0tvLbSsrc8/XRALvAR6mcTA4roKDiNYQ0SeIaCoRTSGif0nd309EFxDROCK6kIha1Tt3EtGp\nRDSeiH4fqKInnmCBMWgQMHcuMGyYIcyCAiaGZBIYOpQZ86hRxuVUUoKzz3QXd/bpXeZZEQ49PTyQ\nhYU8AB0dzKyGD3eb1lVVZsIeOcJCY9AgvtfWBrS24u9mzcIbGzfi7y6+OH0PBw7wszKxOjrcTEP6\nU1Rkniks
NExM3Aw2HDxovKY27NrFmqRmMD093D9b+AqEQqa/FRXAmSnkjR1r2tjVBbz0ErBihelD\nQQHX097uzdxCIaNFd3ebZ2Ixbufhw+bTBsHHGWcw4xEtbtcuZrA7dnD/w2Fg0SJg925+Xvfv8GEe\nh4ceMpo1YCZyV5f53t7eV4vfvJkZwa5dxmqU54m4/z09wPvvAz/7GfA//wOsW8fPFBQA990HnHSS\n6Z/txpGxBpjhXXghsGAB17l0qdGUicxzo0a5BXVvL8+FcJiFVnMztxtgjVe0Uc28enq4T2VlzOAq\nKvi5w4fNGIVCwJQpwJo1xmp0HGD5cqbvRIIZVldXZvcnwAKsuRl4+WWDC7FuNQ13dHA/du/mcouK\nuI3yTDhs2ldUxP0oKuK2/9M/MQMeNsyUJfjWeC8v5z4DXMemTe62XnghK4RHjriFn5cLjogZ/qxZ\nff8LhXj+AG5eUlHB49rVxePc0MBjv2ePtztUC+OKCuNBKCgwuDzzTBbwZ57Z9/0c4ONxAmBTEwuM\nBQuYMWjGFInwpEsmgX37mDkfOeJyC93yNeDOW7pw2XlJ3Dm3FbdctomZzfDhPBHGjDFaRHk5E0s0\nyv/v2cNEk7JecOAA0N2NQytXGuuho4OZYWUlEI3ic5/9LGjlSkyIx/leVZVxdQns2WO0H3ENFRUZ\nhiUMoa2NJ4NoZaGQ2x2ntR8BwU9lJQtc7S5KJPh+V5chYu2zlfUCgHEjGszq1VyO1C3tq6riz95e\n4zuORvlTa1pdXSzERGuXfo4dy8LgyivNf6JdC4jgOO004De/MVpucTELztZW4xN/4gljcUg7RHg1\nN7M22traV1u3QfopuDjjDLPWIJNUBEZHh7FA2tpYoG7bxhNbfNr/9V8sTABvBWDECH4+HGa6mD8f\nuOMOHr9o1NTV3c3PVVRwHaWl7nKKihgPzc3cXm3B2VZrWZn5XwT3+edzfYcO8RiFw/x5993cloMH\nGe+HDgHPPcftGDzYlCPCWoST3dddu5gmNP4rKsxc0v1Yu5YtjMpK7vfhw1xPcTEwbpyhISlLxurl\nl4H77zf0qBm90G1lJffh/PON8NAutaIi4POfBxYu5D5lWyvu7gbeeouft91k27bx/AG4/bNm8TOi\noIgFv3o1j9tLL3nXoV29uk+6bSNH8px85BHEkP9BToW33357vu+eEPDQQw/dft2ttwKf/Wyaoezb\ntw/DQiEWDuLjBBiBgwYZIh4+HOjuhvPuOzj3glJc+QUH5562Fw7IMHEZZDFFR45kTWXwYCYex2FB\nNHo0lysTI2XNAGAX1r59XJZM1oMHeQKXlTFh7Nvnrk8YXVeXWdwfOZKJ+ehRowGL60qIQ6yLoiKj\nqYVCPBmTSb5XXIx9PT0Y9vnP82L+zp0sMC65BPjGN1ibEZMf4LrFzD161NTV3s5abXk5Py/119QA\nP/2pWaTev5/79elPs6ZDxHVOnszMXtxAe/aw4D3pJH5HXCnLl/N/zc1cZzTqXo85/3zgoouAm29m\nhjFiBPD006b9l1wCfPWrbLp/9avA3/4tC4fiYrYAtGUn7wwbZgSAWHQC1dXA9Ok84c8/H7j0UuCe\ne7j8tWv5/0GD+Jo4kXFgL8xHIkYIAKw5ay0yHOZxFbz9679yedddB/z5z9x+EVTjxgHnnAN897vG\n2ikoYBxVVfF3UQK01XTgAPdRaIXIjHE8zvWuXs1t6Ozkz+9+F7jiCma+06Zx/848k+vYtYvnWk8P\n4/bAAS5LW4qjR5s2VlaaeTZzJo+xKEjJJP9/8slMW0Q8JpMnc9tlzEIhHovmZv6dTBq8iXXT3c00\n2drK3zs7eZ2jt5f7OX48t/PAAeDii7k/I0bw5zXXMP3t32/oTly5GzYAL7zA7daK21ln8XOC80iE\nn/+rvwLmzeOyn3sOOP10FghHjxqLsbqaFdU1a8xYnHQS/+7u9l50FxgxgvmJCJxEgnni7t1mfi9Y\nwNZmby++sXLl1ttuv/3H/gX6w8f36NiRI412JT7Pjg5m4gJHj/aNjmptZQIZNMit5ZSU8IDaoBe1\nxYwcPtytnUg5ev0iHOb2iFYkFgzA9/fsMWsyAE+G3bvNQl9PDwuN0lJjERw5YoTGkCHGaikuZsIf\nNYrLENN8/nxmDtu2sSbc3g5861us0UycyKZ5VxeXW1HBQuXBB/meMJ2dO4GnnmLt9913gY0buZzV\nq4FnnmEBW13NE+qv/gpYvJgtCwB45RXjquntNQJ98GA2y4lYwDY28gSeN48ZhFhIsRj35fOfZ8Y5\nfz7XP2qUmbTNzYyDp57i9y67DPjkJ/m/FSv4UyJNQiEeh/feM1pqdTVPuq1b+X4yyc/deisL+09/\nGvjFL4AZM7it1dVuK1C0WgHpa3Gx6UdFBX+Pxw09JBJMw9dcAyxZwteFFzKeQyGu59VXufzmZuO+\nGjOGXVeyvvT++zx+iQTjQfvVQyH+LcwoHmcGu2MHu9HCYWPt/eu/chtnzzYux23buMxQyGi7Esmm\n1wpiMW7LKafwGE+dyn2W/ogbqKuLnx0xAli/ntszZgzTVXu7oYf2dkPj48cD//iPTIsrV7JgOfVU\nxtsXvsBtrqjgOru6WOvv7mYLSVyDO3cC11/PtAmw++6ll5jBzp/P+K2oYFzKPC4r47qX8lnhLpdm\nayvXK1FUgwYZy/qZZ4DvfY//f/llLkes9oICxun27fx7yhSuW8YnGuXnJQhD3HenncbzYOxYnlNC\nU7feyvitrua2XnMNu+lWrwZCIYSBLCvs/vCBhuMeD5gxYwat+uUv2fRsagLGjcOGDRswcexYFhI9\nPcxohw5Nu5HS0l0YcCRiFr5EsAwdyoPT2WnWKYShDx9uhE40yuWJJiX/ayEj75WWcpmjRzPTaW11\nt0HKkDZKRInWdgsLWfBoX79ER3V0GGK1PwHX4vGG/fsx8ZJLcgvtraw0zK62lttw5ZXMOG+6Cbjr\nLqP5eYUV+oUpeoH20abCrHHHHayp7drlZtAVFcwEpO4g4aG6LdXVPD5iVZ11Fk/AQ4fcIZcSBlld\nzZbGI48YnOj2eoEeB3lWwmQl1FXcQQI2s5foLD8oLOS+E7npo6CAtefhw9ki2raNyxbG0tXFjGXL\nFv7Pph2/sNNseJXwVoD7JbiqqWGXjSgCfngS8AsNF/xFo9zvXbtY2JxzDguYQ4cMraxY4Q4BtkOU\ndbvt/nuFDldXs7UgwhtgQTt0qOmvF+gw6kxQUMD4O3SI2xaLmbUiGUMNwod0PaLcCcRiLqt2KtCx\nhsgjAic7fDwER20t8MADvM6xaBE2rFmDibKuAbC53tnJjBlg7UYzNREae/YYpq2Zjzwj+zmGDzfm\nshDX8OFcj+zFADhsNxxmwhO/eXe3ERJbtrgtCNk3Eo1y2fbiuLRZnhPwi70HfJn1hv37MdEOXfYD\nWZCXxVOACfNznwN+9zsmYt0+v/Z4Cakggquuji2gBx7IzMyEGWTCh
w1e+wa87tnMIx5nRl5QwFps\nVZVxwQBuZmf3Uf8XjbLL4tlnzf9lZcbVJJBJ6Nr99Wq/QH09r6WIZS3MRisFfuXmAn7vitD0qs+v\n/ZkUAXlW8CPWo0BDA4+PFlCZ+uW3v8QP/PohtKjnquwt8+pLtnlgKw1a8MlckH750YpVx3jgwEai\nLOFZ3vDRXxzv7OQJ29jIZvRVV7EASCZ50GSRSfZf7NhhkFdcbITGpk38Xk8PC4FYjP8vLubvIjQi\nERNtpcN6hw9nKyKZZEa6eTMTTFubKVOiuWIxbkt3N5cvi3bhMD83dKipW1wmOjw0FHLHcdvx3npB\n3I/ZELknp70YHA6bdR2J/5ZQU4DN9gceMBqM4LSiwn9Siv9aw+DBxlT3W5B+8UXGl5TtON4CTyZS\nNmanF4Hlu6xNiXvTDyT0cts2d1SSZlZFRew+kPBhmyHozVutrdw/gUiE+9be7l44zmSp2f31s0pi\nMV77aW3lMW1tNQvRwvxkDAoKTAgokHnPTmFh3/0KdpsEz9IPceUAfUOs7Yg1wV847KaRUIjnvd7r\nIRq6gCwoC90CzBP0/NGhtLqffiHuMofFxSjvyH4vxzH7bSSQQ571Ew5e+z10u2RMS0u5bi00xC0u\nOPfb46Hvx+PYAez0fjA7fPQFx3vvscugqoojZh55xDBk0UYkFC8cdm/ckwF65x2jFRQWMvN+7z0e\nrKoqE4UllkdJCbuuJkzgZ+WewNGjZg1CQ3k5+2B1+F53Nz8v73d3swBqa+P7eo0DYOJobzfRYjYQ\nuTUmaUNxcd/9HpoZTZ7sJqzOTu6zbHoEmEi/8x3GozA/rT3F41yOhtJSthhkMtubuSQM1N47oid/\nMsm+ZPGja6YkE9Ped5EJ9OSVMgWX3d1GiOiQbtk82t3tXpiVCazb3t3N/mu/8GF7o6PGobTDDj/V\nTMTeNyD3hIHJe6WlJtiipsa9piafhw+btldWuvcT7NtnmLqEvHqBDs6Qd21BIzjX4dkSIi1RboCJ\nINQggryz07S1oIDb9PjjhqlKG7q73YKqpsbdHgkwAXhOTpvG38vK+kY82VBWZlw+Yjl1dZl5cuCA\naUdbG9NATQ27NnVfpCwBWdDXuPISMkeOuGnq8GE3/ZSVufc06XkRj5v72TYQZoGPvuCIxdhF1dTE\nl95R29ZmQmUnTDB7Mk49le/J2oOAIPOdd9zWhVy2gJAFc22OqgG58+c/x+Q5czC1vh6nX3ghXv7D\nH/gPvStcNuiUlpo9HYAJ+dWbyIQICgvx/Msv44y/+zsU1dbi188+yxNFtw0wpqts9rKjg7TgkYVO\nAVkU1qkubr2VF/YkwqahgV0f9fUsHIqL2aLTcMEF7GaSvRnig7dDJCUKpL7exKsD/FxdHd8T0ExA\nwid1/L4GezOhDZMmsdYq5ZSVcf8kMiwaZZyFwwZ3ZWU8CWWRvbzcvV8iFjNWlIQQazjrLK5T7ssG\nx8pK4LbbmJ4feshdZjTKZVVUMC71BjwRaCIMSksZl48/zgvGc+dymPIttzBeRdgmEkbQRyLAeeex\nYiD12oJPr11o0MJaC5pYjK+yMmbOOtRVgiI6O92+eRk/XaZtZQD8fmUl8PWvM15kbAsL3VaehO3W\n1Ljxl0jw70OHDM0ePswCQcLkbcZdVMTPbNtm6P+88/i/o0dNaL1AeTmXtXChOwxX+iARb4DZy1Nf\n39fLYMPkyW6hM3OmKVv2pojFo6Mt33qL71dUAM3NGAmM9K4gO3z0BYds6tq2jWPwb73VjdSSEvfC\nd2cna1IiRIYP5+iVaJQHTzNuIcaSErNn4+BBdpvoNQYpW96tqMCLb72Fp557Dq/+7GdY89//jeW/\n+hVGi2ald4WLu2zoUL4vk2vkSI5C0Ttty8vT2u+Yqir8x4IFuPLii/n/3t6+hC4Le5EIE7ZYCUKQ\nQ4cyocbjvNBXV2faNXSoCdUEmOD+6794AiQS/DzAE2/tWmZQOgJEcLhwIbsQRTMT7UiYlxYQsj/g\nj3/ktpaVcajlzp1moVOeBUw4qkBFhTucGeDfug8Alyv9bG5mn792uVVXM3P85jf5uXic6aaiwviT\nzzmHaU3aPHmywavsAaip4UV9cWMJU0skGF9Dh/Kz8+aZSJw332R6Pucct5vjjDPMJrDGRo6gEQZe\nUuIWJGPGsFv0uuuAG27gCKXLLuOInqNHmXbF7bhzpxmrpUs59FSEtN4AV17O9NHYaFxY9i53wOwn\nERDX26ZNRkO/7z7GQW+vWaQXRi7z6uSTjTBtajKbD/UGuliM3Xzbtplot54erk+3QejkoYcM/hYt\nQh+QTY4Aj4XgVxh9YaERhHpdY8gQrq+tzZ2G5NAhbtuiRRy8I/3+5Ce5Tbt2uaMKr7+eF9vFY6IF\njRa8O5WHqbqaecV99xl6E+X5vPNM32X3OJC2eAv7w/8/qOy4x+tKn8chWSprajhl+MqVnD1WstZK\n9lm5tluHH8n/b77pzpgradklS+2bb/bNYrt9O6cwl7LffJN+c889dFldnUmVnnrulT//mc4++2ya\nOn48nTlpEh189VVavHgx/fWnP00X19bSaWPG0NevvprrJKKy0lL6xhe/SFPHjaOZU6bQ7qefdvXj\nmksvpcfuvjudDbj9+eepbsYM+sT48TTltNPo8e9/n/9bvdrg5PXXOTtudXU6JTQBJvsn4M5O6qQy\nvMbj/LxXhlTJyFlYaH5Pm+YuHzDZO3UmVp35U1/l5d7fpa21qSyksZj7/+pqotJS7zIlzbfXf35X\nKGTaresQfKlstVRQQDRzJve5sdE766md+VVnvk2kUs3r98rLuUz5T+NT8OiXTdbGmd9/kjE2FuO+\nSGZWydILmNTifnXZ970yFgMmJb30rVZlk62rM+1MZXH1xKFcXrTol5lZMubW1Zmy7T7KFY3yc4A/\nLfnNA6Fn6b/Qjq5XZ9y96y73HNBtE5z6ZZ2WcZLnJKOv4Fj+l/kRjabxnUhlJc/nyuulE+lKCw5J\ncbxsGa1/5hnDLIVpb95sUqqvXdv3rA55Rv/evp1/C6OWMy7kfS1wRLC8+irR3r3Uvn49TZs6lcaN\nG0f/cN119NzixdT5//4fjR01il7585+JNm+mthUrqOu112jxQw/R2NGj6cCKFZR85RUaM2IEvf3n\nPxMREQB6cuFCopUr6etXX03fvuEGf8GxahV1vfgita1YQfTaa7T3+efp1FGjqPeVV8w7KQHiSquu\nJ7ww/gkTDOFPm8a49WK41dV9mbp96ZTSfpPQUenHCwo4FfSQIeZ9+e7FqOz6dRppO621LThiMTO5\nwmG34LPbJZcWIsXFfO6EzURkgs+c2VcwVlVx2zXeNfPSTMyuv6bGnSa8urqvUNPvCt5CIaL58/2f\ntVONe7Vt8WLD8IqLuR+C28pKphP7/VCI26DHvbzcMGW7j4I3aWd1dd8U7PqSMyz0+1/6UmZ6lLLL\nykx9WiESmqmtdTNxxzF9
lrobG01fbdrxmwdCD3baf8fhOjWedZm6LXJ5zQtdtte91Hzpj+D46Luq\nADbB58/nvQRPPGFM595eNuvEjSSRQmPG8CJWaytf774L/OsPgC9+DVj4f3iTnLindEZdCZHs6OD3\nJZVINMqbliQnzpEjKB87Fqt//Ws8dPvtGBYO42/nz8eDjz+Ok6JRnDliBHDkCAYNHoyiVPjvBRde\niMpTTkHJ+PGYNH48tqcW0YpDIVx27rkAgOkTJmCb7ND2AiIQgG/86EeY+oUv4ML/9b+wa+9evKdj\n4L0W3HQaEVlYb293R8AsXMgmrpi+ElUyfjzw29/29eN7RXZ1drILSjZvAcZlRGRcPb297HrQO391\nH3ROrFCob1SO7S6Te5WV7JqSRJgAj+H48aZ90n+dcFGtLQFwZ0k9epTXfbZtc78ja0NvvWXcEQC7\nICThnI4eKi4GvvhFbqOsuxUWMl4EIhFuu2xE6+pid4efL5zIuDe6uoCf/MS9ZqEX2e2F764uk3VY\nIshk4xrA0XDiDpTNjOvXm3uTJpngAcnrJnUcOgT86U+mXt1HwZu0c88e46K0+1lZyZs/NRw9CvzH\nf5jnYzF28+h1MdkZL4EpZWW8GVHwKu7UV19l/lBWZtYLhbZiMd5seuGFfReiNejcdfKuzEFZrO7s\nNOWXlho8S1CGPDttGs8zvVZn72+JRNwhwvrZwYPNuk4/4XinVR/tOM4Kx3HWO46zznGcptT92x3H\n2eU4zuup67PqnVsdx9nkOM5Gx3EuDlTR/Pk8oebP5xBRHVWk1yJ0jqrhw80C9MO/Bb79feD5V4Af\nPQz87Df8/J49JoSupISFg4T4ypqH7M84csTs0Uj9V9jWhvMTCdxx441Y9J3v4LcvvmjSN1sRLmHx\n9RcXo7C8HN2pne6hoiI4qey2hYWF6NZRSXYYJIBHfv977H3/fax++GG8/otfIDZkCDqCbLqzo2D0\nO7J+9Mgj7EdtbOT2t7fz/oP58/uWN2JE3wzFumyJLqmtdUcDSX/sXEV6QVYLv64uXp/wihLRiRMB\nkwRx2zYjdNraeD+NhkSC11akfGFstlCRZxcu5HIFZ9Onm53ZbW3uhVbZaR2Pmz4VFDBjvvtufl72\ng/jtJzhyxORnAnh9RQS3HUL63ntmPUP75SUVTzLpxpF+P5k0dVRUsGImisN775l1oZoaE+Wkd+zb\n4S1s+ZgAACAASURBVNHd3QZ30rdRozJHMnV1GcWAiOuZNo3b3NbGm07tzXAiqIm4jTLeusziYnca\nluuvd+daA3isKipMShydvqi1lfs4f75ZO/SaZ21t5r4dnLF+fV8cPf+8UWx0iHtNDW9i3LWr7z4Q\n3eZolOuU+SBrX5IlQOGhB8j7UJ/jbXF0A5hPRJMA1AKY6zjOpNR/3yei01PXfwNA6r85ACYDuATA\njxzH8Yg5tUAWbKdNY6YmcdrFxWxdDB3qzpBZVcUC4JRT+Fr5hrs8+a1Th8iuYcnKKRaJvtSC+sYD\nB9Dy/vv83NCheH37dkw8+WS8u38/Vra0AADaDx9GN8DadzLJgkhHecn3oiJeABMCkfxT0h8Fbckk\nhkejCEUiWLFmDba/845JvxKLcXvknViMFxsTCaN1C0Qi7jBa2Yx2+DDnzRGGWF3t1oLDYSb00aPN\nxiyAdy7H40aYyOKxPhNEtK5o1FgIsRjXPXWqiVCaOZO/z5rFbUwkgE98wrRdR2o5jmE8yaSJrJHx\nBXihuaHBpKxetMgwRZnssRj/X19vAgMkz9GSJUyDwlQlJUVXlzvarbSU72/bxovf//t/uzdlTZjg\nzjhbW8uXQDTK/a2s5Eg1AZ3jyk4emEya7LaACcAQISB9EK1ezp0QPMt77e2sCZ9yClwgFp+2qgCj\nsIVC3M/GRh4nYaKFhVz3ggW8iFtdzfMyFGIlRSsyOlqwt5ejoBYt4jJ1JGBREeOlshK4804eK1l0\nB7jtxcWMU6H34mLGq5RjKyBa8STi38XFJmpp4ULTN4Dpe+ZMUxaR6YtYvwJdXWYMJkww0XH6P9mH\ntnEjj3087g6ECIW4r9LH0aNNf2pqOCCgpsbMMZmn8TjeAVT+pdzgeKdVf5eIXk19bwewAUB1hldm\nA3iUiDqJaCuATQDOylrRRRfxtWIFE19HByOuvJwHTzbbycBIQkGBM6e5f5+d0jZLSngjl0ReaSEh\n/4vlATDhps7lOLRjB665+WZMamjA1Jkzsf711/Ev11yD//z+9/GP3/kOpl11FS766lfRMXIkM6dI\nhAdc3DlHjxqNtLvbJMoDgKNHsfKNNzDq0kvx2NNP4/q778bkv/kbAEDjpZdi1YYNqPmbv8HPnn4a\nE8aP53InTGDiKS11nwFx660mPbVAOMwTeufOvlrU6tUmI+tjjxkmeuSI2TfT3m4izGQi7t7NDFOf\nqHbTTW43hQiNr3/dmNuC2927OZngrl2cF2rrVs6btXcva37vv28m1LRpZiK98opxe6xezZFD7e0m\nkgxgOlmzhtu3Zg0LAtFio1FmCmedxfdkD4aUv3QpW2JLlpi+rl1rXAi9vTx2lZXuGPyVKzk3kux5\niMeNsBHGdPiwoTWJvnrhBcbL1q2mPq1xi6AmYoWkosKcZSJJBdvbGZ/nnMPvHj7sPktEIthOO42Z\nlZS7di0zMF1PVxdbbDU1bktQxrWriyOfFixgZi00fd55PJ7LlzMOo1G+19XF4yRCHzDMTuimtZXx\nLWMmjLm7m93SbW2cBmfQIHduM8lbd+qp7OKKRpmGvvMds1lP0vYI2O7QggIjtM87j3EIcN8aGjgB\n5Le/zd+HDXOHbScSbss5keD3H3rIRFgJlJezoBMrqLmZaV27RCMRbt8zzxgcbdjA+EkkWNnatYv7\nLUJHrKOaGiSBDDtdM8MHlnLEcZw4gOfBZ47fBOCLANoArAJbJe87jrMIwEtE9PPUOz8F8Hsi+rVf\nuTNmzKBVq1YBLS2cdwfAhi9/GRNFaJSXGyYmPtfKSrNLu6SEB+PeHwFvbgQ+dQ5wyzyeWKJFjRzZ\nd4+EgBzxKgJFpy2JRnmQdW4r8QdLaoNolAmko4NDFkXD0WlNsoHWWoX5SI6rqiqexIcOuVwtG3bv\nxsSf/IRDbHt7jUUl7YrHuW6tzYpFIOkPKisZvzoPkWy0/OY3efKuXcvvTpjADEZriPr9VLvSexTs\n8wbicXYn3H03u4JuucXkOpL+y/iWlzMepIzSUq5XXCWS5mXECJ7wcgaEgLiZAMOUJVGfrCsIhEJA\nag0Kzz7rnbNKdmfrnEadnYyT5mYzdomEaYfOYZZM8sSX1PX2mAMsHDZscCfXE3zoFB7S/0SC3UTP\nPsu4lX7a3085xZ0OxevYUjkWIBxmgWmn9JCdzjoD7BlnsMDQOZWknZlSpgBMf8OGMT69nhVaFOEu\nYyZ0VVdnxlwntvQCwfO0aUzLkozz8GETovzkk27cxeOMB51rDeCwb43bc85hpUOnf
tGHken0KcXF\nLOSWL+c5q+eqPc6VleYIYxlvSZEj9FVfj7FLl+7dSmTlqw8GH0h2XMdxygH8BsCNRHTQcZwfA/g2\nAEp9LgTwpRzKuw7AdQAwRpIJjhvHBPzAA8C11/I9nRZcNAkxF0WTHTOGGcj/d43Z9Ld7d9+zyUUA\n2ALEznwLsKYjB0eJhiqbCouL3bl3enrMEbV693pVFV+bN2fOnSMpRnR2Ts2cOzr67jkRK0KERkEB\nm9crVpi6bL8xYJiBZNyV41ITCWYA//M/hkE/+CBrobLpaONGNzMBzPuAexe2bRFKig9ZA3j2WWNp\nAO6+AzzZZMLJmpLAkCE8oeSAomSSx1ozRGHw9fX8XdZRNMik7eri1PTi0pKNjtqnro+T7e01ON68\n2bRdrL9//Ee3kBc62bLFvUZhBzns2GH+19q+zVglmEMLypoavtaudSsqXjRw5EhfwSBrMkJnsh4g\nzxw86C5X90MLYb1DPBNIqnYREDa+zziDaUiUFoBpVBKYrl5trAavsRWQrL/79/N6hOBcrEZZ5JfE\nhcJTNN7ECpZswgLFxZytVvY/CW1IP2QfiO7zqlUmkwVgaESU4VtuAe691+wJAsxc37ePy6usZMVr\n40YMAYb5oTgbHPeoKsdxQmCh8QgR/RYAiOg9Iuohol4A/xfGHbULwGj1+qjUPRcQ0UNENIOIZgyT\nnZLPPMNXQ4M7ymHQIDa5ZROcgKQxl93jcn5GMsmDLCk6xMWwZ485h1wz4pISw+C3bzfHzcrBUW+/\nbTJ1Cpx6KtcvyRflMCjAMKQDB9wHJfkBkTvyRx/kVFBgjoL1SskxaBD/d+aZnB5c71i1jz2VsidM\nYLzIOoHspl2xgvEr5e/YwRqlvfhnHx2rXR4ybn45guJxxkcsxoxe+4b1e/KMWF8AP19d7d6xXlbG\nbjG9K13+FxenpNo49VRun+wM1gvlra1m/JJJNxMrLOQ2FhebHeizZ/NvUUIkV9T3vseptAXXGvcj\nRphFZMlXpsEr42pREdetD7aqrWVNt66Ox6uhgYXVxo3M2ISJynqOfRSyHPQlEImYMdX5voQuAcNw\npc2nnup2eQ0Z0jcyTqde8YKiIlO+RD0JrF/Pc1gfBfzeeybVfVub2S0u+aW8QJ9YKHRgZybQuar0\nWeA6wu6RR8xBTYDR/OfMMQqHfTa4Phq2rIzH6+23Tbp5iUoUPMlmSEnQqkFyzYmgfflloLkZXUCG\nVMuZ4bi6qhzHcQAsAdBKRDeq+ycR0bup718DMJOI5jiOMxnAL8CCZCSAPwIYRxnOHU+7qqZOTfve\nNzzzDCY6jkmpLtqFnf321FNNqK5YItrS0FlqIxGjZVdV8XfJiHrwoDlWVhhKSQkzK506HXC7yaRu\nsUIk/bto3HLsqV8WUYNQw5ht7csHNuzbh4mf+Yy54ZXlU7TV6mpuq0S1eGXBtbOSCtguFS83lLQ5\nW2ZSr76JKa5TbCcS7lPSbPeRdg3E4+wf1n2XNuv+yXdb27bdK8JcdNirnbXXKz261OmHR/u+/LZx\n5oWjujrWbNvbzfM6S20sZiyQIUO4bZMnm3UpAXGl2dlXpU59PxZjpiu/teUjz+u2lpebg8vCYQ5Y\neOstk0lYW4T2GAj4tcvGf0UFK027dpmEobLb3CvNiD2eNtjp43U5Yo1I++0jBjLNV90f7UIEvFPE\n264rjSc7RT/6l1b9eFsc5wC4GkCdFXr7b47jrHUcZw2AWQC+BgBEtA7ArwCsB/A0gLmZhIYLFi40\nDOHIEZBoMD097sObhg41g6cjmGQQRGMoKeF7kmVXooQAY6Xs2MGfEmVVWmreLy1lxl9c7BYmhYUm\nekpPeFkfkPPJW1vNYU4aCguZMLWGpSdRgJTQJInkBEdyMqFdz9ChRhuXOrwWQMVq0aBzCun9AjIB\nJSpKpy/JpMRIuKe2rgB30rtEgvGmXRRaO6uuNvHw0t9t2wzuBfRZ4XZf7Tba7pXubqPBioZuJ2fU\nlpldp5wvArjH/tAhd/LF225jeu/pMYc2lZXxCYe2lv7yy0ZwCn1IAsS2NhYGQguyeP7SS4bJRCJm\n4VzCgOVUScAs6E+ebPAop+0BJuOw9t9LNBNgstpKFFpnp0mrUVLCubYkUknwWFnZN2eWjjaSevSn\n4L+9nWlOjr2V9ZWvfc1tMZSVmSNpdTReJOIevyNHWDiI8qGTPEqUkwg9GVNpy6RJbitGWwtaCEpK\nFQEZR8cx/ZN7dmSdhGrLWSsAUFCA/YDlEw4Oxzuq6gUicohoqg69JaKriagmdb9BrI/UO3cS0alE\nNJ6Ifh+4sosuYmKfOxclvb3Y394O0sxAmPW+fSaCSUdLyZkZEuGkD2IKh/m9jg6T2FBSq8vnoEEm\na20kYnJPCRQUsMtMR2cJEelkioA7ekvyaAlB9/R4pyeXOsaMMcTnOO7nCgtBjoP93d0okTNJJO+O\nXlcoKOD279rFUTwaJPpG70HQC9IAf/7LvxgilcXYaJQXuBMJ9qmXlzPTWreOJ6VMOGEAFRWGYQwb\nZhLUeQmYzk5e7I1GDd4dx6S5FwG1bRu71YqL2RUjk8rekCj9KypiXNjuM3F1aVeZwNCh3I6772aX\njOR2kpBegPsgjBhwM4VBg7i9cihQOGyiqgRPL77IbrZwGPiHfzA5oZYt4yNCNYiw1kcbT5pkEvRV\nVBiXqOxF0i4cCeldscKcH97VxZa2lHv0KPDGGzwGAONa+ia5l/RG06NHuV3V1RyxplPQV1e73Vuz\nZxuLKBxmHN53HyfQFAWksZGj1AS/Qovi1tRHBNTVcVTV0KH83JAhTD8PPWTCjbUCALB77ze/4XGJ\nxXj8tGsOYOEniQ1FGdqxw+z1qq5m+hdlUiK55AhpcasBZjwk2agEZsic0htntbCJxQzdiBUkyoFO\nd9/biyHAUOQJH4+DnCSq6v77gdmz0XXDDdjZ1ISORIIHxD705/Bhniyi8cgCqawPyP+yQUoyuorg\n0JpNV5cRGLJhKpnkOuUUQXFjxWJurUs0lLIyfqeioq+2KOWLm0xSWNtWh+zpqKxk95mf5dHbi5JN\nmzDq9tsR0mGOsktYNNMhQ7gvbW0mYmfiRGbgcgKZNrNrazlB36FDbm1eQJ7VJrd2W2mXQHk5+/p3\n72aXxdatJgxRWxja+pG04jJenZ3u/+NxE10jYxWPswBbutR9yFYy6Y54sl1Lum5x+VRUpLMGuKL3\nVq7kTWJLlzLDKivjyJhkknE2bBjvUAbcp/AJfo4cMeXqg4B0pJJY2gUF3J8xYxiP69YZd1EoxIzx\nt7/ltui+y/jbEXy2S0jwYLtXxB1TVsbj9v77vM9i1y5ePxHLXYPgW/CnI8qkLV40I3NZ6hSBc+ut\nvHi8YQMHT0gU3N69Zu5LXXV1HNAg7krdd91nua/rkFMuKyo4LLyigtdWdf/q6swJknZwgu6n0KWe\nJxL+/MorrJC+8w4Lyd272cqUrMASNSV0
It8FZ6KoicvWwxU9ETi4gShLHnkf+LBzTR2zXFVz5xIB\nJtmh5IOZO9f8N3cuUXOz+RSQdxIJzj3T2Mj/NzZSOseLTvqm39dlE5l3Ghvdv/Uz9nN+7Wtu9s5f\nZF9eydMAk0OnpMT9vM4b5JWLSZ6RnE66735tkHxRgned+0gnZtN5pewkeDqPks7v5JcLS9ru1wev\ncu16GxsN7nQ7syUNlDpLS7kMwZWdhE7oyR4br3blkoBR8hsVF3OSPLtsSeincakT98l46ue8Lp1z\nSpcrONTJJvUVj2dOqig0JbQ4a5YpU3ArfRRaKioytGzThLTLj0Z1TjXpi9SXKcdUZaV5z6s/Xvmj\ndK40fRUW9s3VVl3NbZY2RSJ9xyQcdifRtPEsuNTzIB535+TyyPd1KrA/X7770c9V1dnJJvzs2ewL\nvekmc4ZCNMpagZwQ2NTkXYakjBg/3hwKNW4cb1oSSS1hjGPHcsjv/ffzu7Nn87uzZ/PvBQu4roMH\n+TTCCy9k6d/QwM/Mm8fWkZj5Bw9yu+RMkfvvN+XfcYdbC9MH7WhrY/Vq1p50CnGAXV2JRN9jZj/z\nGfM+EX+Gw+7IFh39ZOfw0SDvTJ7sfYaBjrqJRNwntOkDdwB3ZJku59Ahc/JZdbXBgR3BouvUoE9o\ni8eN66SujkMizznHvfs8FAKuvtq4IsQ9J+WWlzPeCgrYEnjqKY54AVgj12sTmzfzGItLUONY2iWu\ntepqE22k+6U/dZ4uqe/BB/uGictCeDjMLouKCqYRWTuQFBTt7WY/wsyZ7qwCduqVWMzQgEQdymZE\ngK1UcfHJ2pG0V/otYzBnjjkQLBrlNog1KTQpmrqE+3Z3M42IF0EyIWh8dHXxXBMXpBwkJpFF2loS\n/Pf0eNNSYaE517642L0OKJkYJAceYD6l/XLOjoBYjjoCctcuxpX0PZk01u2IEQYP4krWaf8F5Kjq\n8eNN6n/Z2AkwP9N7pWIxoKGhXzvH85I2J9I1fdgwSmt2c+ca7U60FbEm7P8bGtLZdNOgMuymQTRA\nkfiiQYpV4mUtaCtDWwzaypDy6uu5Hnl32TJTli5HZwuVPul037aWIlqUZBfVFkA2DV0uO/20rbWU\nlZlydRbTsjLvLLiZtHitSdmap43LbNaAzmBqXzrVubb2bG1c+qX7qPFga59+KcTt50pLzVgJjiS9\nvaY1jWu/jMK5XPE4082XvuSdxt5OAS9as26HjIf9vvRdWwFa647Hma61NWDTq1+WV696bEvNtvIk\npb3XGHjVU1bGFk8mq176oX8LPqRdQidDhnCZggOvjMRB0uDbFpKUY7dDp2jX9Csp62UMtWU1dy4B\nWJUv383+ALAUwJN+14cuOKZMYUIRpCUSbtfK4sXue3V1/FsGIZEwjFszFAHN0LXQAIyQse/L94YG\n9yDX1XkLFhFYqQFN/1682KSNFuIZMoTvaSKprOzL3IJeQ4aYCRCL8SUTwRYUtbWZ3Q9BLz/BpZmv\n9Feerahw49Jx3Gd66EtPEM3AwmG3gCsvZ4YhzwwZknlCFxW5GZ6dXt3rnQkT+rpVBI+aidXVmefi\ncdNX6b+d+t5up52WXrt75IwG+U/6ol01Xu5Azfz0JWUUFrrT1uvx0O+VlbnnINDXfRqUXoqL3en2\nNU3IuJeXu8+ikDlil2v3TdO+fcViTCteNCZXEAGfTWkT3IZCTDu6Lk2n8bhb2AnubeGq6VLKLigg\nmjWrX66q7A8An0pd9wP4TwD1qesX4ESFeVV8zATH9OmG6QrytBARjVIuW6sQjV8/J/dskGcaG92M\nXt/3Wv+QwdNCSlsd0qaaGv5PHUrVp/1ezBXwtgY0odoEK8xCCxyNm+LivmXKbyuvf5/ys/nN/XzK\nflp2ZaVhfLrPUr9dXizWlxbsSzM5wal9XoLfPSnT1vwy9VvjJ9NZCZoGxdrz8qP7XV4CTJSQTNaj\ntKG21tsiyXbWRJB+x+NumtFj6bcu4HWVlxtc19ebMmMxdx+1tb9smflPK0rZ2qzr1OtPWvAlEn35\niuDLLstPuHgJLI0fr3EVXiP8wh5fW7m03p8OUL58N2vKESL6EwA4jrOQiGaov5Y6jrMqbx/ZsYSF\nC815HMuXcyTC1q3s85s9m32kEvlyzTWcIO3gQQ57XLCA1xzefttsqpK4extkjUQ+U1FcWLKE1zUW\nLOC1kZYWs+4C8DrL977H7ZP7VVUmxff48RxOLOXG4+zXlP7U13N0iCR/0xuYJOJDokzEX3322Zx+\nYPhw3kglYar797OfuKeH+yo+61CIfaKy4Ur8rLEYhzETmbDawYN53eOLX+TomYMHgS98AfjVr8ye\nFmlfSQm/J/mhZFOm+JT1uonk1AHckSbnncflNTRw9MiaNXz/1FPZBy1RJSNGmEisI0eA117j/8vL\nef1l9GizvnTZZYzb6dN5beOHP+SosbY2rjsWYzx9+csc5ilRd+eey2N1991c3ymn8LpEKMR1vKEy\nLTsOh+JK+mzH4fBqSZcxfTrjdetWrk/WHyRz6znnmCifw4c54qq9ncdHopIkwksiqkpKGD9HjzKu\ny8o4Cu6OO0wfhg/nxH6//S3/V1nJZU+YwDi9/nqOHopGuW2HDjHuxE8uc0mOGwDMDutIxEQmynjL\npkKJBDt0yOTvkv4S8XpeR4dZU9RnxDgOj+vbb/P7Ej69d685qreoCHj4YZ6PAEd43Xsv0+kTT/Dn\nD3/I6wOycVeD5C+TpKKRCPexs5P3KQlfKC9nOpcjeJ96itPt3Hijob1IhGlh6lQu49Of5vNQ4nHO\nSXf4sDun2BlncJ36jBW9eXHiRI6W0xF4Bw4wrU6dytF5W7bw3Ny6ldeQdu/mNVvJ+CCZAcJhHqMg\nefD8ILBpwpltT1G/xwLYcEJYHF5WgV7b0GsSGrzWJETrt5/Raw868smOqiIy5SUSpiytGYh2IieI\n6bbp8nQ0jva/A+41D4BPXxMtOhw22pTXsZh+R7XaWo79rK0tSX+Avtq27ebwcgHV1nq7m6qrzfMF\nBabvmaK6vOq0Nb6aGtNO6Zd2C2ktVNwhdsRKJOLui26/l6Zqa/axmPsIU123PKs1YdEi7SNipZ/a\nErDdP+Gw6Uck4tZa6+rM+Nna6JAh/KxXtJQdZWXTitc7XniR9odC3taP19HFXnXa6wfRqFmjFLqX\n/mVzQ+rflZVczl13MR7nz/c+Criujueu1CF0JVZQdTW/Y89fr6uujt1Pfv2srzf1S7+FVvR8kd91\nde5+WXjuj8UR/EE+H+NtAM8B+BOAbQAu/tAFx5QpbveQMPPFi/sed6qZu2bSmYSLLYi8wnvlXVnv\n0ANkm5Pa11tf31fwaKGkBZoXMSUSfRfQ7GvIEDcjykVoeF32mcaamP2YhFy2uS2/y8u9FxA1c9dH\npuZzBT3eU/ApEy4XF0oQnNpHqOr/7PBQwaWNt1DIf33Hrz/2+Ai+vY4N1peux2ZOXldQJSXIGodf\
n//Xl5W4SF7UcZTtkiL8rye6bpt/6ercyJmVrYa+DXjLhTtNRUZE3XWVS2vSlXVK2wMvkLrWUoE8A\n3cddcKSERxjAtNQV/rCFBpGKqtKWgvZnCmNraOhrLXjt6RCwLQ0/i8NLuGii0gLAJjKJ4a6r8xZa\ny5b19fXPnGkWeIWAw2GzkFZVlXmiaWLTRKcX/0RL0hNA2iGamC10vTRgm6gl2kRPkspK0w+tpTU0\ncF/1PoHSUv8zs+1Jr39LHLuOctGLp2VlPAb19WbBUfApGrv0S5hBJuYpjEL3UUef1dXxpzDPSMTN\n4G1mOGSIf33SJ71grc94nz+f+2NbUrkGU4RCPB5ei9Fy9nltrTsirLKS65cx1Psx9Pnk2eqVZ+V8\n+FjM0MGsWVyn9Lm42NCxpr/6evfvTILLjlQsKOBoNOEpYkGIEiXKqaZ9m04LC904t6O7ZBz1HJg5\n00T1TZtmoqRmzeJ7kUjmPVr2Zc3rCUDbcRMcAOpSn5/3uj50wTFlitsk1RFK0agZVGHM2nLwAmH0\nUqbfpkH7+cZGtnJk4jQ09LVAJCxRL4xrAm1ocJethYwQfTzuvbnOXoSUyTVxopt4tEaiNf2CAndE\nRizGxO2lhYobQ2+qkv5Mm9Z3g5W+Egl+t7zcPXlCIbdbxS96SbujbK3NZu4Aj7ft4opEvPEo46St\nm6Ii93O1te62aReW12QVhl5W5rZitCCQfofD/Ny0aX3bZ4+3ZgZ+uNa4GzyYPyVUORLhvngJfLtv\numy/6CvdVx19JmMRjbqFRTa3o76EnmyrQUdYaVx6hU971ecXOOE1N21XlHaxadeqpjGA6JRT+rqv\nvK5YjOuYMIE/7XlrKwga75GIe/x0dJgWstK31O8E8P7xFBx3pD4Xe1z//qELjunT+65R6P0YOjZe\nh9F6CQEidxSTPJfJpeW1A1ze9TJhpV1a2AkBStSVl+DIxJQyTQZbQxYNLdtO21yuaLSva1Cb01ob\n1owg25UtdNFmCLGYm+mHQoxrL7+73yV40W4x3V6NX43//rjR/C6b0diuSQnV1viqrc3sUtIMRgsh\nsVR1X2fO5HG0GbF2MWZqf6Z2xGLe96XM0tK+odR33WWYoC3o/GhFz3sv+rEFsYTwyzhrDT7IfCsr\nc4+J1z4rr7bmE7Um+LXHx2vdzKOtNUDyuAmOwAUB13xogqO52bikFi92WweaaOrr+27w+//Ze/f4\nOqsqb/x7kpykyTm5NGlJShrm0MuhLaQtLaWhoLThNoIJDqDDvOEyDAIOkcGh+EpH59cyr4rK4IgU\nX+XnWAFvr4wo4qgERqqIY+UyQOV2ICW0VFoubdM2Tduk3b8/1vlmr2ed/ZykRbD83tmfz/M55zzn\nefZl7bXXWnvdti3cQdBwrVVTofQhmnGwH5wgHc+hbQHa3TaX866Sq1ZF+9LTE0ZMSlkLF0rdGqGp\nGtAIWl4uko82eDK474YbhJmE3AettGIRv6rKuzoSDtwO33BDVB9cUuKl3fJyv2C1RB4ypmezntlR\nFaKlTG2TWLjQ7/w0YwzFD1gjL8ekYzzYnjVAUprTqrc5c/yYtMRndwKWQGi3YfYt5A5cXS3jmjNH\n/j/3XM+cdRvZrJfsdXqQOXOiKqs4N1GrziMMLQHkvIQIIuHEOnVApm23pMS5lhY/5ubmeMOv3nGE\nXLCJI5rYNzcLLurd0Ny5UQapd7s6VilO5VtXFxXErHqoo6NwDPYZPWd2LLZdjQ/6O9/TTJY7uRMi\nWQAAIABJREFUVn7X6sGFCyPpU94R4/gYGMfjgXstAB6EpEl/GsDV+fv1AO4H8EL+c7x6ZxnkrPHn\nx2J8LzCO68hw2iZItLkQ+DwD9Gj/0MwjpNIKGcKpEyfD0p5QHR3eHsBAxM5O/72jo/iOxsZw6MVg\nA4D0wgh96kXS1hYNrAsxB+pklyzxxIpEZMkSb2exem2L1M3NnrjoBVJaGiU0WmqqqJA2qEcm82E9\no+Woam+PjsnuupJJqUvbQHSkNNupro7aQfQnLxtoVVrq3ykvj6ocSMAJh9JSbzfo6BACV18fH0Wt\n22bfLdHR6kWOSTNvMpyFC/07tbUiWGgY8z2+q9UiNhOBJmYTJkSJf3V1WGovphqz15w5Uo/FZYvz\n2mNNX9aRIG4HFFovoT5yfqqrC3OLUQ2q5yO0viwjITxKSnwgLvvDINIQXoTGYpmi/T9vP3nHjOOj\nMI7/CtybBGBe/ns1gByAWQC+AOC6/P3rAHw+/30WgCfzRvgjAfQCKC3KOGgctzsAbaPgPW5bQ0SZ\n72vDepxrbuh97f6rJSMbnGOD+nR/rAdYLjc2tQ6lvxBiHmzgVhwicmyhoLRsNrxwD1aNo9uwOy/N\nBOx7xQzoozkOxPU3xIhHIzJx8I+zh3C8o6kPRzPM86qri+4y9LvcUds0LiG31LeKP3HzNNbxFYuS\nH20+KWGPpvYMqd3GkpqHrvL2/bHO0YHAK05gYlvV1fFpU/Q7+e+H7I4j8Mw9AE7L7yYmKebyfP77\nMgDL1PP3ATihKOOIc8eNy/mk3V6549BeTZrQx7nmsl4bmWrb0/dpA+jpkWd0rirLOHT8R0jXbl1G\nuQOi4Zk7IPaPiytu8YaIW1mZtN3REc3bpMeTyUQzmHZ1RfXQvIqlKglJUaxL65q1Pp+7oc5OUc3Z\nhRTHqJqbPYO18Sl64ZOoLlkS3rlYVQxho9vi71CEuibk3G1ohkw1n1abtbf798aNi+4Cdd3NzeHc\nURwz1Rh0Bed8MsOu7lvI6A0UMr6Ghqh3j7bbUZCgulGnfLEqVcJWq7bsXJaUSF8JMy0w6T4wjoHr\nTu+wKitlXoj3VK3GGbhD6ySd9vCl9sB6fOldgmUkxdzoU6lof2k45286T1jPylWr/JqZMyeKkw0N\nAg/Vj0OFcRTsOMz/mXwcSA2Abep+gr8BrARwgfrvXwGcV5Rx2ABAFr07IFGly6W1X+gcVaFEh6E6\nQ0wqxCTi3tNBgDYRo2ZYRGhu+efOjaqGqquj+nVtEyDSzpkTRTybc4kqLy6URELucTxLl0aDoAhD\na5i1RNESIH6S4C5cGPWyYt8aG30wFBdjJuP7ocdhJeTycnkunRYCpG03c+Z42Cxc6Bf5nDmeoFGI\n0Ck6dDJJq9vXrq8cP99vbS10JeX3hgavdyaOco74XEOD3LvhBqnTpoch/LU7LIkgcaS9Xe5T393e\nLgxX94XMywapVVX5NtiPTKbQRZzvVVb6vtxwg3xWVcn/fIZ95PyOplpaulTqqaiQujVeEm50tW5o\niKrqdIAl4U1mzfonTIgSeKqJ0mnnTj892qe/+Ru539zscYxwt4zFMlDN0DjHHHs67dzUqf4ZqoOJ\nnwsXyhVSyWkBQru2250Y54E429DgZgN7DgXGsbLIf2kAj9F9VzOO/O+tB8I4AFwO4FEAjx5xxBFh\nwh6KnbC6Tuv5pJMbFmNA1iZi4zhIcJiBt7PT129VZyQW
OuJVl7iAqpAEH7c9LqYmoGSmiSAvmytp\nLEFnVvq3QVCaENqsnqOpM8aiZgJGz5VV7LK7NKsWsYTAqgTT6ajX3FjOMtE4EpoHu5tJJDyTD3l5\nZTJR1aiNL7K/49rR9dsMBNb9276n5728fPSMtvrS4y/myVQs+I74Frofcp/llU7Hr7mQ7SCEEzqG\nR7te8zM09oNVbVlcqagIG9A5NvXs27rjAHBNsWsM7yfzKqdr1L0/nqqKOw5tmwhloNVESqsGbICg\nZRraSK4XnLaJaKO3XrA2pbr9baUDu+PI5QpTXjQ0eClcJ/Pjf3EIFmI0IW8VIh3bo9pLExCbQLHY\nAgipwTSh1J4f+lkaa/Vl1TNxl1ZZFCNCOgZDq3pGS69t3UwtQ9Xv69QwdjdWWurftUQuRKRCsKRz\nglVLsG3urCor5ZOBeHFedCH4aEIXsuvo2AMNy6qq0YUWzldzc7zhXAf42TnkmGwsQwgnE4mxZVGw\nbrohPNdwsOsuFITH+vQ6DKWhDzHvcePi17Z1oQ61qetR9b+VHcdYDnKqHuWKLYlEIpHfNTzrnPui\n+uvHAC7Of78YYvvg/fMTiURFIpE4EsB0AL8bQx8leeCWLZJ8bds2f4BSJiPJ0+bNk6Rt110nycKW\nLZOkcID8z8NodLn5ZkkCx4OaMhm5Ojt9QsKlS+X9X/9akuZt2SLP/+AH0kZ9vXzW1clztbXy+cgj\nkhANkGR23d3y/dZbgXPPlWR811/vE8s5J++++aYkJ9u8WS59LOW4cf7QIZZ0Wvr7pS/5c4x5mM5t\nt0lyRh6OVF0tMGN7LM3N/vzi1lZ/vKVz/jAcHq40frw/a7q8PHpQU0WFPH/66cAvf+nv89hLfdwt\nx81SXS1J2vR560x0pw9Oqq6OHlz1nvf4cU+c6N8tLZX+V1fLEa48SGfjRpkzfYRvSYkkiWOZN0/G\nxmM89+zxz6fT/kjiRMIfprNxY/T4YtbL+WPCucpKge9JJ/k6eajVccfJ/MyZ4//7/e89XFh48FEi\nAaxZI/AdHJS+Pvus/N63T+aDfdUwZCEOMEGhPpu8osLDsrdXEhP29vrjk2trJdnfn/+5JOnke4R3\nNivnngMyjw8+CHzmM1LvBz4gyRmTSfm84gqZR8I8nfaHIW3eLGtucFDGxEPDiHcVFVJvba2MRx9U\nxUSbQPSgro0bJamgPeyssTF6NjuLOZJ15Phg1tvc7OE7c6anI5/8pK9PH13b3h49qG33bqlTFx6M\ntXevJK1kctDKSn/IFectnZZ7u3cLDuZhsBswlR5AOViOM6btDHASAAfgKQBP5K8zATQA+A+IO+4D\nAOrVO5+EeFM9D+B9o7UxsuOwBy5pDxWdZMzGUuj01dqLyrre6p2HVV/p9CbathFys9WeU6Edhm6H\nuyZKI21t3mirA9Vsoj4GgWm3wThPIStlaunFJlKkxETDOO/RTqF/F/Ok0pG0cTmqdIqH+vrovLIt\n23cd40LJlXr+uL7oADFKlqHdGWE6d250R1FVVTwAS186EaXtt5aW9fs6zkMHNnJe48bFeQu5mepc\nSSF1jk5AyHtxwX4VFdGIcNo87MFnxAu9lrQkr4369irmXqptRxp2Cxd62OndfQgeocjy0I6D/dfH\nCmhXccKN81JeHk4fE4cn+XMyiq4dzoNNhqiDejkm4hbbNTustzvlyJeLXW8n4xnLNRIAGCLymgDY\njLQ6My0JEuBVUNqDStswQhlxbYCgVmPpaGqbFLFYjiwa7ru6omce28OiaKjm1dlZPINpscs+T8Tm\n1jvu7Gob6KcRnDaUEOGxMShapagj7Ds6orEvbKu5eWwuk/ogLHsx1kYzqRBjZL9CuapsZDkZW2Vl\nlDGGYm5CREtnEtD4HEdENGzpCce27Dhsxlh7hQy6xAEau/WzGhetqzZhqZ1PuHasSvRg8FPHZml4\naQ9FK1SFxhaXt0tHZrNuK+BZuyUQzvpsx2nhXF3t8SbkiVdZ6embbS8kfMWNqbZ2hMHMfQtxHGNR\nVT02yvWnLzffLHnnAcnFT3US1StU32SzwFVXyb1775VzBQBR13zve/K5bZuoifQ54Pz+gx/IeRv6\nP0DurVzpzxvftg345jclJ/7atXJeQ2enVyF861vy7PXXy7nk99/vzyK35ZFH5KyBkhLZEj/yiNTF\ncu+9/hoYkLM3eKZyIuHVOTwPmUWfewzIVvaww+R7Y6Ns7a+4Qsaj1VTJpKgGeDZ2Mimqt8FB+U6V\nyYIFUsfQkDxbWytzwP6kUnI+As+H51b7uutkniZMAC6/XOq+915RCU6eLN8HB71Kyjk/Jp5LosfY\n2Cj9JkxYqqu9yu2SS2RMmYzgwQMP+OdTKTn74NxzZQ54hjpVZo2NMoaGBvnd3y/qI0BUJwMDoirI\nZKQdfS775Mn+zGmtGgN83x57TNRZ+qzyhgb5vXOn1PfJT3r1x6pVwI9/LPjY2urPrU4mpX2eS81z\nTNJpUX01NsrYeH4K2wFEzbJrl7T3j/8obVdWApMmSfunnio4vXKlP9u8slI+P/UpmcNMRs6/eOYZ\nmV+qRNNpuerq5HdJiVfrEEdTKal3aMifN97WJjjGksnI2G65RdYZ1YKvvx492wXw6iOqpuy5HKyP\nqqzNm6Wt006TtXruuXJWTleXnP1z//0CZ87h2rUyZ2Vlvg2qkDiedDqqUp00Sc7PILzZT6qAGxsF\nzlwHjzzi5yed9nhPNTRLRYWf40xG1KzHHQdUV6MUCOgnx1gOeIsCVP2pdxkFOw5KZVoK1mnMtcSh\nubUOEnQuKs3boo9zjUt46JyXqKynEH/btmw/4nJfAX77rHdUWmqJO7nMSjG6X6H7OpOrlh41fK1L\nr76qq6NpSHhZYyTHo7f0eldl+xqnLtHpKijhUm1AaTGkkrGSGp8NtWFPV7MpU+IM73Ht8dhaPX+h\no2j5qdu0Rk89D8xIGzoB0b6nd7K2rzoWSEuvIXVSc3Ph+qPqjc9buI7FGcDCVTs98D6dAyxujCWp\nIfsQWgOhQMilS6OeaxpPi3mJjZbkUOPqhAk+Tom7Jp0oUtencSyubt63HqTNze/YeRwnQFKHrM//\nngPgK4cE4yCBDQXkaWBxsknUdDyHZRw29sK62hbLrtvdHU1Rzva1DUVvsRmMpd2I2U+7PacKR/v8\na1WMztoZZ9sY7dJeMRpRQ3mO4hCW+bniVESWsNoU2Fz49GkH5NMGP9qLOGD/H8t50PoZzaw6O71f\nfXV1YZR+XC6huMtm+9Ww1ARPB+Qx5Xsc/OL6Qxjqdjg2xrTocbNeHV9UDN4aBzQu2KNV9dxZmGvG\nVyyHU7G29bg4Zgb4sg+h83LsVSxi2waNklFqz0AgGqOhGcrCheGgzTjYavsrYWiZnw2e1P8xxbzN\nJ5eH2zvFONZAck/9l7r3+0OCcYSixVl6eryeU9sarN2Cz5Iw2yhxErMlS6LMJi7luu5TMSalo8Rt\nfexDdbUP1GL
kuU7pzIWio6y1BKj90HVUrl6c9pOIrhGfi3TcOI+kHJfOyspxaYRmviFNWJqbownn\n+KnnisZ4wlLbN0K2E52/i1coX1JVVdRQWVnpn9HSNu1Gekw2Lb62U4R8+hnFS8nRulbqTLHakULj\niA245LxUVBSe9DZjRtRuFHL9ti7iqZTAiYkiCX9Kw+yzDkKjeycdGRjtrOdRj4PwSaej+BDKdcax\n6foss+QZIfzN6Gi9uy8WfKvHQ0agE37anYjdYds4JO1WT9d8HeVOmNI5YcaMwp2rnkM9Z9q5R0fh\nE37WfgZ4pwVqL3p6pJ58m8cAu94RxpH/1IzjyUOCcdgSl1NKp1nQi4mqKcssbB2hxIfWUO5cOPrc\nPhd6xhrOmRZFSxn6yE8ih2YYWqIKpSsJHclJZG5ujsYA6AXPwD3dFzLk9naPuMwMa89F0E4COuMr\nibKVpPQuisyV47ZBkHYHpAMadbqTEOHR77FPhD/Hrw2UTLttmVNjo49a12qttrZC4zbrSySisGM6\nDRJum+SRcOnsjKoTCUMmsNRSNmEel7W4uTlq0OdYbO6qsjKfeYG7SXtWhfXsyWS85yHHwXbiIqpD\nF4UmPfckuCaNRkS1yrVFJwstVKTThYzAxsuEvMt4sX2dXYB91V6Gen55oFjIm4rw0s4h+hkdX8Q6\nNQ5YwYJj1GfmdHdH+vKOpFUH8G8AFgF4HBLUdy2A7x1SjIPEmAvJnn1BxqEz2GqCy0+7cxltR2MZ\ngLavaG+v0KmB2iU3FMVOZCDRK3a0qVUv2ANdtASlJdLQVrmysjC1iJYeQ943mUxYjaUZQGdnVPrX\nyG+z4FpiGVrEIdVQJuMXtj1x0No1mOGU/eCnlZT1ZRM86j6wPsJU27jsGeOAV6foE+xCbeodniUq\n1nOK322qGRuhboUA26a134TcswHZxcYRfmv7sGlnLG5rAcceQAR4HB43LrrjTCT88zZzgx2bHvdo\nSRw1UbeBrxSmND5YhhuXcDN0EqZ2oWWAr1YT24SU9tK7b7tb4iF36uCvd0pVNQHAtwFsBvAagG9B\nxV8cEoxDuyCG4i4ofenFZ41GTMfOd22x7rJ2F5LLeV2k1knbukIuvLofZHIkZESghQulTe4MqEsu\nL/dbUyKNPu5VI6VOed3RUTziPJPxfWhvj9pX9OK3KSmoouHvUOSsXQRkHpow6XTntk2tXquslGhz\nHb+iYdHcHHVt5tXWFiWmevu/apVvj/23Eb5MHGij3zUz5hi1zt3CLJQwMbT4reRNKVb33woCOmJa\n245CO5NUyo+VxFknV7Rjs5dO8kcBQM9xXP40bY/Twg53tMVStuirvFyIuXZLr6qKnmaohYRMRvrC\neBytqrVxEaEDwbj+qepbulTuaZhxJ89cXnqMxRKA6ou4YA+ysnPBNUT1qE59Yhj+PGD/O8E4bkf0\n3IzxOFROAGTRRlQSZiab0775tBVotZU+oIYqIhqV4tRWWvdtE9UBUV11yI7BtrVqyuq3NWKEgt/0\nOdwhY5veqmrEs4udSF4sH1VtbZTIkiixHZ2ILdRWXHrsUJ+s2kAn3IvzZAoZU7X3mE62qL19uKAq\nKqKw0oRjzpzoaXgWTjwYS48rri/6HdqRWlrcyAK3woKtU9+3OaHi5o5tNzREiXR1tcDVqiFtRlzA\nnwGv6y0piQ9etGdTEE90fzRhC/Vbq3qSybE5H9TXj80pZDTPqaqqwrWk36Nq0u7cSGvixhTXh2Qy\nekZNSKALMe049XOxNt9Br6rQeRtFM+K+44wj5E4bF/BkA/1s1Lf20LK2D62K0u1pFQJ1kqHEhXHu\nwLrkctEtsE7VTAS1J8BpwmHdgItdIZ22XcxatWXvjXZpZGZ/NLNg2ujRgsJIhDShKnb2RtyljY5a\noNDqhThDKF2UtdcQ54VG6dE8kLQAE4LRwXjCafWJhW3o7BQgynTGksBS2yhCu6HQzsC6wy9ZUvhu\nXP+K4UIy6esJqWA17to1UOzI29CZ7qGjcnUwrK3D2j0OFFe1eoztapyIW3cHElRZW+umAm8eLN0d\nSwAgS0kikRjPH4lEoh5A2QG8//aX5cslMG/5cgmm++hHfWDMvHkSsNPZKZ8M3mMw3003AU89JUE+\ngAQT8fPqq31gHyCBgN3dEvDE9gCpI5uVNlIpCXxaujS+v0cdFQ061GX6dAlGY+4dBkgdfbQEnF1x\nheS4qq31uYH27pVn0mkJlspmozmqmpslb5AOEAIk/xUD3srKfJvMY8TAvkxG2mCuoc98Rj4J40xG\nAtcYOMXcV8cf7/+/7jqpl31KJiUQs7/fv3f88QKXjg6BJfvb2ioXc1pVV0cDxzh2G0ynSybjc5Tt\n3Qv88IeSs6u1FZg/38OawaEMoDr3XOl3NiuBbSec4NssKZHguOee84GB/K+5WcbAQK4FC4CeHhkf\nS22tBJRx/MmkjL2tzedWam7288J8T2Vq+Tnng0SPP94HBN55p+AR84cx6K+5WYLUMhmfb0vPpe03\nIIF0DG6cNEnG0N4u7911l+Rh6uuT+hsbpe6VKwVPOc50WtqqrpY22tvlf7bDuWPAqnM+EFDnn/qn\nfwLOO0/6cO+9Aq9sFvjXf5U1SdxNp2UNZDISMJdOS8BkV1fhOkgm/Rriu7xXXy9zzODF/fvl3k03\n+XpSKenHxz8u//3d3/k5ra31QZ0cjw70031hwG1Tk+Dl3XdLLi/Sr5NPFngRHsmk3GdA5PHH+wBL\nDVNAcDuZBPr7sQ9QyeEOsBzAjuMiAM8B+F/56zkAFx5SOw5d4nYSYy3W6F3M7hF6lzrkUMBgnNsv\ni1aN6XMMtCeYPc+Dajp9uA0PidK7J6rU7HkSpaXR8wqsPl5LjtlstJ/a5ZjxBnSp1We2r1oVVU3Q\nWK6lO+1IYI/dZXoQ7UZKexLP1NbOEDyDQ3vA8H0d/GbVlnrcfI7SIs/roAsypXpr89Bws2e+OBdN\nra9VlXyOqlLWoZ0SeKyutovYOBhth9O7buKTTrmjz4/RXlhdXdGzNgCvsqMun/NHfNHnluijCLTt\nyI6N7Wn7Vjot2XatZM/s1jZQVuOZPi6aY9M7D855R0f03sKF0XNZbAyIbScU/6VphXadXbLE95v4\nzPNoVq3yTiidndGUNazX2kRtMCDV8TpFD+EZ8q7MZl0lsNYdJN09sIflaNeP5q9ZB9voH/MaYRxx\nhyoR0JpQx0V9hwi69oEu9q5+nwinj4mlh4cOxCFx1YTDGsx1cja95aaumnEZDQ3y7IwZfquqDaDa\nI0YTxrjAuJAHyYwZvi19Ol5nZ6EHEpG1vd3DzbqwEql1n3QUuXbb1YSY+vi2tkK1Dg/csQSnvNwT\nBo7BelrREMtFqz3RdPQviWUIjlqtUFLizxRnH264QWCnXXLZzxtuKGRUxEFrP+Nlo5K1/cK68urn\nNEHRz4c85fT47P+0e4X6Zg33eo445pDB2R6RykA26wSgxxdKAsk1R7ho
[remainder of the base64-encoded PNG for the raster plot omitted]\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from bmtk.analyzer.spike_trains import raster_plot, rates_plot\n",
+ "\n",
+ "raster_plot('network/V1_nodes.h5', 'network/V1_node_types.csv', 'output/spikes.h5', group_key='pop_name')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "or we can plot the rates of the different populations"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEKCAYAAAAfGVI8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHARJREFUeJzt3Xt8FfWd//HXJyERi1CIpsCKFNeyKHhtU3+kCo90sVZd\nVNxabbWCPFAKbbetqAg/q0t/lodCqa6/XS9NiwJe60orlP6UKjUCNVRDQQEjUBVQlptcVlyVcPn8\n/pg55iQk5+Q2Z0Lm/Xw88jhz5pyZ+WSSM++Z73dmjrk7IiKSXHlxFyAiIvFSEIiIJJyCQEQk4RQE\nIiIJpyAQEUk4BYGISMJ1inLmZrYB2AscBA64e4mZFQG/AfoBG4Ar3H13lHWIiEjjcnFE8FV3P9Pd\nS8Lnk4BF7t4fWBQ+FxGRmMTRNHQpMDscng2MiKEGEREJWZRXFpvZO8B/EzQN/dLdy81sj7t3D183\nYHfqeb1pxwJjAbp06fKlk08+ObI6RUQ6ouXLl7/v7sXZ3hdpHwFwrrtvNrPPAc+b2ZvpL7q7m1mD\nSeTu5UA5QElJiVdVVUVcqohIx2JmG5vyvkibhtx9c/i4HfgdcDawzcx6A4SP26OsQUREMossCMys\ni5l1TQ0D5wOrgfnAqPBto4B5UdUgIiLZRdk01BP4XdANQCfgcXd/zsxeBZ4yszHARuCKCGsQEclo\nz549vP/+++zfvz/uUlqtqKiInj17Nnu6yILA3d8Gzmhg/E5gWFTLFRFpji1bttCvXz86d+5MuON6\nRDp48CDr1q1rX0EgInKkOProo+MuodXy8/NbPK1uMSEikk1lJdx5Z/DYAemIQEQkk8pKGDYMamqg\nsBAWLYLS0iZNOmvWLI477jiGDx/OlClTuPzyy3n66aeprq6mR48eXH311QwZMiTiXyA7HRGIiGRS\nURGEwMGDwWNFRatnedttt/Hggw82GAJTp07lhhtu4LrrrmPv3r1ce+21bNq0ifHjx7N27VoWLlzI\n6NGjmTBhAtu2bWt1LaAjAhGRzMrKgiOB1BFBWVmrZ3nHHXfQo0cPfvKTn9CnT59Px1dXV7N48WJK\nS0v55JNPqK6uZsaMGVx88cWMHj2aAQMGMHnyZObOndumHdsKAhGRTEpLg+agioogBJrYLJTJbbfd\nxqmnnnrY+EOHDjFo0CCmTJny6bi1a9fStWtX9uzZAxDJmU1qGhIRyaa0FCZPblEI3H///YwbN47p\n06dnfe+gQYPIy8tjwoQJjB8/nnfffZfbb7+duXPnsmvXLl599VWuu+46xo4dy8SJE9m+vW1uzBDp\nTefaiu41JCJRqa6u5pRTTom7jDZR/3cxs+VpXwHQKDUNiYjEZOvWrTz44IOfPr/gggsYPHhwzutQ\nEIiIxKRXr151+gPioj4CEZGEUxCIiGSxjEp+zp0so2NeWawgEBHJYBmVXMQwfsptXMSwZoXBrFmz\nWLBgAQBTpkxh9erVTJkyhSuvvJJx48axZMmSw6a5+eabG5xXavooqI9ARCSDJVRQQw0HOUgNNSyh\ngsG07lqCxq4jAHjnnXcAOO2007j22mtZvnw5M2fOBKC8vJy9e/cydOhQRo8e3aoa0umIQEQkgyGU\nUUgh+eRTSCFDKGv1PO+44w7GjRvHe++91+h7+vTpw4033khpaSkrV64E4Morr+Thhx/mueeea3UN\n6XREICKSwWBK+X8sYgkVDKGs1UcDkPmIIKVLly4AFBQUsG/fPgA++9nPAm1/dbGCQEQki8GUtjgA\n7r//fhYsWMCcOXO4/PLL27iytqEri0Uk0XRlsY4IRERioyuLRUQSTlcWi4hIu6AgEBFJOAWBiEgW\nla/AnXcHj80xa9Yshg8fzqhRozAzNm/eDMDOnTu55pprIqi0ZdRHICKSQeUrMGxE2nfXPwOlZzd9\n+nHjxjF8+HA2bdrErFmzuPXWW3nkkUfaVRDoiEBEJIOKpfW+u35p86b/1a9+xfXXX88NN9zAsmXL\ncHcWLVrEeeedF03BLaAjAhGRDMrOrffd9ec2b/rrr7+e4cOHA8H3D8+YMYMvf/nL5OW1n/1wBYGI\nSAalZwfNQRVLgxBoTrNQfSNHjuSkk07izTffbLsC24CCQEQki9KzWxYA1157bZ3nPXv25MMPP2yb\notpQ+zk2ERGRWCgIREQSTkEgIpJwCgIRkWwqq+DO/xs8NsOaNWu46qqr+OEPf8j06dNbtOjvfve7\nnHbaaS2atqnUWSwikkllFQz7JtTsh8ICWPSfUJr1zs4A/PGPf+Saa67hwgsvBILvIz5w4AB9+/bl\nsssuY+TIkVxyySWsWbOGhx56iNGjR9O/f382b97MiBEjOP/88/nlL39Z53sM/vCHP/DSSy+xfft2\n7r77boqKilr9K+qIQEQkk4qXgxA4eDB4rHi5yZOOGTOGpUuXMmbMGO69914KCwu55557uOGGGwAY\nNGgQN910E0VFRWzduhWA6667jmnTpjF37twG55mfn8+hQ4fYv38/L7zwQut/P3REICKSWdlXgiOB\nGoLHsq80edJu3boxdepUAL761a9y7rl1r0Zr6Osou3TpQqdOnT59Xt8DDzzAvHnzmD17Nh999FEL\nfqHDRR4EZpYPVAGb3X24mRUBvwH6ARuAK9x9d9R1iIi0SGlJ0BxU8XIQAk1sFgJ45plnWLhwIZ06\ndaKkpISPP/6Ym2++mRNOOIFLLrmkSfO49dZbWbFiBePGjePee+9l4MCBTJ06lerq6ja7TUXkX1Vp\nZhOAEqBbGATTgV3ufpeZTQJ6uPstmeahr6oUkajoqyoj7iMwsz7APwG/Tht9KTA7HJ4NjIiyBhER\nySzqzuJ/AyYCh9LG9XT3LeHwVqBnQxOa2VgzqzKzqh07dkRcpogk2ccff0zUrSNRO3jwYIunjayP\nwMyGA9vdfbmZlTX0Hnd3M2tw7bt7OVAOQdNQVHWKSLL17t2bzZs3s3///rhLabWWnkoaZWfxOcAl\nZnYR0BnoZmaPAtvMrLe7bzGz3sD2CGsQEcmoe/fudO/ePe4yYhVZ05C7T3b3Pu7eD/gW8Cd3/w4w\nHxgVvm0UMC+qGkREJLs4Lii7C/iama0Hzgufi4hITHJyQZm7VwAV4fBOYFgulisiItnpFhMiIgmn\nIBARSTgFgYhIwikIREQSTkEgIpJwCgIRkYRTEIiIJJyCQEQk4RQEIiIJpyAQEUk4BYGISMIpCERE\nEk5BICKScAoCEZGEUxCIiCScgkBEJOEUBCIiCacgEBFJOAWBiEjCKQhERBJOQSAiknAKAhGRhFMQ\niIgknIJARCThFAQiIgmnIBARSTgFgYhIwikIREQSTkEgIpJwCgIRkYRTEIiIJFynuAvIqcoqmPMU\nYDDym1BaEoyreBnKvgKr3oS5C+Abw2Hsd5o2rzfWw45dUFwUjM82XNQDdu2GT/bBmKuyL0dEJGLm\n7nHXkFVJSYlXVVW1biblj8L3JsHBg
7Xj+p8Ib22EQ4cOf/+Zg2Dwl2oDA2o3/sv+Cq+tgbZYdUMH\nw1231i4jJbWsre9Dr+K6ddR/XyrIGnpdRBLLzJa7e9YNQzKCoPxRGH9Lwxv8bPLy4KbxsO4tmP/H\nls0jGzO46p+h62cAg25dYcYDdZdlBp/vA32PD44qehUH7/vFg0G45efDjeOgezeFgogA7SAIzKwz\nsBg4iqAJ6ml3/1czKwJ+A/QDNgBXuPvuTPNqcRCk9qrLH4tmA95eFRZCxVyFgUjCNTUIouwj2Af8\no7t/aGYFwFIzexb4Z2CRu99lZpOAScAtbb70yioY9k34+JM2n3Ud/U+ETgXN6yPYsKntmpYaUlMT\nBKCCQESaILIg8OBQ48PwaUH448ClQFk4fjZQQRRBUPEy7Ks5fPyIC+DCYTDzMejcOdgwp/QqhrNO\ng2cXwbyF0NjR0pmDgr3u1nT2VlbBpJ/B4r+0bPqsLKL5ikhHE2kfgZnlA8uBLwD3ufstZrbH3buH\nrxuwO/W83rRjgbEAffv2/dLGjRubt/DKKhh6GRw4UDsuPx+WPNO0PeXKKph+X91+gbw8eGBa257p\nc8vPDu8PSC3rkq/DxO8Fz6ffB2vfhqMK4LU3akMqLw9OPwVer66dR0EBvPRbHRGIJFzsfQT1iukO\n/A74F2Bp+obfzHa7e49GJ6YVfQTlj8IPJsOBsDP1vjubvxFv6JTTtpY68+fYIljxevZlNXYabNR1\nisgRpV0FAYCZ3Q58BFwPlLn7FjPrDVS4+4BM07bqrCGdXikiCRV7Z7GZFQP73X2PmR0NfA2YBswH\nRgF3hY/zoqoBCDb+CgARkUZFedZQb2B22E+QBzzl7gvMrBJ4yszGABuBKyKsQSS5KithzhzYuhV2\n7YIdO2DAAJg4MXh9zpzgceRIKC3NPq+KCigrC56nhjNNl1o+wFlnwc6d2aeRWCTjgjJpuvQPvD6w\n8WvO36OyEqZPh7VrobgY/vznulfSpzOrPeHADD7/eejbN3j+ySfB8tatg//6L+jfH558MjgZIS+8\nPdmhQ2G/231w2mm1Na5aBXPnBst/4onDT4Lo1CmYZuzYFq0OaZ5210fQGgqCHCkvhx/8IDjTKi8v\n+IAXFgYf8O7dazdG5eXBh/0b39AHOiqVlTBpEixdWrsBPvlk+NGP6q7z1F73G2/AkiWNn/IcpU6d\n6p6dl01BAbz0knY0ckBBIM1TWQlDh2b+QOflQUkJvPJK7biJE2HatOjr60iy7eWXl8P48Y1fDd+/\nP/ToUbun3thef3uVlwc/+xlMnhx3JR1e7J3FcoSpqMh+G45Dh+qGAMDPfw4nndRxjwzasqks1XQz\nf36w515YCC++GMw3tZw1a+DxxzPv2a9fHzzW/1tkcuaZsHs3bNoUz1FDuvz82r4GaRcUBBIoK4Oj\njgrah5uzoXCHceNg8WL4n/+pbZ8uKgo6KFPtzR98ELy/KR2TDSkvh5kz4e/+LjgKibpZoaGmmb59\ngw3qP/wDrFwZDK9b1/DvPGZM0LSW6izt1g1+8Yu6e+/79sFVVwXNbq+/3nb3w+rVC7ZvD/42qXb8\nVFCndyD36hV04j77bO3v8MEHQS31/wcsvFLdvbafINuRSH4+XHxxsL7uuSc42szPh//4DzULtTNq\nGpJaqb3SY4+FFStg2bK23UBBeHX2A8GGKdNZJem1PPZYEDQp6W3M9ffYU/0XDW2ke/WqG0T1z4RJ\ntbVv3Bj8tAcDB8Lw4cHfIlsfQH4+3H9/7bpt6ZFMY2cI1R+ePh1+//vaoDznnKDehs4Q0kkIsVAf\ngbSN9CaLJ56o3cu88srsTRiZ9OoF27Y1vOf5hS/A229n3uMsLobjj68Nqry8oIkq1WzSmFSIrFoV\ndIzv31+73Pb2Wajf/5IKzlRAp/4WEybU7czPJW3g2zUFgbS9+h/6W24J9gqPNEVFQXt5XP/7qaab\nxo608vLgppsyd8JrAyxNoCCQ3LjlFpgxI9iomgXt6H37BhvbDRvavmkp11JNM+vWBc1l775b9/dJ\n9R2k/86vvdZ4yKSOSKDuxV6pvpS49uylQ9JZQ5Ib06bBiBGN752mt/U/+yzMm9eyPfGhQ+Hqq+H7\n32/6OeupjXT37pk3zilmcMYZQSdu6grcTL9PY1fKNtQhu2JF8Fp6H4U29tJO6IhAciv96tcBA+DC\nC2s3kt26BRvZzp2DPfFu3YKzc9IvXKt/9WxRUd35pza89TfS5eXwve8d3u9w/vmwZ0/uzkYSySE1\nDYnUl34VbuoUz456/YMIahoSOVxpqfb4RRqQ15Q3mdmQ8C6i6eO+GE1JIiKSS00KAmAh8Ccz+1za\nuF9HUI+IiORYU4NgLfBz4CUz+0o4Tt+OLiLSATS1j8DDL5VZC/zGzB4C2n8vs4iIZNXUIwIDcPf1\nwNDw5/SoihIRkdxp0hGBu5+VNvwhcIWZ9Y2sKhERyZmMQWBm/07mJqAftm05IiKSa9mOCNKv4vop\n8K8R1iIiIjHIGATuPjs1bGY/Tn8uIiIdQ1M7i0FnCYmIdEjNCQIREemAsnUW76X2SOAzZvZB6iWC\nawu6RVmciIhEL1sfQddcFSIiIvFQ05CISMIpCEREEk5BICKScAoCEZGEUxCIiCScgkBEJOEUBCIi\nCacgEBFJOAWBiEjCKQhERBIusiAwsxPM7EUze8PM1pjZj8LxRWb2vJmtDx97RFWDiIhkF+URwQHg\nRncfCAwGvm9mA4FJwCJ37w8sCp+LiEhMIgsCd9/i7n8Nh/cC1cDxwKVA6gtuZgMjoqpBRESyy0kf\ngZn1A84C/gL0dPct4UtbgZ6NTDPWzKrMrGrHjh25KFNEJJEiDwIzOwaYC/zY3T9If83dnUa++czd\ny929xN1LiouLoy5TRCSxIg0CMysgCIHH3P234ehtZtY7fL03sD3KGkREJLMozxoyYCZQ7e53p700\nHxgVDo8C5kVVg4iIZJfxG8pa6RzgGmCVma0Mx/1v4C7gKTMbA2wEroiwBhERySKyIHD3pQTfbdyQ\nYVEtV0REmkdXFouIJJyCQEQk4RQEIiIJpyAQEUk4BYGISMIpCEREEk5BICKScAoCEZGEUxCIiCSc\ngkBEJOEUBCIiCacgEBFJOAWBiEjCKQhERBJOQSAiknAKAhGRhFMQiIgknIJARCThFAQiIgmnIBAR\nSTgFgYhIwikIREQSTkEgIpJwCgIRkYRTEIiIJJyCQEQk4RQEIiIJpyAQEUk4BYGISMIpCEREEk5B\nICKScAoCEZGEUxCIiCScgkBEJOEiCwIze8jMtpvZ6rRxRWb2vJmtDx97RLV8ERFpmiiPCGYBF9Qb\nNwlY5O79gUXhcxERiVFkQeDui4Fd9UZfCswOh2cDI6JavoiINE2u+wh6uvuWcHgr0LOxN5rZWDOr\nMrOqHTt25KY6EZEEiq2z2N0d8Ayvl7t7ibuXFBcX57AyEZFkyXUQbDOz3gDh4/YcL19EROrJdR
DM\nB0aFw6OAeTlevoiI1BPl6aNPAJXAADN7z8zGAHcBXzOz9cB54XMREYlRp6hm7O7fbuSlYVEtU0RE\nmk9XFouIJJyCQEQk4RQEIiIJF1kfgcSj8hWY8yS8sRZ27ITiY6GoB+zaXfscGh4u6gG9PgcjvwWl\nZ8f3O4hIbikIOpDKV6DsYqipqR1XXe891U0YfvhxeHG+wkAkKdQ01IFULIX9+1s/n5qaYF4ikgwK\ngg6k7FwoKGj9fAoLg3mJSDKoaagDKT0bKn6vPgIRaR4FQQdTerY24iLSPGoaEhFJOAWBiEjCKQhE\nRBJOQSAiknAKAhGRhFMQiIgknIJARCThFAQiIgmnIBARSTgFgYhIwikIREQSTkEgIpJwuumciEiO\nzaScWczkKDoDsJMdHEtxneEiigDoSS+uZiSDKY2sHgWBiMRmGZU8xhy2sRWou9FbRiVLqGAIZZ8+\nT713F7sa3Hg2NlxEUbOnae30jU1TwwHeZn0Da6Ox7wyER3iY53gxsjAwd49kxm2ppKTEq6qq4i5D\nRFoptTGv5g3eYyOb2IRz+DboJPrzDm9xiEPkkccgTmc1rzX43iQwjJ8ylZuZ3LzpzJa7e0m29+mI\nQERyYhmVfJ0yaqjJ+t630vaYD3GIVayMsrR2r5BChlAW2fwVBCKSE0uoYD9t8KXaHcjpnElXuqmP\nQESSYQhlFFDQpCOCTAzjVM5gP/uOyD6CYynmFAZGvnFvDgWBiOTEYEpZSMWnfQQ72UF/BvB1LmQX\nOxlCGWtY9enZNKk94nS52DtOInUWi4h0UE3tLNYFZSIiCacgEBFJOAWBiEjCKQhERBJOQSAiknAK\nAhGRhIslCMzsAjNba2Z/M7NJcdQgIiKBnAeBmeUD9wEXAgOBb5vZwFzXISIigTiOCM4G/ubub7t7\nDfAkcGkMdYiICPHcYuJ44N205+8B/6v+m8xsLDA2fPqhma1t4fKOA95v4bRRUl3N115rU13No7qa\npzV1fb4pb2q39xpy93KgvLXzMbOqplxinWuqq/naa22qq3lUV/Pkoq44moY2AyekPe8TjhMRkRjE\nEQSvAv3N7EQzKwS+BcyPoQ4RESGGpiF3P2BmPwAWAvnAQ+6+JsJFtrp5KSKqq/naa22qq3lUV/NE\nXtcRcRtqERGJjq4sFhFJOAWBiEjCdeggaE+3sjCzDWa2ysxWmllVOK7IzJ43s/XhY48c1PGQmW03\ns9Vp4xqtw8wmh+tvrZl9Pcd1TTGzzeE6W2lmF8VQ1wlm9qKZvWFma8zsR+H4WNdZhrpiXWdm1tnM\nXjGz18K6fhqOj3t9NVZX7P9j4bLyzWyFmS0In+d2fbl7h/wh6Ih+C/h7oBB4DRgYYz0bgOPqjZsO\nTAqHJwHTclDHUOCLwOpsdRDcAuQ14CjgxHB95uewrinATQ28N5d19Qa+GA53BdaFy491nWWoK9Z1\nBhhwTDhcAPwFGNwO1ldjdcX+PxYubwLwOLAgfJ7T9dWRjwiOhFtZXArMDodnAyOiXqC7LwZ2NbGO\nS4En3X2fu78D/I1gveaqrsbksq4t7v7XcHgvUE1wdXys6yxDXY3JVV3u7h+GTwvCHyf+9dVYXY3J\n2f+YmfUB/gn4db3l52x9deQgaOhWFpk+KFFz4AUzWx7ePgOgp7tvCYe3Aj3jKa3ROtrDOvwXM3s9\nbDpKHR7HUpeZ9QPOItibbDfrrF5dEPM6C5s5VgLbgefdvV2sr0bqgvj/x/4NmAgcShuX0/XVkYOg\nvTnX3c8kuOvq981saPqLHhz3xX4ub3upI/QAQdPemcAW4BdxFWJmxwBzgR+7+wfpr8W5zhqoK/Z1\n5u4Hw//1PsDZZnZqvddjWV+N1BXr+jKz4cB2d1/e2Htysb46chC0q1tZuPvm8HE78DuCw7ltZtYb\nIHzcHlN5jdUR6zp0923hh/cQ8CtqD4FzWpeZFRBsbB9z99+Go2NfZw3V1V7WWVjLHuBF4ALawfpq\nqK52sL7OAS4xsw0Ezdf/aGaPkuP11ZGDoN3cysLMuphZ19QwcD6wOqxnVPi2UcC8OOrLUMd84Ftm\ndpSZnQj0B17JVVGpD0LoMoJ1ltO6zMyAmUC1u9+d9lKs66yxuuJeZ2ZWbGbdw+Gjga8BbxL/+mqw\nrrjXl7tPdvc+7t6PYBv1J3f/DrleX1H1greHH+AigrMp3gJujbGOvyfo6X8NWJOqBTgWWASsB14A\ninJQyxMEh8D7CdoXx2SqA7g1XH9rgQtzXNcjwCrg9fAD0DuGus4lOCx/HVgZ/lwU9zrLUFes6ww4\nHVgRLn81cHu2//WY64r9fyxteWXUnjWU0/WlW0yIiCRcR24aEhGRJlAQiIgknIJARCThFAQiIgmn\nIBARSTgFgUgLmdn/MbPzGhhflrqLpMiRIOdfVSnSUbj77XHXINIWFAQiTWBmtwHfAXYQ3PRrOXAq\nwQVAT5vZBQQ3D/sIWBpboSItoKYhkSzM7MvAN4AzCG4aWFLv9c4E96m5GPgS0CvXNYq0hoJAJLtz\ngHnu/okH9/7/fb3XTwbecff1Hlyq/2jOKxRpBQWBiEjCKQhEsvszcHH4vbfHAMPrvf4m0M/MTgqf\nfzun1Ym0kjqLRbJw91fNbD7BHSq3Edyt8r/TXv8k/Na5P5jZR8ASgu8RFjki6O6jIk1gZse4+4dm\n9hlgMTDWw+8MFjnS6YhApGnKzWwg0BmYrRCQjkRHBCIiCafOYhGRhFMQiIgknIJARCThFAQiIgmn\nIBARSbj/D63Fn/tKJd7MAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD9CAYAAACsq4z3AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAD7VJREFUeJzt3X+s3XV9x/Hny7ZoAWchdE2tZiwbq2MioFcmY3NxgDBH\npNNJcZurCbOYaKb7AYHNzC1ZJgmZ27K4pfXHaDJlICuUmGXYVQjbUoFikYrIWOTHqECLWJWsUijv\n/XG+1UtpOefee07P7ec+H0lzvt/P93Pu932+aV73cz73+yNVhSTp8PeScRcgSRoOA12SGmGgS1Ij\nDHRJaoSBLkmNMNAlqRHzB+mU5EHg+8Be4NmqmkhyLHANcDzwIHBBVX1nNGVKkvqZygj9LVV1SlVN\ndOuXAZuq6gRgU7cuSRqTmUy5nA+s65bXAStmXo4kaboyyJWiSR4AvktvymVNVa1NsquqFnXbA3xn\n3/p+710NrAY46qij3vCa17xmmPVLUvPuvPPOJ6pqcb9+A82hA79YVduT/DiwMck3Jm+sqkpywN8M\nVbUWWAswMTFRW7ZsGXCXkiSAJA8N0m+gKZeq2t697gCuB04DHk+ytNvZUmDH9EqVJA1D30BPclSS\nl+9bBt4KfA24EVjVdVsFbBhVkZKk/gaZclkCXN+bJmc+8Lmq+rckdwDXJrkIeAi4YHRlSpL66Rvo\nVfVN4OQDtH8bOHMURUmSps4rRSWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAl\nqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIa\nYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREG\nuiQ1wkCXpEYMHOhJ5iXZmuQL3fqxSTYmub97PWZ0ZUqS+pnKCP1DwL2T1i8DNlXVCcCmbl2SNCYD\nBXqSVwG/BnxqUvP5wLpueR2wYriljcfKNZtZuWbzuMuQpCkbdIT+N8ClwHOT2pZU1aPd8mPAkgO9\nMcnqJFuSbNm5c+f0K5Ukvai+gZ7kPGBHVd15sD5VVUAdZNvaqpqoqonFixdPv1JJ0ouaP0CfM4C3\nJ3kb8DLgx5L8E/B4kqVV9WiSpcCOURYqSXpxfUfoVXV5Vb2qqo4HLgS+VFW/DdwIrOq6rQI2jKxK\nSVJfMzkP/Qrg7CT3A2d165KkMRlkyuWHquoW4JZu+dvAmcMvSZI0HV4pKh0mPKVW/RjoktQIA12S\nGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakR\nBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGg\nS1IjDHRJaoSBLkmNMNAlqREG+iQ3bN3O1od3cdsDT3LGFV/ihq3bx12SJA3MQO/csHU7l6/fxp69\nzwGwfdduLl+/zVCXdNgw0DtX3nQfu5/Z+7y23c/s5cqb7htTRZI0NX0DPcnLktye5KtJ7kny5137\nsUk2Jrm/ez1m9OWOzrd27Z5SuyTNNoOM0J8GfqWqTgZOAc5N8ibgMmBTVZ0AbOrWD1uvXLRwSu2S\nNNv0DfTqeapbXdD9K+B8YF3Xvg5YMZIKD5FLzlnOwgXznte2cME8Ljln+ZgqkqSpGWgOPcm8JHcB\nO4CNVXUbsKSqHu26PAYsOch7VyfZkmTLzp07h1L0KKw4dRkfe8dJHDGvd0iWLVrIx95xEitOXTbm\nyiRpMPMH6VRVe4FTkiwCrk/y2v22V5I6yHvXAmsBJiYmDthntlhx6jKuvv1hAK65+PQxVyNJUzOl\ns1yqahdwM3Au8HiSpQDd647hlydJGtQgZ7ks7kbmJFkInA18A7gRWNV1WwVsGFWRkqT+BplyWQqs\nSzKP3i+Aa6vqC0k2A9cmuQh4CLhghHVKkvroG+hVdTdw6gHavw2cOYqiJElT55Wi0mHA+wxpEAa6\nNMt5nyENykCXZjnvM6RBGejSLOd9hjQoA12a5bzPkAZloEuznPcZ0qAGuvRf0vjsu5/QpdfdzZ69\nz7Fs0UIuOWe59xnSCxjo0mHA+wxpEE65SFIjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANd\nkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWp\nEQa6JDXCQJekRhjoktSI+eMuYLa55uLTx12CJE2LI3RJaoSBLkmNMNAlqRF9Az3Jq5PcnOTrSe5J\n8qGu/dgkG5Pc370eM/pyJUkHM8gI/VngD6vqROBNwAeSnAhcBmyqqhOATd26JGlM+gZ6VT1aVV/p\nlr8P3AssA84H1nXd1gErRlWkJKm/Kc2hJzkeOBW4DVhSVY92mx4DlhzkPauTbEmyZefOnTMoVZL0\nYgYO9CRHA/8CfLiqvjd5W1UVUAd6X1WtraqJqppYvHjxjIqVJB3cQIGeZAG9MP9sVa3vmh9PsrTb\nvhTYMZoSJUmDGOQslwCfBu6tqo9P2nQjsKpbXgVsGH55kqRBDXLp/xnAe4BtSe7q2v4YuAK4NslF\nwEPABaMpUZI0iL6BXlX/CeQgm88cbjmSpOnySlFJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCB9B\nJx0mfDyi+nGELkmNMNAlqREGukZq5ZrNrFyzedxlSHOCgS5JjTDQJakRBrokNcJAl6RGGOiS1AgD\nXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAl\nqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWpE30BP8pkkO5J8bVLbsUk2Jrm/ez1mtGVKkvoZ\nZIR+FXDufm2XAZuq6gRgU7cuSRqjvoFeVbcCT+7XfD6wrlteB6wYcl2SpCma7hz6kqp6tFt+DFhy\nsI5JVifZkmTLzp07p7k7SVI/M/6jaFUVUC+yfW1VTVTVxOLFi2e6O0nSQUw30B9PshSge90xvJIk\nSdMx3UC/EVjVLa8CNgynHEnSdA1y2uLVwGZgeZJHklwEXAGcneR+4KxuXZI0RvP7daiqdx9k05lD\nrkWSNANeKSpJjTDQJakRBrpG5oat29n68C5ue+BJzrjiS9ywdfu4S5KaZqBrJG7Yup3L129jz97n\nANi+azeXr99mqEsjZKBrJK686T52P7P3eW27n9nLlTfdN6aKpPYZ6BqJb+3aPaV2STNnoGskXrlo\n4ZTaJc2cga6RuOSc5SxcMO95bQsXzOOSc5aPqSKpfX0vLJKmY8WpywC49Lq72bP3OZYtWsgl5yz/\nYbuk4TPQNTIrTl3G1bc/DMA1F58+5
mqk9jnlIkmNMNAlqREGuiQ1wkCXNCetXLOZlWs2j7uMoTLQ\nJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuac5p9fGIBrqkOaXlxyMa6JLmlJYfj2ig\nS5pTWn48ooEuaU5p+fGIBrqkOaXlxyP6xCJJc0rLj0c00CXNOa0+HtEpF0lqhIEuSY0w0CWpEQa6\nJDXCQJekRhjoktSIGZ22mORc4G+BecCnquqKoVQlSSPW0umK+0x7hJ5kHvAJ4FeBE4F3JzlxWIVJ\nkqZmJlMupwH/U1XfrKo9wD8D5w+nLEnSVM1kymUZ8L+T1h8Bfn7/TklWA6u71aeSHA73qDwOeGLc\nRTTkuGvf7/EcEv9vDtfhcjx/YpBOI7/0v6rWAmtHvZ9hSrKlqibGXUcrPJ7D47EcrtaO50ymXLYD\nr560/qquTZI0BjMJ9DuAE5L8ZJIjgAuBG4dTliRpqqY95VJVzyb5IHATvdMWP1NV9wytsvE6rKaI\nDgMez+HxWA5XU8czVTXuGiRJQ+CVopLUCANdkhrRXKAn+ZMk9yS5O8ldSV5wbvwMfvabk3wlybNJ\nfmNYP7cVSfZ2x/xrST6f5MgkNyc5Z79+H07yD+OqU2pVU4Ge5HTgPOD1VfU64Cyef/HTTD0MvBf4\n3BB/Zkt2V9UpVfVaYA/wfuBqemdATXZh1z6nJHnqAG1/luSPuuWrkjzQ/VK8K8nvHfoqDy/DPKZJ\n/jXJoj77uyXJrD1vvbVnii4FnqiqpwGq6gmAJG+kdxOxo4CngTOBdwJvB44Efgq4vqou7fo/1fU/\nD9gNnF9Vj1fVg9325ybvNMnRwAbgGGAB8JGq2jDSTzr7/QfwOuAjwF8kOaKq9iQ5Hnhlt10vdElV\nXTfuIhoz0DGtqrcdimJGqakROvBF4NVJ/jvJ3yf55e4c+WuAD1XVyfRG7bu7/qcAK4GTgJVJ9l0o\ndRTw5a7/rcD7+uz3B8CvV9XrgbcAf5UkQ/1kh5Ek8+ndtG1bVT0J3N6tQ290fm15etWMJHlrks3d\nFODnkxyd5BVJ7kuyvOtzdZL3dcvndn2/mmTTeKufnZI8mOS4JMcnuTfJJ7vp2y8mWTip67uS3N7l\nzC+NreADaCrQq+op4A307h2zk16QXww8WlV3dH2+V1XPdm/ZVFXfraofAF/nR/dL2AN8oVu+Ezi+\nz64D/GWSu4F/p3efmyVD+VCHl4VJ7gK20Jue+nTXPnnaZU5Ot0zBlZOmB046UIckx9H75nNWN4jY\nAvxBVX0X+CBwVZILgWOq6pNJFgOfBN7ZDVLedWg+yqzR95gewAnAJ6rq54Bd9L7R7zO/qk4DPgx8\ndMi1zkhrUy5U1V7gFuCWJNuAD7xI96cnLe/lR8fjmUkjyMntB/NbwGLgDVX1TJIHgZdNsfQW7K6q\nUw7QvgH46ySvB46sqjsPcV2Hk0GmB95E75bV/9V9ETwC2AxQVRuTvIvera1PntT/1qp6oOvz5CgK\nn8WmM431QFXd1S3vP6hbf5D2sWsq0Luvms9V1f1d0ynAvcC5Sd5YVXckeTk/mnIZllcAO7owfwsD\n3hltrqiqp5LcDHwGR+fDEGBjVb37BRuSlwA/C/wfvb/pPHKIa2vF/oO9hQfYNshg75BqasoFOBpY\nl+Tr3fTHicCf0psn/7skXwU2Ms3Rc5I3JnmE3lfWNUn23ergs8BE943gd4BvzPBztOhqeiNGA33m\nvgyckeSnAZIcleRnum2/T28Q85vAPyZZ0PV/c5Kf7PofO4aadQjMqt8uM9V9lf+FA2x6gt7Xzsmu\n6v7te+95k5aPnrR8HXBdt3wHvbtK7r/fJ4D2nmc1RZOP2wG23UBvZDmXHdkNCPb5+HR+SFXtTPJe\n4OokL+2aP9L9If53gdOq6vtJbqV3xtVH03suwfpuBL8DOHv6H2NWGcoxbYX3cpGkRrQ25SJJc1ZT\nUy5Sa5LcBrx0v+b3VNW2cdTTgpaPqVMuktQIp1wkqREGuiQ1wkCXpEYY6JLUiP8HB/BSc2tG77IA\nAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "rates_plot('network/V1_nodes.h5', 'network/V1_node_types.csv', 'output/spikes.h5', group_key='pop_name', smoothed=True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "In our config file we used the cell_vars and node_id_selections parameters to save the calcium influx and membrane potential of selected cells. We can also use the analyzer to display these traces:"
+ ]
+ },
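+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "For reference, the relevant part of the config might look roughly like the sketch below. This is only an illustration built around the parameter names mentioned above: the exact nesting, the variable names ('v' for membrane potential, 'cai' for calcium), and the node ids are assumptions, not the values from the actual file.\n",
+ "\n",
+ "```python\n",
+ "# Sketch (assumed layout) of the config pieces that turn on per-cell variable reports.\n",
+ "cell_var_settings = {\n",
+ "    'run': {\n",
+ "        'save_cell_vars': ['v', 'cai']  # membrane potential and calcium (assumed variable names)\n",
+ "    },\n",
+ "    'node_id_selections': {\n",
+ "        'save_cell_vars': [10, 80]  # hypothetical ids of the cells to record\n",
+ "    }\n",
+ "}\n",
+ "```"
+ ]
+ },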
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZgAAAEKCAYAAAAvlUMdAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXd4VMXawH+zSWAJJIROSMDQaxBDlaZYQQQsWLABXjvo\nLeq9eq+fesV27RUFK6jYAGlSlC5NCL23ECAQSCE92WSTne+Ps7vJJrvZvpuE+T1Pnuw5Z87MO6fM\nO+87M+8RUkoUCoVCofA1umALoFAoFIq6iVIwCoVCofALSsEoFAqFwi8oBaNQKBQKv6AUjEKhUCj8\nglIwCoVCofALSsEoFAqFwi8oBaNQKBQKv6AUjEKhUCj8QmiwBQgmzZs3l3FxccEWQ6FQKGoV27dv\nz5BStnCW7qJWMHFxcSQmJgZbDIVCoahVCCFOupJOucgUCoVC4ReUglEoFAqFX7ioXWQKhULhLzLy\ni3nsux1IKXlmVDf6XtI02CIFHGXBKBQKuxw6l8vyfecoM6lPenjCj9tOs/XEBZIzC7ljxha+33oq\n2CIFHKVgFApFFbIKSrh1+iYe+XY7d8zYTHpecbBFqtH8fuA8H685Rk6h0bovKb2A1pF6Vj15BUM6\nNefZ+XuZtuSA1wpbSsk3W07y2rKDnMku8lZ0v6IUjEKhqMKm45kUlJRx3+WXsO9sDjd9vJGDqbnB\nFqtGcjKzgIe+SeTNFYcZ+f56dpzKAiA5s4BLmoUTqQ/ji4n9mDQ4ji82nODB2YnkGYxOctXytadA\n1hxO4/8W7GPGuiSue2cdvx847/M6+QqlYBQKRRUOn8slRCf49w3dmfvIYMpMkls/2cRv+88FW7Qa\nx4ZjGUgJH0y4jLAQHXfO2MLPiac5mVlA++YNAQgN0fHi2J68fFMv1h1JZ/wnmzl9odBhnofO5XLV\n2+sY/sYa3vn9iI3V8/uBNCLqh7LmqSvp2LIRD32TyIerjlITv06sFIxCEUQW7T5L/AsreOy77Vwo\nKAm2OFbS80toEl4PfVgIvWIas3DqEDq3bMRD32yvsY1ZsDh9oYiwEMGY3tEsnDKE/u2b8PTcPWTk\nl9CuWbhN2nsGXcKsyQNIzSnipo83kph8wW6efxzJoMwkGdKpOR+sOsq9X/xpfT6SMwro3KoR7Zs3\n5KeHL2fcpW14+/cjTJ2zk8KSUr/X1x2UglEoAsS25AvMWHecjPzy8Yzpa44BsPJgGmM+3MDu09lV\nzgtGY55fXEqkvnySaatIPT8+fDk3XxajNWbflzdm53IM/HfxftYcTgu4nDWBjPximjWsjxCCJg3r\nMWvyACYMaAtA75ioKumHdm7OL1OGENkgjLs++5P5O1Ksx6SUTPpqK68sPUikPpTZ9w/gjVt7k3gy\ni7EfbeBgai7ncw1EN24AgD4shHfv6MO/b+jGsn2pjP9kMylZji2jQONXBSOEGCmEOCyEOCaEeMbO\ncSGE+MB8fI8QIsHZuUKIpkKI34UQR83/m1Q41lsIsVkIsV8IsVcIofdn/RQKV8kpNHLfF1t5bdkh\nrnlnHasPnafMJDmals/EwXHMfeRyAG6bsZmfEk9bz9t5Kov4F3/jhvf/sKt8quPbLSfp9cIKnvxp\nNzlFzn3+Fck3GGmkt13FoA8L4Z3bL+XZUd1YuldrzM5kF/HRmqN8tTGZyV9t49n5eyguLXOrrNrI\n2ewinvxpN7M2JZOWV0zziHrWY6EhOl67pTfb/nMNQzs3t3t+xxaN+OWxwfS9pAn/+Gk3ry07SJlJ\nsiclh7WH0+kd25inru8KwO392/LTw5djLDNxy/RNJGUU0CKivjUvIQQPDe/IF5P6czqrkHEfbeTP\npEz/XgAX8ZuCEUKEAB8Do4AewAQhRI9KyUYBnc1/DwGfuHDuM8AqKWVnYJV5GyFEKPAt8IiUsidw\nJeDeW6VQ+Ikdp7IoMpYx7aZetGncgPu/TuQ/v+ylzCSJjtLTOzaKxY8PpX9cE/45dw//t2AfJaUm\nZm1KxmAsI6uwhPGfbmLWpmSXLBopJe+tPEK9UB0Ld51h3EcbOHwuz2V584tLaVS/6jI5IQQPX9GR\nLyf25/SFQsZ+uIFvt5xiYPumPHplR77feprbZ2zhbA2f3eQtry07xLwdKbywaD/rj6QTFlK1Ka2o\nBOwRFV6P2X8ZwN0D2zFjXRKTvtrK9pPaBIF3bu/DfZfHWdP2aRvF4qlD6do6AoDGDcKq5Deia0sW\nTBlC4wZh3P35n3y7xaVoLn7FnxbMAOCYlDJJSlkC/ACMq5RmHDBbamwBooQQ0U7OHQfMMv+eBdxk\n/n0dsEdKuRtASpkppaz7XSlFjcdYZuLt3w8DMDo+mvmPDeaWhBh+2KZZKm3M7o6mZvfKw8M78M2W\nk9z12RY2J2UyuFNzlv11GMM6t+CFRft5/PudFBRX72s/m2MgI7+Ev1/TmR8eGkRhSRk3fbyRhbvO\n2KTbcDSDm6dv5K0VhykqKX9d8gz2FYyFEd1a8suUIVYrZ1CHZvxrZDc+vacvx9PyGfPhBjYczXD/\nYtUCpJRsPJbB+L6xvH3bpQD0atPYo7zCQnS8cnM8/7s1nj+TLvDSkgMAxDZpUCVty0g9Pz48iP+O\n7cndg9rZza9ji0b8MmUIQzs357kF+/jPL3spKTV5JJsv8KeCiQFOV9hOMe9zJU1157aSUqaaf58D\nWpl/dwGkEGKFEGKHEOKf3ldBofCenxJPs+9MLpe1i6JJeBj6sBDevu1Spo3rSddWEXSPjrSmDQ3R\n8ewN3flgwmXsP5vL+dxiYqIaEBVej8/v68fT13dl6d5Uxn28kWNpji0Sywyl9s0b0S+uKUseH0qv\nmEj++sMuXly039rovPzrAfam5PDRmmOM/WgDR89reeYZSqu4yCrTqWUjFk0dytPXd+WeQZcAMLJX\naxZMGULThvW498s/+Wj1UUy1cKFmSlYh//llL7/sTKki/4WCEi4UlNCzTSS39o1l9wvX8Z/R3b0q\n747+7fjpkcsJ0QlaRNRHHxZiN1390BAmDo6jZYRj73/jBmF8MbE/D1/Rge/+PMUdMzcHzaKs1YP8\nUvMVWO5+KDAUuNv8/2YhxNWVzxFCPCSESBRCJKanpwdOWMVFy9YTF2jTWM/8RwcjhAA0V9O9l8ex\n4u/Dad24amMx9tI2zDf76K/t0RIAnU4wZUQnvvnLQLIKShj70UaW7Dlrt8xc85iLxZXSMlLPnAcH\n8Zeh7fl6UzK3z9jMqcxCjqXl8/AVHfjmLwPIKtTynL8jxTzIX9UNU5nGDcKYMqKTjTuoU8tGLJgy\nhLGXtuGt347wl1nbyC4M7Ay55fvOcfXba7n78y1sOu6+JfXykoN89+cp/v7jbu6ftY3MChMzssx1\nadpQG3dp3CDMoUJwhz5to9j1/LUsmjrE67xCdIJnR3Xn47sSOHo+n9Ef/MHaCpMwVh86z/H0fK/L\ncYY/FcwZoG2F7VjzPlfSVHfuebMbD
fN/y1VLAdZLKTOklIXAUiCBSkgpZ0op+0kp+7Vo4fRzBgqF\n16RkFXFJs4ZW5eIq3aMjmffoYK7q1spm/5BOzfn1iWF0ax3B1Dk7+e/i/VXcIHkGzYUW2aDcCgkL\n0fF/N/bg03sSOJ6Wz/A311BqklzStCHDOrdg6RPD6B3bmH+YJwU0qOd5o9mwfijv3dGHaeN6suFY\nBqM/2MCeFPcmKXjKzlNZTJ2zg/O5xSRnFHL353/y2rKDGMtcdxXtOp3NzZfF8NK4nmw6nsmo9/9g\n83Ft4PxCgaa8LQrGl0Tow6wzxHzB6N7RLJo6hFaReiZ/vY13fjtMdmEJU+fs5GWzO86f+FPBbAM6\nCyHaCyHqAXcCiyqlWQTcZ55NNgjIMbu/qjt3ETDR/HsisND8ewUQL4QINw/4XwH4/woqFE5IySq0\n61P3htaN9fzw0OVMHhLHVxuTmfDZFs7lGKzHLSvFI+xYISN7RbPo8aG0MVtOlrUaLSP1fPfAQB69\nsiMAbZuEVznXHSxW2s+PDAZg/Ceb+Waza5MUvOHJn3dTapK8ddul/P6P4dzZXxtEv+3T6hc3Wigp\nNXE+z0DbpuHcd3kcCx7Txpru/nwL7688arVmmoT7XsH4gw4tGvHLY0MYnxDLB6uP0eel3yksKePB\nYR38XrbfFIyUshSYitbwHwR+klLuF0I8IoR4xJxsKZAEHAM+Ax6r7lzzOa8D1wohjgLXmLeRUmYB\n76App13ADinlr/6qn0LhCjmFRtLyimnb1LvG2h71QnW8MKYnH064jIOpuTZukFyzBRPhYBylffOG\nrH7qSr6a3J8BceVRfkNDdPxrZDc2P3sVd/Zva/dcd+nTNooljw9lcKdm/N/C/Tzy7XavXGZlJsnO\nU1l2p15vS75AUnoBz43uzsherQmvF8prt8Tz8V0JHE/P54b3/+DXPal2ci3nfK4BKSE2SusU9GgT\nyeKpQxnXJ4Z3Vx7h0e92ANDEDxaMv2hQL4Q3b7uUN27tbd3XqWUjv5crLuYVuf369ZPqi5YKfzJ3\newpP/bybhVOGcGnbqovufMWxtDymfLeTw+fzeGBoe4xlJn5KTOHgtJF+K9NdTCbJlxtP8L/lh2jR\nqD7v3XkZA9o7D2GfnleMPkxntcZeXLSfrzclE6EP5ZlR3bhrQDur+3HKdzvYcCyDtU9dWUUBnL5Q\nyOPf72TX6WwmDGjH8zf2sOsGXLjrDH/9YRfzHr3cJsS+lJKfE1P457w9ABx8aaRXbsRgcSwtj71n\ncripT4zbblsLQojtUsp+ztLV6kF+hSLQlJaZ2JKUabcHLqVkx6ksG1fVwdRcwuuF0DvWs2msrtKp\nZQQLpw7h3kGX8PmGE8zafJIiY82apa/TCR4Y1oH5jw6hXqiOO2du5v2VR6uNLrz95AUGv76KAa+s\nYub645SZJIt2n6XfJU2Ij2nMf37Zx4OzE8nML8ZgLGNb8gWGdGpm17po2zScnx+5nEeu6Mj3W08x\n7uMNHDlfdSbeb/vP0zKiPpe1bWKzXwjB7f3bsvIfw/nk7oRaqVxAe1ZuvizWY+XiDkrBKBRu8MKi\n/dw5cwtD/7eGz/9Ismkcv9qYzC3TNzHsjdW8t/IIxjIT53INtI7UB+Rl1oeFMO2mXsy4ty+A35Wa\np8THNmbJE8OsLqe7PttCao79abTfbz1NiE4wsENTXl16iGvfXceFghJuiI/m278M5P9u7MH6Ixlc\n/94fPLdgH2l5xYzp3cZh2WEhOp4Z1Y3Z9w/gQkEJYz/awPdbT9mMCx05n0dCuybodPbvWaeWEYyK\nj/buIlwkKAWjULiIlJKle1MZ0L4pA9o35eVfDzLhsy3WgeOFu8/SoUVDboiP5r2VR7n1k01sOpbh\ndEW3r7m+Z2t2PX8tX0zsH9By3aFR/VDevaMPb992KXvP5DDq/T/sTrnem5LDkI7N+WpSf965/VKS\n0gsAiG6sR6cT/GVoexY9PoTmjeoxd7sW0+vaHq2q5FOZ4V1asPSvw+h3SVOenb+Xx77bYQ0mmZZX\nTMvIwN6zuor6ZLJC4SIZ+SVkFRoZ2bM1k4fEMX/HGV5ctJ+R763nP6N7kJxRwOje0bx6czwje7bm\nPwv2kVVoJNJOWA9/E1VLZjjd2jeWy9pF8fefdjN1zk5+23+el8b1tMp/LtfAgPZNEUJwS0IsA9o3\nZd72MwzrUr7EoFvrSBZMGcIna4/TJDyMUDthW+zRMkLP7PsHMGN9Eu/8fphtyVlMG9eTnCIjLRop\nBeMLlIJRKFxklznYZNfWEQghuLVvLIM6NuOfc3fz71/2AtDOPFtsVHw0/eKa8s7vR7i6W8ugyVwb\n6NCiEfMeuZxP1h7n/VVH+fNEJm+Mv5SB7ZuSU2S0WYga2yScv17TuUoe+rAQ/n5tF7fL1ukEj17Z\nkSu7tuDvP+6yzhALtNVZV1EuMkWt5pO1xxn53nre+e2w37+Fsel4Bg3CQuhfYVpvTFQDvrl/IP8d\n25PmjepzWYWZYi0i6vPaLfFc44LL5mInNETH41d3tgZrnPjlVqaYG3t/LGisTPfoSBZOHcLDV3Sg\nXojOGlRS4R1qmrKaplxrMRjL6P3ib9QP1ZFXXEpskwa8cnM8V3TxXYSGN1ccYuepbMZe2obvt50m\nTCeY++hgn+WvqIrBWMY7vx9h5vokAD6+K4HRvQM3qF5mkoQ4GOBXaLg6TVm5yBQBZd72FOZuT2F4\nlxZMHhLncQyn+TtSeHXpQUrKTEy/O4HIBmE8O38PE7/cyk192vDcjT1o7qIffeGuM2w+nsnYPm0Y\n3LH8+x17U3L4eM1xQPtGPcCkwXEeyatwHX1YCP++oTtXd2vJN1tOurRWxpco5eI7lAVzkVkwJzML\neHb+XuqF6nhoeAebBtXfZBWUMODVlYTXCyWnyEhcs3BeuTmeIZ3ckyG/uJRL//sbZSZJRP1QNvzr\nKhqHh1FcWsb0NceZvvYYDeuH8p8bujO+b/Xz/VNzihjy+moss42v6d6SF8b0ZHNSJv+cqy2oW/L4\nUM7nGpixPol/jexqs/hOobgYcdWCUQrmIlMwD81OZP3RdBo3CON8bjGjerXmuRt7EBPl21hZ9li+\n7xyPfLuduY9cTkmpiX//spfkzEJuSYjhPzd0p1k1FsfM9cd567cj9ImN4oquLXhzxWG+nNSPyzs0\nr7Lg7ej5PJ6dv5fEk1kM7tiMV2+OJ655Q7v5/px4mqfn7mHx1KFsPJ7BB6uOYpISg9FEeL0QXrm5\nl1crnhWKuojPVvILIWKFEE8JIRYKIbYJIdYLIaYLIUYLIdQkgVqEyST542gGt/Vty7qnR/DktV1Y\ncziNa95ex8drjvn9U7cnM7U1DF1bRzC4U3OW/204U0Z0ZPHus1z19jrm/HnK7rdDpJR8seEETcLD\nOHWhkDdXaB/v6h0bZXc1dedWEfz08OW8cnMv9qbkcP1763l/5VEMdla2n84qQiegW3QEj1zRkZ
X/\nuMI6hnNLQkzAVjwrFHWRahWEEOIr4EugBPgfMAEtIOVKYCSwQQgx3N9CKnzDmewiioxldI+ORB8W\nwuNXd2blP65geJfmvLniMKPe+4P1R/z3jZyMfC2mlOVLifqwEJ6+vhtLzaHn//3LXsZ/uokDZ3Ot\n52w+nkn7Z5dyPreYJ6/ryuqnruCJqzpx76BLqh1j0ekEdw+8hJVPXsE13Vvx7sojXPfuelYdPG+T\n7nyOgeaN6ls/edsmqgEz7u3H4qlDefr6bn64CgrFxYMzC+RtKeV1UsoPpJSbpJTHpJT7pJTzpZSP\no3333v4XjxQ1DstX7do2LXeHxTYJZ8a9/fhqcn9MUnLfl1t59NvtnPHRF/DS8gz8uieV0xcKSc8r\npnmj+lUsgs6tIvjhoUG8fdulnMwsZMxHG5i25AD5xaV8uPooUeFhjO8ba42O+4/rujLtpl4uld8q\nUs/Hdyfw3QMDqReq4y+zErn/621Wayo93/6q7fjYxna/e65QKFyn2llkUsp9To6XoIXaV9QCLB+h\nstdwjujaksv/1ozP/0jiozXHWHs4nalXdeKBYe2pH+rZTK+SUhM3fbSRszkGdAJM0nGIcMvCxau7\nt+SNFYf5cuMJFu8+S1peMXcPbMcrN8d7JIOFIZ2as/SJYczalMx7K49w7TvrefiKDpzPNShFoqiC\n0WgkJSUFg8HgPHEdRq/XExsbS1iYZ+9ItQpGCLHH0SG0Lxb3dnBcUQPJK3b8ESrQXFZTr+rMTZfF\nMG3JAd5ccZh521N4cWxPhnuwtuTQuVzO5hh4+vqu5BYZmbE+iW5OFrBFhdfj1Zvjub1fW15YtJ+0\nvGLaOxigd5d6oToeHN6BsX3a8NrSg3y4WusbXdlVfdlUYUtKSgoRERHExcVdtGNwUkoyMzNJSUmh\nffv2HuXhbB2MCe2b93OAxYBv/CaKoJBbVP1HqCxY3GZrDqfx30X7ue/LrXZnm5lMEiFw+ALuNodW\nGXtpG9o2DeexEZ0IC3HtZe3TNopfHh1M4sksn0cFbhWp5707L2PCgHa89dthru6uVtorbDEYDBe1\ncgHtvW7WrBnp6Z6PyzpzkfURQnRDG9yfg/YJ4jnAb+avTipqEeWf0XVtfa09t9mUER35y9AO5BUb\nuemjjRhKTdw9sB0PDO1A4/Byy6iopIyP1hyjdaTe+rlgd11ROp3w6yK7gR2aWT/nq1BU5mJWLha8\nvQZOpxlLKQ9JKV+QUiagWTGzgb97VaoiKOQZSqkfqnNrTMXiNrPMNnvrtyNc+dYaHpiVyNkcA22i\n9Hy4+hjD3ljNp+uOW6cC7z+bw/ncYp69oZt6URWKixRX1sHECCGeFEJsAO5BUy6f+F0yhc9Izytm\nypwdzFifhM7Dxt7iNvvp4ctpE9WAPSk5RIWHsXjqUJb9dRh9L2nC68sOceWba/lx2ykOntO+FJjQ\nromTnBUKhTs8//zzrFy5ssr+tWvXcuONN1bZn5mZyYgRI2jUqBFTp061ObZ9+3bi4+Pp1KkTTzzx\nBL5eeO9skH8dEAH8BEwGMs2H6gkhmkopL/hUGoVfeHPFIZbvO0d0Y73TQXZnDGjflPmPDmbN4TQa\nNwhDCEH36Ei+mjyALUmZvL7sEP+at9eaPhARAhSKi4mXXnrJrfR6vZ5p06axb98+9u2znRj86KOP\n8tlnnzFw4EBuuOEGli9fzqhRo3wmqzML5hKgCfAwsAJIBLab/y6uGCu1mDWH0xnTO5rNz17NV5MH\neJ2fEIKrurWqEpNrUIdm/PLYYD69J4G4ZuFc26OVw8/OKhSK6pk2bRpdu3Zl6NChTJgwgbfeeguA\nSZMmMXfuXACWL19Ot27dSEhIYP78+XbzadiwIUOHDkWv19vsT01NJTc3l0GDBiGE4L777mPBggU+\nrYOzQf44n5amCDhpeQbS84rpHRvlPLEPEEIwslc0I3upb5Yr6gb/XbzfJrqEL+jRJpIXxvR0eHzb\ntm3MmzeP3bt3YzQaSUhIoG/fvjZpDAYDDz74IKtXr6ZTp07ccccdbslw5swZYmNjrduxsbGcOXPG\nvYo4weVYYkKI3kKIsUKIWyx/PpVE4RfScosBiGmiXFUKRW1h48aNjBs3Dr1eT0REBGPGjKmS5tCh\nQ7Rv357OnTsjhOCee+4JgqTV49J8VSHEl0BvYD/a2hjQ1sfYt8nKzxsJvA+EAJ9LKV+vdFyYj98A\nFAKTpJQ7qjtXCNEU+BGIA5KB26WUWRXybIc2nfpFKeVbrtSvLpNdqE1NblJLvtGuUNQ0qrM0ajMx\nMTGkpKRYt1NSUoiJifFpGa5aMIOklP2klBOllJPNf/dXd4IQIgT4GBgF9AAmCCF6VEo2Cuhs/nsI\n8+w0J+c+A6ySUnYGVpm3K/IOsMzFetV5sgpLAIgKV+FQFIrawpAhQ1i8eDEGg4H8/HyWLFlSJU23\nbt1ITk7m+HHto3jff/+9W2VER0cTGRnJli1bkFIye/Zsxo0b5xP5Lbj6RcvNQogeUsoDbuQ9ADgm\npUwCEEL8AIxDsy4sjANmS21u3BYhRJQQIhrNOnF07ji0IJsAs4C1wL/M6W4CTgAFbshZp8ku0iwY\npWAUitpD//79GTt2LL1796ZVq1bEx8fTuLFtRAu9Xs/MmTMZPXo04eHhDBs2jLy8PLv5xcXFkZub\nS0lJCQsWLOC3336jR48eTJ8+nUmTJlFUVMSoUaN8OoMMXFcws9GUzDmgGNdikcUApytspwADXUgT\n4+TcVlLKVPPvc0ArACFEIzRFcy3wlGvVqpvkF5fyr3l7yMgr5vB57YFTAR0VitrFU089xYsvvkhh\nYSHDhw+3DvJ//fXX1jQjR47k0KFDTvNKTk62u79fv35Vpi77ElcVzBfAvcBeysdggo6UUgohLCuD\nXgTelVLmV7dyXAjxEJo7jnbt2vldxmAwe3Myv+5JpXPLRmQXGmkQFuJxRGSFQhEcHnroIQ4cOIDB\nYGDixIkkJCQEWyS3cVXBpEspF7mZ9xmgbYXtWPM+V9KEVXPueSFEtJQy1exOSzPvHwiMF0K8AUQB\nJiGEQUr5UcUCpZQzgZmgfTLZzTrVCrYkXaBb6wiW/204x9Ly7X7JUaFQ1GzmzJkTbBG8xlUFs1MI\nYYmoXGzZKaWsbhbZNqCzEKI9mnK4E7irUppFwFTzGMtAIMesONKrOXcRMBF43fx/oVmWYZZMhRAv\nAvmVlcvFwomMfGuIFkffX1EoFAp/46qCaYCmWK6rsK/aacpSylIhxFS0CAAhwJdSyv1CiEfMxz8F\nlqJNUT6GNk15cnXnmrN+HfhJCPEX4CRwu4t1uGjIKjDStKGalqxQKIKLSwpGSjnZk8yllEvRlEjF\nfZ9W+C2BKa6ea96fCVztpNwXPRC3VpKaU0ST8Hrow7QxluLSMvKLS2mq1r0oFIog4yzY5XPAdEdB\nLYUQVwHhUsqqk7QVfuflJQf4fMMJGoSFcE2PVtzZv
y0dW2gusShlwSgUiiDjbKHlXmCxEGKVEOJN\nIcQ/hRDPCyG+EULsBcYAf/pfTEVlCopL+XpTMiO6tuDmhBjWHU7j7s//ZNBrqwBoota9KBR1EnfD\n9RuNRiZOnEh8fDzdu3fntddesx4Larh+KeVCYKEQojMwBIgGcoFvgYeklOoTykFid0o2pSbJpCHt\nuaJLC56/sQcr9p/jm80n2Xk6mw7N1eC+QlEXcTdc/88//0xxcTF79+6lsLCQHj16MGHCBOLi4vwe\nrt/VMZijwFGflarwmox8LQRMTJQWglsfFsK4PjGM6xNDaZmJ0BCX45gqFIoayLRp0/j2229p0aIF\nbdu2pW/fvjz11FNMmjSJG2+8kfHjx7N8+XL+9re/ER4eztChQ+3mI4SgoKCA0tJSioqKqFevHpGR\nkTbh+gFruP6AKxhFzSPbHGOscYOqYy1KuSgUPmTZM3Bur/N07tA6Hka97vCwL8P1jx8/noULFxId\nHU1hYSHvvvsuTZs2JTExseaE61f4Fykl7608wp0zN/PasoPsPJVVrT/UEiVZxRhTKOoevgzXv3Xr\nVkJCQjh79iwnTpzg7bffJikpyd9VAJQFU2PYnJTJeyuPEtcsnMTkE8xYl0RMVANG945mdHw0vWMb\nUzEETnb/a9GrAAAgAElEQVShkUb1QwlT1opC4V+qsTRqA3PmzGHkyJGEhYXRsmVLhgwZQmJiIsOG\nDQtuuH4hxD/N/z8UQnxQ+c+nklzk7E3JAWDBlCFsf+5a3rrtUrq2juCrjScY9/FGhr+5hteXHWLf\nmRyklGQXlqgAlgpFHcWX4frbtWvH6tWrASgoKGDLli1069atRoTrP2j+n+jTUhU27EnJZs7WU7SK\nrE+UeYHk+L6xjO8bS06hkRUHzvHrnlQ+/yOJT9cdJ65ZOMmZhXRppWaKKRR1EV+G658yZQqTJ0+m\nZ8+eSCmZPHkyvXtrgfD9Ha5f+Hrec22iX79+MjExuLrz8z+SePlXTY9f26MVn93Xz2HarIISfjtw\njiV7UvnjaAZjLm3DhxMuC5SoCsVFw8GDB+nevXtQZcjPz6dRo0bWcP0zZ84MSkRle9dCCLFdSum4\nsTLj6ieTW6B9a6UHoLfsl1Je5Z6oisrM2XqKy9pF8eKYnnR0EpiyScN63NG/HXf0b0dOoZH6YWr8\nRaGoq1xM4fq/A34ERgOPoEUxTveXUBcLxaVlJGcUMHVEJy5tG+XWuY3V7DGFok5TF8L1u9oFbial\n/AIwSinXSSnvB5T14gUmk+Sv3+/CJKFt0/Bgi6NQKCpxMQ8fWPD2GriqYIzm/6lCiNFCiMuApl6V\nfJGz6lAay/ef47a+sVzfq3WwxVEoFBXQ6/VkZmZe1EpGSklmZiZ6vd55Yge46iJ7WQjRGHgS+BCI\nBP7ucakKtp7IpH6ojldviVdrWRSKGkZsbCwpKSmkp1/cIwF6vd5mtb+7uBqLzDIJOwcY4XFpClKy\nCllzOJ1vtpykXdNwpVwUihpIWFgY7du3D7YYtR6XWjchxCwhRFSF7SZCiC/9J1bdpMwkuWX6Jv5v\nwT4MRhNxzRsGWySFQqHwG666yHpLKbMtG1LKLPM4jMINjpzPIy2vmOdv7MGwzs2JbaIG9xUKRd3F\nVQWjE0I0kVJmAQghmrpx7kVPSamJxbvPMmtzMgAjurWkvbJeFApFHcdVJfE2sFkI8bN5+zbgFf+I\nVPd4YdE+vt96GoCYqAbENVOWi0KhqPu4Osg/WwiRSPnal1uklAf8J1bdocwkWbw7lZv6tOHFsT3R\nh4XYREVWKBSKuorLbi6zQlFKxU0OpuaSX1xK37im1kCWCoVCcTGg5sj6kZxCI/d+8SdCQJ9Y90LB\nKBQKRW3HrwpGCDFSCHFYCHFMCPGMnePC/G2ZY0KIPUKIBGfnCiGaCiF+F0IcNf9vYt5/rRBiuxBi\nr/l/0EPZ/Hkik6xCI5/e05f42MbOT1AoFIo6hN8UjBAiBPgYGIUWhXmCEKJHpWSjgM7mv4eAT1w4\n9xlglZSyM7DKvA2QAYyRUsajBeP8xk9Vc5ntp7II0QmGd24RbFEUCoUi4PjTghkAHJNSJkkpS4Af\ngMqfSxsHzJYaW4AoIUS0k3PHAbPMv2cBNwFIKXdKKc+a9+8HGggh6vurcs54afEBZqxLolebSBrU\nCwmWGAqFQhE0/KlgYoDTFbZTzPtcSVPdua2klKnm3+eAVnbKvhXYIaUs9kx07ziens+XG08wunc0\nH91V+77hoFAoFL6gVi+WlFJKIYRNuFMhRE/gf8B19s4RQjyE5o6jXbt2fpFr35kcAB6/qpMKxa9Q\nKC5a/KlgzgBtK2zHmve5kiasmnPPCyGipZSpZndamiWRECIW+AW4T0p53J5QUsqZwEzQPpnsbqUc\nseZwGl/8cYJSk4ktSRcAiGumVusrFIqLF3+6yLYBnYUQ7YUQ9YA7gUWV0iwC7jPPJhsE5JjdX9Wd\nuwhtEB/z/4UA5mCcvwLPSCk3+rFeVcjML+bRb7dzIqMAY5kktkkDruneEn2YGntRKBQXL36zYKSU\npUKIqcAKIAT4Ukq5XwjxiPn4p8BS4AbgGFAITK7uXHPWrwM/CSH+ApwEbjfvnwp0Ap4XQjxv3ned\nlNJq4fiLtYfTMRhNajqyQqFQVMCvYzBSyqVoSqTivk8r/JbAFFfPNe/PBK62s/9l4GUvRfaI4+n5\nhOoEPdtEBqN4hUKhqJGolfw+IC2vmOaN6qPTqRhjCoVCYUEpGB+QnldMy8igLblRKBSKGolSMD4g\nJauQlhFKwSgUCkVFlILxkjPZRRxPL2Bg+2bBFkWhUChqFErBeMmW45kADOvSPMiSKBQKRc1CKRgv\nOZCaS4OwELq0jAi2KAqFQlGjUArGSzLzi2kRUcdmkJnKnKcxGiAnxf+yKBSKWotSMF5yodBIk4Z1\n6EuVR3+HV6Lh50lQnGc/TVkpzLwC3u0Fq6Y5VkhlpX4T04YTf0DiV1AalNim/kP6LJKRwl0MuZB9\nKthS1HqUgvGSrIISmlWnYI6sgMV/hWMrwWQKnGCesn8BlBXDgYXw+TVwbl/VNBvehfRD0LQD/PEW\nfHcbFGSUH5cSvr8LXmkNix6H7AqBsTd9CG92ggWPQcZRz+U0mbS8Fj0Os26EJX+DmVfCyc2QcwY+\nuwq+Gg2Hlrp33fPTYcunkLzRtfOkhMPLYd88rVFyF6MBfrgbPrsaEr+EkkKzHGnwwWXwXm/Y8Q2U\nlmjX9JOhWjqjwfUyspJh7v2w+pWqVmf6Yfh2PCz/N2TaDd/nG05v0+qRm2r/eO5Z2PktZBxzL9/t\ns2D2TbDtCygpcJzu1Bb45matQ1TxebRHmRE+HQLvxcP8h12z1EsKYc9PcHqr++956h74aSJs/AAK\ntTiGlBbD4r/BL4/Ame3avmXPwDs9YNVLtu+bKxxYCCv+oz3XAey4CHkR95L69esnExMT3T7PVFpK\nYWEexcZS
7pqxkQFxUUwb20PryUsTSPP/0mKtoSvJ105s3Ba6jIToSyGyDTRsAWHhEFoPQsx/FRGV\n3W7C/jEptfLAXL5lWzrYtuyTVdP8PAmi2sLgJ7TfRRfgkiHQfjg07wxCp+2P6QeTl8KuObD0adCF\nQpfroe1ATUGtfBFa99YUEUCna6BVT1j/JjRuBwXpUGqAjiMgbqimrCLaQL2GENZA+xMhleoqyrcP\nLdEUt4WrX9AamdwKjUFEG8g7C007arK16qXVrX4k1GuklaGrFC/uh7vKX+jIGOg6ClrHa7/Dm2rn\n6kLL/47+Bouf0NKH1IfO10JsP2jeBcKbgT5Ku78VzxE6rRErK4Yjv8Gyp8tlbdAUeozVFPuZRO26\nXEgqly9Ur123Rq002aL7QFQ7aNBEe5bC9Np1E0K7XkIH8x+E5D/M20K7F20HQtP2WoObdUKTy1Sm\n3ee4YdCso1bn+o20MkP1ZtmF7X2w+W3GVKa9A5b/2adh1hjtNwLaXQ4drtCuUcPmUD9CU5555s85\ntYqHztdAyx4Q0Vq7hvUbafWyXscQ7Rn6ZIi2XVaspet6A8QkQONYCG+uXY9QPcwep6U3ma3qztfD\nJZdrz0ajVqBvDCFh2juYstX8jPc1d7AkdLwa2g2EZp21+9ogSksrdJosi56ApDVVn5vGsaBvUp6/\nzlwHEaL9Li2GnyfC6T/L72+X66EoC06sL7+mkTGQewYioiEvFUIbaNeo7SDtGWnYQnt36jXUyrG8\nL0JoHblZN5bn1bQjdLsBOl6l/XmAEGK7lLKf03RKwbivYA5vX0PXxTe5fsJ9C7Uex+4f4OQmMFbT\n06oJXPEMjHhW601t/QwOLYZze8uPh+rhb/ugkflLnWmH4M9P4OjK8gbekqasGDZ9BId/LXc5PLAK\noi7Rzjm4BDIOeyZnZCw8tFYrs81lWg828Ss4sAA6XAlX/EuzyHZ+o/Vgy1x0oQ35m6aM9s3TGmZL\nB8ERjdvBzZ/AgUVweCnkOOkhV6ZhS3jykNbIbPkEktZBcQ60G6wp8SMrtDroQmH8l5C8AbbO1Bqg\nYhetpgEPw+VTNOvn4GK4UMFaGTQFhjyhHTuwsLxT4EtC9XD3z5qFeWAhpB0AKrU9172sNdgHFmpK\n3uSii/WRDZoFsXUGJK2Fwkz76Ua9oTX8276A/fOrd4HpwuCfx8GQo1m0h5ZA9snq5ej/AMQO0ORP\nWgPGQtfkB7jqOegyCrZ9rrmpc1M0BXv/cs0y2jtX6+xNXKzJ9OenWuck19VxUAF/3aVd/13fac9a\nz1vglhmuy1gxN6VgnOOpgsk4d4qjv39BaGgooSEhdGkdRUO9uXcidOYepLlnE95M69VaMJVpD2re\n+fJefFmJ1pMpM1YopdJ9sblP0na/0Gk9FaHD2mux2a5w3N4+KN/WhUKHEVqPsSIlhVpP2pCtNYgt\nulS9MFJqdco5DfUiqqYx5EBRNjS5pOr+7NOQd05TvsYi7eW0WFhV6m/ejhsKrSp/hdsBpSXay5h9\nWhtbMhaaXSqV8tWFQa9btJ4gaO6OnFOay6owUzvXVKY1fqZSzerrdI1tnYqyNLdU4QXtd5nRnNZy\nnqm8txxaX7PsWnYvP99kgvxzmjUTpndcJ1OZ5lrKPqUpGmOh5jqTZRWsU6nd1243atZCxWuee1a7\nX20SbO+3IUdzC+Wmmu+HQXtOTUbb+2D9XcEatjxjluff0lNv3kWz7CxYnqeiLO2ZCm+uWRQV79eF\n45p8hhztXlW87qYy7dpEtdM6ExVlykvV/govaHIbDZps3cfaXs+CTO1ZzT+v3dcyo/YulpVoHYyK\n8lS8r0VZ5vtaWm6pCZ1medaPsH1u8s5r9SvK1q5fRevOVKbdf32kplzqVfh2VOGFcovUEVJqclw4\noSmfknzzdSq1vf8ATeKgU4UQjqXFWp0bera8QikYF/BUwSgUCsXFjKsKRg3yKxQKhcIvKAWjUCgU\nCr9wUbvIhBDpaB8t85TmgJvzBWskdaUeoOpSE6kr9QBVFwuXSClbOEt0USsYbxFCJLrih6zp1JV6\ngKpLTaSu1ANUXdxFucgUCoVC4ReUglEoFAqFX1AKxjtmBlsAH1FX6gGqLjWRulIPUHVxCzUGo1Ao\nFAq/oCwYhUKhUPgFpWAUCoVC4ReUglEoFAqFX1AKRqFQKBR+QSkYhUKhUPgFpWAUCoVC4ReUglEo\nFAqFX1AKRqFQKBR+QSkYhUKhUPgFpWAUCoVC4ReUglEoFAqFX1AKRqFQKBR+QSkYhUKhUPgFpWAU\nCoVC4RdCgy1AMGnevLmMi4sLthgKhUJRq9i+fXuGlLKFs3QXtYKJi4sjMTEx2GIoFApFrUIIcdKV\ndMpFplAoFAq/oBRMLSUtz8CFgpJgi6Goo5zMLMBYZgq2GIpajlIwAWTZ3lSeX7gPgMPn8kjOKLAe\nm772GPN3pLic14BXVpEw7fcq+w3GMl5bdhCDscx7gRUXJZn5xVzx5lpeWLQ/2KLY5WRmgfNEihrB\nRT0GE2ge/W4HAC+N68X1760HIPn10QC8sfwwALckxHpVxhcbTjBjXRKN6oXy+NWdvcqrpmIwlpFn\nKKVFRP1gi1InyTWUArDpWEaQJanKygPneWB2Ip/ek8DIXtF+K8doNJKSkoLBYPBbGbUBvV5PbGws\nYWFhHp2vFIwfyCk0kp5fTKeWjQJetsWtYc+98cT3O9l3JofVT13pdzke/iaRc7nFLJwyxLrv8Lk8\n9GE6LmnW0Ku87/psCztOZVuVc10mLc9Ak/B6hIUE3tkgA16icw6k5gKw/2yuXxVMSkoKERERxMXF\nIYSwOVZaZsJgNNFIrzWf2YUl1AvVEV7P/82plJJTFwpp0ag+4fU9K6+0zERhSRmRDapXGlJKMjMz\nSUlJoX379h6VpVxkfmDsxxu45p11GMtMlJSaWLjrDKUO/NlL9pyluNR37iyB7cvwZ1Imn647DsCi\n3WdJygiMe2HF/vPsPp1ts+/699ZzxZtrXc6jqKSM+BdXsPLAeZv9O05lOzij5vHb/nMcT8/36FyD\nsYwBr6zi3/P3+liq6hHOkwQd6WftZzAYaNasWRXlApCUUUBSRvk9PXWhkGNpnt1jdzGWmcgpMnLq\nQqHHeSRnFpKcWUCZqfoxNiEEzZo188qKUwrGD5zM1G5+32m/0+W5Zfz1h1189scJu2mnztnJm2b3\nmC+xvH93zNzC68sO+Tz/QHA6q5A8Qyn/W1475Qd46JvtXP32OrvH1hxOI+6ZXx02TsVGrQFYsf+c\n2+V+vOYYL3o5huLvRtwZB87mEvfMr+w8lWXd52/ll5ZroN/LKzGWmewqF6DWj29aOrSu3F9H18BV\nlILxkuLSMkwm+3fK4ssGSM8rdphHaq7v/LxePg81kproqnGXl5ccIO6ZX232/bonFYAdFRpQe3hS\n/zdXHObrTckenFlznqE1h9MA+K2SBQsgHVyVHaeyyC70fHbl8v3ny
MgvpqC41HliP5KcUeBwMsPH\nb73KpvVrquxfu3YtN954Y5X9mZmZjBgxgkaNGjF16lSbY9u3byc+Pp5OnTrxxBNPIH3cq1AKxku6\nPrecp+fucZrO0QthPuhzgt379AUV27nCkqovfGmZibQ875TzLztTqrjyAOZuT6ni2tp8PJOiEvu9\nV5NJ8sna4+QajHaPf77BvgULOL7/QW7oq31mA0jFZ9mZ8rtl+ibunLnFvwL5kZSsQg6czSXXYCSn\nyP6zNOWpfzN4+AiX89Tr9UybNo233noLsH2sHn30UT777DOOHj3K0aNHWb58uTfiV0EpGB8wb0cK\nc7dXP8U4UA1+oNqknaeynPa8AZ+s1TmWlk+P51ewJ8VWEfzfwv0MeGWVV73Nv/+4m3Efb6yy/6mf\nd3P12+v4x4+7KDNJTl8oZMJnW/jXPPudiXVH0vnf8kO8uNB1t5TL9yrA7XzlcbxgUZ0ysfc+rT6k\nWTqHzuV5XqYbaX3d2wftfSk1j43MeO9NunbtytChQ5kwYYJVQfzf3x9jxZIFACxfvpxu3bqRkJDA\n/Pnz7ebZsGFDhg4dil6vt9mfmppKbm4ugwYNQgjBfffdx4IFC3xaHzWLzEc89fNuxvf1bIqxP3qK\n/u593jx9E4DTmVwJ03732WyvvWdy6B0bZd3+/YA2NlFYUkZDD2fUOGP+zjM8fnVnq+Vy5Lz9xqu4\nVGsU8jxQdo7ulaWBDZYdUVOsYFef5fu/9k/Yp/8u3s+Bs7nWbUuHxvLMVd52hR5tInlhTE+Hx/ft\n2sGqZYvYvXs3RqORhIQE+vbta5PGYDDw4IMPsnr1ajp16sQdd9zhcvkAZ86cITa2vM2KjY3lzJkz\nbuXhDGXB1DFqiv/cF1Sui6MGL5CunEPn8jiXU9Ut58l1d3aOv2/l+VwDk77a6tCt5yrrjqT7ZeC7\nplhSnlJcWubxk7kr8U+uvO4G9Ho9ERERjBkzpkqaQ4cO0b59ezp37owQgnvuucc7gf2AsmAUtZjA\nNEBSShtlsC35AmMubeMgrSf5Oy/fH3y0+hhrD6ezYOcZ7rs8zqM8DpzNZeKXW5kwoB2v3RLvWwEt\n2IzBiMq77HL6QiFtm4b7pPjKlobFVRsf0xghhHW7onWdmV/MmewimjasR2wT38hREe+eCAFIYmJi\nSEkpd+2npKQQExPjpWS2KAsmQFTXSNQUV0RNx+FlCsD1q6hg7BUnqj3qaZmBUqCVy7W/3x7ZRdoY\nW5KHa32qw5vqD3tjDd9scSngr+8K9SF9+g1k3crlGAwG8vPzWbJkiflIuXzdunUjOTmZ48e1dW7f\nf/+9W2W0jo4mMjKSLVu2IKVk9uzZjBs3zldVAJQFU2e5GJRWoMYoXMk/UMrAl/hC5GC5sVx5vmes\nO869gy7xKP/84jKz5Rqc+vXqk8CV146id+/etGrVivj4eBo3bmyTRq/XM3PmTEaPHk14eDjDhg0j\nL8/+GGFcXBy5ubmUlJQwd94vfPLdPLpHD2D69OlMmjSJoqIiRo0axahRo3xaD6VgagC+VAa1saFz\nTM2si6+s0ZoyxuALF5w/lXzFvN15vD2pVsXsLxSW0Kxh8OLdTXx4Kp+88zqFhYUMHz7cOsg/7d3p\n1tBBI0eO5NAh5wuRk5OTrb8PnM21zlTr168f+/bt873wZpSCqaPUSQOmBptl3qgKZ7XyV60dyexO\nI+7P/kyw1a+jBdQWJP6V8aV//Y2zyccwGAxMnDiRhIQESkpr1ycUlIIJEDW3aay5BHuWlW1Zvi+t\nptTP0bPpr8kF7lJT5Ag0r3/0uc3EgdqIGuSvo9TFd9JxQ+jncqXrPXVPRHE+i8yDTF3AkTvVIzer\nH2SsdqGln7psPrPILPnUwffQHZSCCRDVNRK+fFnq1BCME2pSXf2yDsY6iSE4rZRLkxv8LkWlUDFB\nd5y5Ru2Q0v8oBVNHqSlxpOoGttey2s6CD82NQDWmvhDZH89btfV3obiL1bVWk1AKJkBU9wKq98A+\nrjavgVCmFWWxV57LLjQ7N9uZ/DX5+QjErEVPZ5H5ixogQq1BKZgAUZMbidpClQWBNfBVd+821wz5\nK8tcM6RyNgbjpzLdWTIbpHfa3XD9RqORiRMnEh8fT/fu3fnsw7etx1S4foVbWF+QOqDQXO0d1wTl\nbbnungzYByvGmrPL6851rQn3oDL+E8l1FexrGQTuh+v/+eefKS4uZu/evWzfvp2fvv2aM6dPASpc\nv8JNaoILIVAEqq4uNZ5+GOQPFJV7rZ6sg6mB+sUjfHdPvM/IV+H6hRAUFBRQWlpKUVERYWFhNGoU\nocL1e4IQYiTwPhACfC6lfD3IIjnFHy9nXXjhK7+ijsz3QNTVJhaZFwXaOzV4Cy2rbwRrykQR21lk\nnp3nFcuegXN7rZvtzeH5Rf0QQNDB8omGCuH6I0wmOhhNhIYICA2pmmfreBjluGlyGK6/wgVwNVz/\n+PHjWbhwIdHR0RQWFvL0C6/SuEkTzp45osL1u4MQIgT4GBgF9AAmCCF6BFcqjUC9qjWkU1wHqeCb\n99HHSWvqvappY1v2FF1dnyHmy3D9W7duJSQkhLNnz3LixAlmzfiIlJPJfq6BRl2zYAYAx6SUSQBC\niB+AccCBYAijw0QrskilWbXp/PGu+PUFPPo7S+s9y5iSl/1Xhh1qZJOSuhutSW7tUnLtvrjZgAep\n4q48Qtb1hNUlPrsTLiRBr1vdKt/eGJy/3Yp2s69kaZxIyUEi6dWmMUInSLITrj+voISUrEKahNfz\n2WcDPGXOd98x8rprCAsLo2XLllzWfxD79+zkjjHXqXD9bhIDnK6wnWLeZ0UI8ZAQIlEIkZienu5X\nYf4aOo/N+seJFemB+2Syty9g4QUwOfl41MIp9NCdpCm59o8X59NdVA2V3kmkcLD+JMg+5aWQsKre\nkywqfgAIcm92xnCYMcy66Q9ZJBJMJuf3xRHpR6CkoMpuISAcA0KWVdnvKi6lnXklzL3f8XFTGeyb\nr9XRDg4nRhiL4Mx2h9naWD7Gqh+J85aKYoVg8rynWJwP0rbuVcP1L9beTXMZEtfD9bdr0ZDVS38B\noKCggN07ttG+U2eiAxCuv64pGKdIKWdKKftJKfu1aNHCr2UN02l+25Y4/3a9r/HoWS8phDfaw7J/\nOk5jyIH889Xn88NdLKv/LKHYfj54QsgaGogSOLjYJXGqa7w66lJpwQWG6fbQ+qsBtg3Iosdh2b9c\nKqMKFV5iC74M1185r6dDf2DoLjvX+8R6dMnry7d/nggvNXWpDBtMZfBxf/jxXruHD+jvZ+jBae7n\nWwmv1Oq2z2HuZNj1nc1ue1fUOlsPYNET8NlVkJtaff4ZR+GVVrD7R6eiCAF3hqympciuJpHtZhil\n9NSddP5e2MNYBJlHIfesze7Bfbox6tqr6N27N6NGjSK+SwcaNwhBGMs7ChXD9SckJNCyZUu7RUy5\n5ybyC4ro2bMn/fv35+Y7
7qZL914ATJ8+nQceeIBOnTrRsWNHFa7fCWeAthW2Y837goLJrL91mKih\nDh5bjIXa//2/wOi37af5uuo8+yqc2gyALgB1fiF0NqG5ZyH7JLToqu3cMVv7P+p/7mV24QR80Aeu\newVob3PI2QfHPEEImBK6CM7ZOThrDHoA5mj67uAizwqx9IxPrKtavvl/19SFnuVtk4sX5JkVREGa\ne+ed3aH9L3HysbPz5nD0h5bApc6/W/962Occ5DqXb3QYZgvQkAMRrrlKrZjMnTBjkc3uON15Xnp0\nPO+8864Wrn/wQPr27g5o4fpDda6H62/UMJyfZ74BbS4D4GBqLsYyFa7fE7YBnYUQ7dEUy53AXcES\nxvJ8On8Fa5jyqc78ObfH+tOZAhGOjnvoSqj2NF+4prLNbr2jK4BHbLJ2dg/LxyKqTxc0b56dgp2u\ng/GTKD7H0exC6+6aNWnBVR7658scOJGqhesfP5qE+O6VfAJeEoAb7LWCEUK0BIYAbYAiYB+QKKUM\n+IcLpJSlQoipwAq0acpfSin3B1oOqzxBeLBr2gwgb3ClLn65xk60gL1xFo+CXQbkXrlfhidS+URx\nupCJ7XWuO8+6PeZ8/KrV6iDjqHNLrQbisYIRQowAngGaAjuBNEAP3AR0FELMBd6WUjoYCfYPUsql\nwNJAlukMgXQSINH3ZXqVpYutpUMLpdYSiLhaNeiaOVWkzrPwzawuR58NcHyGK7LZi/rmXJKarrR8\n9/wE4kn0xoK5AXhQSlllSpAQIhS4EbgWmOdFGbUaS+86oB/G8kVhLmo8x0XZP+LtA12DmmaH+EOB\n+CZHN3KpYW2s1zPzrOEG3MlHulCuB1POaxneXntvFMzbUkp7w5NIKUsB38YcqMUIUb0F4w/q+Do0\nO/ivwhKJELoK21Vxtecb+PviuEDnM99cF9Yf1arum122itxZ6e4rAX1OEvm5vWkZqQ9IxGiXCeDz\nI6UkMzMTvV7vcR7eKJhdQoh9wPfAPCllNfP6Lk7KLRgnrohACFMLqfxe+329SzUNicufDnDHUPAi\nxL8/cTVwp5Y2iDi5gN5cttgd/2NDs+GU5OdUOXY+uwgpISRXj04I0rJyMYpsCKkPGdrQc2FJKRcK\njOTVCyH/fD3HBZUaID8NQnMhvZTzWdpssoPCvEYv56D2Pz8NSg2Y0iXnCyBEByKngWuVyU6zySs1\nx2+CbtMAACAASURBVECZSaLL0ROiq/4a6vV6m3Ay7uKNgokBrkGbqfWqEGILmrJZKKUsqvbMi5BA\n+949K8+95sLzMZgarlIrr4OpLK4d8f3ZyfVfLDIH+4M3yl9JDtcVXbW44yIzJw0ryWZ/Xn1G9O9c\nJclNzy2juNTE7heuo3GDMJ599n0W1H8eYvrCg6sBmLc9hScX7ebmy2J4947ujss78QfMux0uGQqT\nf2XUM78CkKw3T3590azgZj8LSWvIvvUHRi8y0bxRPRKfu9al6vPiIJu8Jr+6inO5BjY9cxVtolxU\nUh7i8UJLKWWZlHKFlHIy2tqTL9HCspwQQnxX/dkXB1K66jbxXRMSSHPeoYIJoAyBUFX+ClsfkKtU\nnUDCe8vaqgTcEMlV7D1Gdp9vh3WsvFDAlUF+z+SqXg4fUKnQ2uIC98lKfillCVq8r4NALlCNyr54\nKH+8Az8Go/AdrliD5U2Y9zOzfHGON5nUhmdVk9FFFe3RIL8LyaXNv8Dgw8IC0Q/0SsEIIdoKIZ4W\nQuwAlpjzGyulTPCJdLUcV8dgaivOHtDK9XZ3zUrgx1bt93SlrLyS356PzIPSatLgcTBx4zp4Nj8x\nCOvRrDrN1XffvTp41KI4c/36AW/WwWxCG4f5CW26suOocxcpwVQr3j083r4Uvnmh3WqAffG2VDvI\n75s6BX4djH/LczWCgTd4es2sMgnXXWRu5V/tDD2fFlWer7lMb9zqgezXeDPI/wzwh6zrH2bwAb6b\nDOp9WdWf7JkrwWF2Xtascl2C9aRVLtebRbM16W1x5r5w5dX2Z2NlN9ilN+W5VJ8K3/1xlMbhW+bJ\n0k4XsY51eZFjJVO8Ri+0lFKuBzDH/XociKuYn5RyrLfC1XYquoQC1a44egFd6ge42dPzt+uvpniQ\nXBuDqSHCOqQGaTY3cfjouvyA+OfeWOTyKlyRuw+5J98TCiK+CHa5APgCWAwEPP5YbSAooWI88re6\nZ8G4GgDSnwQiFplbs8i8PO5zqhHel8EufeL6q5yF6wuFPCzAs2yDM4vMYm96o2AC38nwhYIxSCk/\n8EE+dQ5bC6b85vrTq+jo0TP5oUx/WzBVXFN2XRAWpeiLvo3jIWRn4fo9sbZ0QTbRfFG6byw3B7HI\nzP9Ndi64K18GtZ5maZx9/A5YcjPZmRzizmJV1xLadv68GeQP5FPnCwXzvhDiBeA3oNiyU0q5wwd5\n1wkE0uaJqNpw+p7KebpWhsXm942LzNfh+u1m5bOcHOOWcnY6BlOhEQq6p8N3U6r90WfS2XHZuuPE\ntV5rN1y/FW+JI6vMIpe0NvZVOzn+u7eWQX7Pz7WMM5nsaW4f4wsFEw/cC1xFuYtMmrcvaioGu6zY\nSPnztjpa/exSI2lN46WEDt4ud91ZVZSkXbH8MUOoeheZvWvp8jqYCr+dROnwEV64yNwY5PeHgrHk\nXfFLyl6t7vfROpiqlpXlIlRN6zPPgbAtwyMviFkB1pZZZBZuAzqYF1sq7CCQNmZ+5YcuEBPx3Ask\n66sxmAD6fH0yTVlnzqp6+89eUTqdaw1fxePBXgfjyL1lUZLBnhZgT2mXt7MVZkQ5cI9WtXtcKLOi\nK9TBzay8xsVe19F/99YbF5mtggnEjEZfrOTfB0T5IJ9ahzPFIB30rj2yLlzEklfl3rFfepgBboLs\n+uJ9acFYffW2DVbVacqOLRhn97Imzep3POPQ/bz8USt7DaGtBePivbfW0z0pHXmQrC4mizVhZ3KM\n7+1qH4SKsY7B+GDKs4v4woKJAg4JIbZhOwZT56cpO+2tOvxte6LJh3Pvyt3Otg+ke0rMNw9e+SI8\nWUkeF0eEKslsrw6yPLHb8lXBQY/YlfEsVzusFRutgCibameRObBg3BCr8niEL7EX58ym4XbSFS8f\nY/DMgnGEpfNWbulVPcn16+LJNGUPO6Xm59oifwCGYHyiYF7wQR61ElfvT5UxGD9aMNZXqrIF49LJ\n0va/E1y1YEwSQoT3ysDey2pvBo/n2M+r8mCo/cFR216tIyre60C84NWhc1C+O8+jzqcTtGwzqc4q\ndMWCqerpdC5kxZl9jr76rhOO7rWskEb777POY6WJCj4Z5A9AB8ebUDFCaqxzlsbTMmo7Nj0bm55r\npXQ+vEKOLndwLJjy3lYIAknFufwuSFFFEdtJ4+banWpxODnBuRyVxmAdUl1HI9AIB9GULTK60rcW\n+KCxcnDdhZ2LajOzrNxEtnu+VSYPQ8U4G4OxdDTsdZysaXy2Rsf2WnhjwdSWMZg1Q
ojHhRDtKu4U\nQtQTQlwlhJgFTPROvJqNq7qzck+/ysPhh/HAymssXFsm4p4F4xj77jmrteHhmpXqxfKplq623Opm\nkbnjN/WskXD3HMfpHb387hThqmL1BHtqwXZmmRMLpnJOLs2Kq/js2n9OK7sW7Y0D+vwzBpUtGE/y\nsCgYy2ZNtmCAkcD9wPfmcDHZQAO05/Y34D0p5U7vRay5OLs9FaMp2yy0dDcjN3DU+3RpQM/NacrO\nvgdTHpivUq6eKhg75ZnwZQvnYFKGK+snXGxQvHaLSemzeabOLBiX8rCc4wd/n72IxDbjH87GYLxc\nB+PoZlV23Xk3BuMhbmUrtBMsbYMfv+FTGW9ikRmA6cB0IUQY0Bwoupg+nexqZ1VUSltlmrIPb7Ul\na12laWRuvf8+smCqKhjvlIH9OvhyFpklq8o+zErJ7DTwTrw1VmzHYDx1c/jkM07oXB27cAG/RIqw\n87iUu+TANnyKPZnKz3K3TC3bMrtpLNmaZCUFYkcR+lzvejLIL4R2XqUZpjV6DKYiUkojkOqLvOoS\nNhaM/efQ7rY3WB7oyq+Uaw+TuxZM9VgasOp6eu7gf5PekQVji70FkjbrM6rB6zEYd62/agpxtNDT\nk4bHHxMWyq2ACjttjJbqOyxVnhc3B/mdzU4zSe29tpdK5+uBdK9mhVqweDfMsgUgcqRvukIBQgjx\nohDijBBil/nvhgrHnhVCHBNCHBZCXB8IeZxbHhVmpFR0kfmxobSW4806GBcT6xzFNjW/DJUVTIUC\nPBLD3ktlsm1xvMOBy6VC5w+wH0PM1dhTNh0NT6wun8Rcqx53lEV5b96b61/92JzdMS+bW+DAlWVJ\n5OkHzRxYMGWWDpMs/11ZDpevh9vuTss75cYpldZ3udoZ8gU+sWACzLtSyrcq7hBC9ADuBHoCbYCV\nQoguUjp4QnyEy8+QMwvGdyKVr4OpsjDLVsG5921zd7FYbtoDXb4gzbty7K0890ewyyqRqJE2DYm9\nnr91TYQ7CiYQFkw1OB7kd12wMpPFr++NICHmgm3rZneswMaaqb5zEWa9UR52Qhykt9RZSmn9XTm9\nsUz7Hapz1od39fl1rQPjThk1fRYZAEKIfwghYnwhjBeMA36QUhZLKU8Ax4ABQZbJ5l2o+DsQoWKq\nrOS3Kd/Z2V66yMwHdJS/iFqu3rnI7OpEv6zkr2rBlJZVDGRoR9FVVqLYv69er4Nx20XmOL1wYIG6\nI5ZVwXhzby3X3WTbH/z/9s46Totq/+Pv8+SyHcCy5NLdLYikCBbYXRgYeG3svuq9duG1G+tiXa6o\nhF5FFEQEBUHpXNhg2X5yzu+PiWfmiQ1CwN98Xq997TNnzpw5Z05883xPxDMvziOWHPFr7HJq5TaA\n+ln6JJFEZajI1P406yl06N/F7azj3QmiR8Tm01Vu+xBPLMp+82fYYPaHiiwN+FII8a0Q4iohRO5+\nKLM2TBNC/CKEeEUIkaWltQC2mvJs09IOCUQb+fdOYVQ/KAk4yvoZl/VVsr4EJlE+9eXOKAkm5j11\nILqe8ebUvkpF8RC9wc7pEAZHCokInf5sJF8oDgWx2mD+BBVZLfmVBEp4wxOxHqtYJG/DqmWBIYmE\n4ydbPDBlnAwJJJiYxb3u720Zcwm+Xdi0QCvSNA8sfa8+axC5REhAXONkVF8Rtroa1wtRESoiElhD\nCtk77DOBkVLeI6XsDlwJ5AH/E0LM29vyhBDzhBAr4/ydCDwHtAP6oDoVPLoX5V8qhFgqhFhaVFS0\nt9UEsIrHcWDlrs0Lyz69to53qoi2EySQ5OsopXbUtZPfIawSTEM3/IQaZIVs+EeN7b/o+mqp0po3\nng0moioSMWlmNKwf4mA/EhizVGZGQz57XXOgYYg/Zi3HGBtp5vwJJBhDPVV/NaqZwLhFIgIcqYu1\n/bESjKvOkNl6g+pYinXioNs3G0TRo/bQaK88bLzINBQCO4ESoOneFiKlHFuffEKIF4HZ2uV2oJXp\ndkstLV75LwAvAAwYMGCfvnAgVPuA1Y3gYZxYDP4HsGMT7oOpjwSjhNT/dQ12DYlVZPFtMKbK1Kt8\ns9SQCIarrXDWq0xr+VH9p7VfRrVfIgmaVt14a4a+oJgZ1ngSjMUWtleya0PtCInHaLgOCaY+aMiu\n/4bu4dHrF2+RFog6JRiXM4qw1GOMmItyJZgGZi8yRZGGpG7+CiFjPNSzvXV9l5Aa5jHs8AB7eVid\nTqQMCazhRTQU+8MGc4UQ4mtgPpADXCKl7LWv5SZ4V57pcjJqJGeAT4EzhBBebdNnR2DJgaiDGYEE\nHKAOp0FgHJZJspfrbb0QERaijfz1eF+wRv3valTft9VaiVgbTB3PRSEURWDiqWyc7L0fh5kAPPzF\nGop37wZAcSZZ8inSWpd49gZd2jJP/HgSgrlFe2eDadhDUmca4iARgWkI9CbWKzx9IjVQgjGrl+1M\nNHdCPu25+MuYW6f2uurNUTeBsUgwDli5vYzPfrXuwIj2IvMQ1CrqjlTNMPLX8V3q258hdW6GhfaO\nhtCXsHaSSpSKbP8q5+Njf0gwrYBrpJTL90NZdeGfQog+qF9mE3AZgJRylRDifeA3IARceaA9yKBu\nCcawQeCwTJLYjZb7D4mUUfWywQSr1f8ub73eJVA5uFe+28jkvi3ISdWe0wZ0xE05kl/9UT++Jhi1\nAMYzmLp07jEOpy6l5L+/FjC4bQ5N0mLbFDT137NfrSfcbC03A0oUgZVSJlQn6dAnrYXA1GmDqbXI\n+GigikwJBUm0rDqDVfGfqWfFftq8m49+VhUF9Vnvymt8pKemxrkTf9TGk2AsKrKAXv/49TXmnE5k\nHXUvd+YucyA54ZmFKBI2PXRsTB0kEkVKPOjlmwiMVvc6JRgZK/3ERVAlpqrdzNHAw+qsNiKzk8KB\nxj4TGCnlLfujIvV817m13Ps78Pc/qy4A/lDtNEznriXWnfXR89dblyGwIZCxC130OxOOK0OCSUqU\nwwIBrNi2h/v/u5qiCj+3TOyq3tA4S4ehIlPf6NUnotNTS/UlhRV+ft9ZwZyVOy334hlMk/HpT8bc\nW7WjnKtmqtGKNj10LNtKq9lTHaRHiwwgloBtKywGDyhOKzGSEoJK7QRa51jNEz+uDcb0SrOKbF1h\nBe2bpNYtCTSUwPgrVAITh6i7ghVqHhwWVUZ9F56Tn/ve+L2huCqx+7uG0oqa+AQmrEkAUQSgTjWT\nTmDifhMZIUzK3kkw4XDY+BZhRcbUQ5Fqf3qFWv8axYHOmujMZ50SjC5d1KkiU+emTDC/E2HZllL6\nab8/WraVyWPbRlRkfwKFOaw2Wh5qqAnUPtn1gSewDrToBcrr3n/doI+Z1QXlUen1kWB0AqMtsCE/\nzDgCVv8nbnaBpDqgTt7lW00RgvRzJ6I2WhqqhFokpE+W72DwA/M575UlvLNki+VevMmaKSot7zRT\n0ppghAGQUnLrRys57umFRlq0Cu4oxwoA
wg4rgZVYJZh4Xy8UR4KJsfGQWIIZ+9g3zNTbG06s1kpI\nYIK+uMnSp44DxZ0cSawqhpo9eILqvaArJWEdG4LNJZoEvHYu7IgNQ1hR47cmbPif2tZdq9Rrr5X4\nKHG+qU6UVQlGe1/UNznV+TWbks4mzaG9TydE9WCczDayj3+OOKZW+IIxeRWp7o/KoQyAZcURAqbb\nD5117YPxqc/iTas9n26DUervRRYIKZw0Y5FxPe+3Aq3e6vWfIMDYBGZfYF7ADFTsJB11QKejTgCB\nlfuJnr+e/SjB6BPw81U7owzKpjyJ6KJOYNwaH1a2DQpXsWfWtfywoYQVazdZsgskVX7NMB5ntOoS\njH7Pa+iqYyWY1QXlFJb7mLt6V8K2OR0CQn4+8dxupOnf2HhJZeT5U/8V4bCrA2G++UP1GtQX/mgC\nc6rrGzXdtBgn4cdbvsXicBBvAdbHgllSjSfBmB+NLueT5TvUH6UbY56LFBCn89bNg783g6LfY24V\nF6plFvi9/LS5VE18aSzMuhi3RmAqSdbENJ1LTvz6aLgJGc4chRV+eKIXvH0KvB573mBFjWmR3v4T\nvHECvDYRfv+vmhY1LnSi7XII3vtxC6Mf+drkhi9QAipz8fzX69ixcQ0g6Sf+4GH3CwA0o1gtqLJQ\n/Z9at++Rucuq/JH6ltXEEhipGfmbCfW7FoTTjXuGBFPXPpgarU+SMoykuBEyNK2A1AlMPSSYaKKo\nM2hmJ4UDDZvA7AOqA1GcppTwaGfe8DxECjV0dmwD4EznArrWLLNkM8MswRSU1exTncwTpLwmUr96\neS8ZNhiN06tSF+RAMMgZL/zA/a/MsmQXqBNPoOBWYusdMfKr708TWvlxdOGTZ3zHea8swR+Mr+6Y\n6PgBwiGWLfyc3o4Nxh2X5kq65b0b+PSD1+IusgC7yiMcfkmlqpbQVWTtxXbyRQHbZGM1QyjCab/g\nfoxhn41FCUbaJ8NBNY8vIiX+Y84arnV9wEWlTxhpIUXSV6wli0g+iwQT5eLq14jUvz5faql7c32h\nVB+KbdzSVwEJWyIEdYBYg4cgr3y+GIDdMo1Fa4sILn5JJWDr5vLLus0AFAY88M3DKpEKVBtjpe41\nTLI26TyWeK8AJI4/5sAetUz85fy0uZSqmsh3r/Jpi6SUULxOTdy6OFJclBOAzggsWl/C9Fm/sqG4\nil3lat8IwKGZWbet/Znmrw/mHOc8XvZEgnw0lrvBXwHfP6smNMqiLpj7xxwMtKgiMiaS8OMmRKOi\nX6BmN7e43wGgmojtrtnuxWxKOgt3OL5kaaBGdSzBGyFOukQEqJPHVw57tmj1UzjW8QNdhcaEhAJQ\nZnKYLdsO29W1JhBWcBFZA3aV1VDhCxpahz9jH8zhGCrmkIEvWoLRuJE+jvWsSppiJI93LoUNS7mP\nmThQYhZ4rysiWg99cAGvXjCQUV32ztPbPGh2VweM34qFc456yF8BL46BYm1x3rGMY/75GZe22MxJ\nROwczUWx5TEHCuW+EK+6H2Zk4QoI7ABPium+pAVFuAuWEs4czglOdQEMK2E2bd9F+w8nQu8zIS0P\nXzCTNTsrSPXGDsnRjp+Z4XmK+asq6Lfr1bjtbu1bQ+tVf4Oi7nHvm9VtpdUBmmUkEfJV8Zr7H4x0\nrrDkzd7wKde7HPykdGKE81cAJn7Sh01J8FW4N8Pn/wHfJkHHSMi73HQvf/N9BD5g3Xxo3hfKivjI\nexe/KvkcH3iAZHxkbJgNKcMguy1Hlf7b8q1WbtvNb5sLmLpuqqU+Jzojag5jEf7+WfjiVri9ENZo\n3voVms3qm0f4t/c+FCl43H8yuKFUppG9+yfcC683isrQJO0KkuGHGWri7vUoUt2jXB0IU7F+MWlz\npsE5syCzFexYrkoC6c0Z6vgNgCainImOxXRb9o6l3p+8cBf93a8b1yWVNZT7gtz87xXMWHdpbCeV\nrLVcBsOS+10vs7akJWsZj0Chy9rnaUI/yiuyjXwdhbrAjnP8RJauMgWuKnsMPlsJZZG+DyuS1xdt\n4uT+LclopBrl/QE/Vf8aT+bJjxNWIkTIiUI3sYnfZD6n/Ot7vmn7Oq37jmNN0g1qhtlYYJY8Tvx9\nOgDt93wL9IWKnQRW/Jvbdx7JtNEdaZWtScmbNJXtT69C+9E4EAxxrI4UKiW8c6ZxqYQlz3qe0q6m\nwtcPwMLH4ZZtqprtpTFQUQC3F+IPKnzuudl4trjSx5F3z+JC53ye4wSbwBzq0DmBppSq3HnZtlrz\nb0o6C4CdG98Ek2+PN8rh/vOVOxtMYH7bUU6bnGQL8dpdFSEwtalm+PjyCHHR8HjldLquVSdmI/wM\ndaziSc+MmPdW+kKRBfrDS+HYx4x7AoWPvXfS5N9lBEffbaT/uKGERz56i39718GC+wDI5HnaiF2U\n+/rEvCMZlXsck4C4WFCo6vOD7jTOCs7nGtcsBvmfpajCT0tRhJsQe8orIC+dZguuoXMUcdExzfVx\n3PRRzhUQBmpq4Jd3I6/d+gfopqX590DBCtprlz0dm7ja+SHnu74g56sKWJoH16/h5KJnjedT8LHU\nezmbPjfitwIwybGQ6e7Ie/DtAdrA3LvU67JtqveSEoSvH4Ruk+CX9wB1o2ueKAEgSQRwBCotZbcW\nqjqxnSiAGk3K+tdw0ke/zAmOn3nK9Qz/fW0Qxzp/J/THXFwDzocXjiKcmofzhjXkUmqUdbRzKaWe\n5iT7I5uX7zURF4BH5qzm/c+/pq9jHUQcrvhVyaenYxMsfUX9634StB1BUnUHznHNB+Dj8DBWJF0K\nxZDm/pE5ZRcbzw9zqLsVjnL+YnlfjlIMG/8XSQj5WbppN1999i6FW/tz85lHA7D4o2cZsftneHEk\n2wf9YGR/0f0oHRw7CEonL4cn0LrgCyj4gkQwbzzelNKbLuXfcdL6OyBwMTzaGQ+w2P8Y3coXcUH5\nDKgqAbMn3/vn8nfXKM50fRVJ++RK2ByxG7pqTEze3RG1GuUF0CRNJS4A9zeltO0l9HXsMLJ4CTHX\nexNNRBnnu77k9fVfMLxj44Tt2R+wCcw+QNe7L0m6Uk14vn7PNZt9LjATJ2EGOdYgRCvL/RITYagL\nX60pZHjHxkx86luGd2hM9+YRUdtMYGp1j83pEFNuV0eE63MKyTueWAe91qKQheuKOENmkiv2qJz0\nmghbN831MU2EKu67F9xtpK/dVUaaSLaUtTzpMgDOL5rO1e6vOc65mCsDV3Ob+y2ai921fIH4cIT9\nPOB+GYDH3M+xcs/pLPT+DYCSue9Cxy/J2PiZkd9Y5BqIKuklRfhZ6L0mklgQS7Suc0ekFSoKIsZd\nDRc4P8crgnQu+MSS/kQ0UX9+BAy4KOJ6W75d3X+haPr2GYMt2XVueJDjdwatu8Fyr5dDVbM0FlaH\nkC4LpvC4W9WPHetUt5OV79xAtmbLcFYWULb5F3KTQuhM+yTnIqiAgHTyjdKLsc6Ikb/MkUGGUoZX\
nBJjlvSfm22ySzejJpkjCqg9h1YdMMeWZ573R+H2U8xeOKrnauG7viH9SSImzKTnVJZFiVyxh8KJ2\nDPZA9R/JUPkLpDZhxGq1TpXepoR11R3QQVuc3SLMVFeUuBIFv3STIlUV8NdrdjGy/LvIzWcGGj8/\n9dxOyhY/JNi/ZSEuACtmWi47LL417nPhkvU4//eQJa3vxhct1xmi0rCD5oo9vP3DZm46pkvCNu0P\n2DaYfUBNIP4guTzwN64KTKOLr3aO+1znXN7x/J0xP19tSS+qSKC3DYfgi9tgt7owjHL8zKh3O8Jb\nJ9GMEhauK7YQktKEBCaKwvgr6+2aDFAqVW+fJzwzuKbgJpW4xMF459K46RlOv8o1x8Hrnn9wnFPV\nyz/reSohcXk8eHKtdXQqkbaf5FzInTunGdc5xT/Cjy8Z1zcGL6VQqqqRmaHRtZZrhjLkSs4N7KWX\n/kOWk8a53kyA6sLSVzDcNl4/PmI7M0Hvo3aOnTH3/h5UJem+jnUx93Q4o067zF72NDwWWYxS3zqG\nW5QXYp7ziDBXBv9mXE/238MLGep1HvH7MoyD73vel7AugMGoNAQ54cLIZkyge1VEOkmW1fBIB3hj\nEqVJKoNX40jj9g3nNOgdFfnjOTtwC1tkUyY5F1K5eh4Pv/6BNVN5RLORIapxmYlL1+PjlvtReFiD\n6lH2wxuwclateWZ6HiBdRMZKS0dJLbn3D2wCsw8I+KrpI2In6RxlMLOVofjw8lF4GE+ETuJU/52W\nPDe53uVu9xsAjHX+TBcRkRi27I5dMAD++8Vn8P0zyM9Ubu5Vz8MAuDd+xQ9J0+gvfrd4MemS0DnO\nueR/cjIYbsNRBdfshoyWcOIMHm35pOXWeiXPcp3vm8npgTuM62Hi17h1rQ3HFb/COc65lCe15Ikh\n3zb4+XMCt1BGSkz6e6GR6o+kzLoLmaN+wxdDE/kgPJKlSmcAvlJiVXQ6Vsl8evnURXVNz5sIjL6X\nFbK9Jc8WpUk9WlA37gtGFrqAdDLU+2Gt+WXLgZbracFpCXLCG+Gj46ZvVppCSv1Us5ZNmlESsJ+I\nN9jPsiMlqP3xgfdeI/3T8FDWXLKe99yTmB68lCUZE+CuPQRze1vKKpdWSfefwdPj1uebcE/j93ol\njydDJxnXi/o+nLghG74iy6e6I2f6tsbN8kjwVMu1X7oY6JvBsx1e4Muej/Gd0pOODtUOlPreydyl\nzevrA1NjyopG6eb48+el0EReD40zrueG+8X9rSN7U0QaH+N/mLVKJNZvR98bcd9xg3ytzvrtK2wC\nsw8YvO4JPvZaCcftwQst19cGr+SJ0Cn8KLvwemgcT4UmAXCF61NLvn+4X+BM53xOd37FrYGnVf3q\n3RkwX+Ps/BUMXqyq4pSS9XHrM8t7Dw4hcAjJDM9TNNv6X3qJ9dzvfpWUXT+So3kzxdhgqkugUTb0\nPZudGX0ttyYH7mWjogbIvrfF80zo0YyKqEkPGHlAHeA61iot+P7cDaw7dT43B1W9uQOFto5dFCa1\nITUllSsDV8eUVymtEtUKpR1nBW7l7dAYFindqSayl+bJzm8w3P8kz4WPJ5jbBy5ZEPf7ACzRCImO\nB0MqN/9a+Gh2DbuHb5ReXB74G8s6XGXk+RenAJCW5KGcVDr63uDLjFMJKJIwTk4IPkCV9PJheDi/\naARn3ZCIuuKawBXcHryQ8wPTeTs0xvL+RJN/k4x8z4mBB1V7X2bruHkBVox7D85637j+TunOWVnf\nXgAAIABJREFUGkXlzM8M3MbvSksAyhq1xI/HuH4w6z5mhYcDKuHxHasyGMuVdnwYHk6xTI/bPzp+\nazIBLv0fj7V9gV0yiwn+BwGYK46gSKo2gvWONpZnhvie5urgNAqrJE85z8ePh6JKHwhBkceqLn4q\nNNn43cX3KjPCJ5LvmxlDyL9UBnCk/3G2ycZ8feTbfOOIENzrv7eOpeH+J4gHt1QZst2OHDgt0i/P\nhCeT75tJD58q9S5UelJEJg+vTOX6D1Zo9Zxk5B/kUO2Zs5QRlvI3NYmVji/fcw4DfM+xWOlCB98b\nrFLUb7VKtuWu0IVcGrgWgCxRydxwf7WehLneeTOMfzBuO1q2bM07XZ5mZmgU1wemEoyyhJytSd2D\nHGviPr8/Ydtg9gGNqyJeL3PD/bgtOIVCsnjxvAFc8kZEPdQ41cNlI9pz12cq8TnLucDQe2+TjWkp\niunt2GBxvzXw7SMw+nb45T3jGWfpBtqLuLE8ySv4ijvc/1Pdetf/wCBPjnFvsnMhL4WPJfnHZ6Bq\nE5z4jOqLWl0C6eqCIwR8H+7GUOdv5PtU/e/kwL10c2zG6WhL2zQvJaRb3vmb0oargtMolal0FNvZ\nLiOGwwuDN3He9j30a92Od8OjecgdUU0FQ2EcQvC5MpAtShP2kMon4SO4w/02r4eP5mjHT6SJaob7\nnyQ5yUu5EmaR0kP7buoC82poPI+vcAHq9bpJ/6FrTqR+twSnEE5vxW3V/+Tm4CX8mDyCt7wP0aXq\nRwBGdmnGgjWF1JCEGDwV//z5zFEGc8+JY/hjyyl89UcRjyyuoam3mC35U2AFBHFR7gsZoWZ6DhhB\n98WqOjSPElyECWSN5KPgaWyTjXH2Pp23tJAqi5UunO2az85GHdgoWkLQzQLZn9HiJ15WjmdpqB1r\nZQu2yqZ8HD6CJ0Ins0nm4QmG4dTXqPj8ftK2agT0xBncviKTBWuKmV7qo0/vo6H9aEI9z0C+6+CY\nwD+M7zAh8BCjs4rp13cALNjC+MA/AbisUzvuLGjOYqUr74dHclr2kew69VfOe/NXQOImTAgH/wod\nz1TXf/hFaUcvbZwO9z/Jp+edBt5k/E17MXh1xGnhkpoIga5UvGyXObQQJSxX2rETdUwWV/oNT8zi\nCnVxX9HtRhZtKucUp7on6fXweL5S+jCqjYezWnThle9U9fCJgftIET4mOxbSRJTxVngsIBjuf4qP\nu7RnXbEfNL+VAnII37kH572Z7JapbJNNGeR7lilje3PZwuFE4+E2M3iwtbqYc+a78Kraz5Uko0xb\nzlUPL4t55rHQaWRTYTglrFDaAfB4m2e5dngudBzHnK/X84/P13BhynfcdcFkrpgf4Ic1qnr59IDK\nqE4K3GdxFlgr1Xn5vdKNj8LDGef8iSVKFxY6B8LQsfz7jwBzfy/leY9KNG8MXkrAnYknK5NbQ5cY\n5SxJGcmgqq8Z7n+CbbIpg33P8L97Totpx/6GLcHsLRSFJjURguAYcQPuzOac2Kc547rl8vMd4/jP\nVcMZlJ/NB1OP4KLhbQFonpHEuEDEV//t0Fi+8NQRQPqeTJQ9VoLyslst44rQ9Sw6/VfODNwGwHmb\nb+ZCxxwjXwsR0bPe7n6b8Y4lZH53Pyx/C37TPKWqd0OyOul9QYWLgjcw0BcxLu8hjUVKD/xBhaZp\nXgJmFyDURXyDbE4p6SyRXfGZpItQci5rd1VSpdmrLgpE
DM1zU47DFwoTxsmIwJOcEPg7L4eP5aaW\nb/NY6FQmBB5kiP8ZQrjo2dKq9lqkdOf6wFQeDllVJrpjw6dD3mO9ksdFF13Opswh9Pa/xBxlMLnp\nXh7I/jukNuNleQJtG0dUbVkpEdVORiM3nbr3pWl+N0K4uM5/KeWpEVXQttIaY/Nls/QIh1xADlOD\n17Ktxs2z4Ul8ogy37HPy4SXfN5Mhpfdy5u5LCYYlQ/qqaiElKZM5ymDWSVXKuCZ4FZtkHo1TvQRC\nCv7cPvw+5mXyfTNpH3gH+p4Nma3ZQWMKylQJgHM/ItjtFKKh4OAnfwv+ucAaHSEnxUPAmcz74VGA\nYOvuasKGPU4QxEVakoeHQmfSx/c8JwTu5+rAlbwdGkNlo+Zkp6nSbJPUSJ8ne6whWWoCIW7RpNcC\nGWF4iir8xobEokrVU7DMmc0NwYhqKYiL9bIFmZ2G0bZJpK9KSWebbMrT4ZO4M3QhILjsqHZMP6YL\nvVtmkNukMTtlFh+EVCmiJhhmiO9pxvrVeVNIFg/O2wK37uCZts8akgKAP6kZpDaBu8ug8wRLW3Z7\nm1NDfHvl7aEpLG43jaB0Gnaon8IdoaOq6tKJ6Uz/kdBqILhjywniIicjsqt/o8xj6+RPeDJ0Ehtk\nc8b6/8nz4eOMjZ8rs8bxhTKIe3t8ATeu54PwSKqDYRqnWjetvpY5jX+1f5ZtUlWB7iKbJG/9Yg7u\nC2wCs7eo3EVyuJxHXVPgtl2MGXcsC6eP4skzVBVTVoqHni0zeH/qUNo2TsHpEPx273i+vnEUXdq2\n4duwyomfdPUj+FJjVR8+Vwa06G9cO75T3X87aCqVfIfqYjo/1JPNFSqHE5bxd8bNCJ1AeSNV9aBz\nOgB8cAELVmwgUF5IWNuE5guGqSGJImLtGL5QmBZZ6mayB3vM5lquZ4fMZpNsFpN3brgfv3p606Zp\nJhuKq6jRNqUuUPrR1vcW3XyvMD/cF18cR4k7zz2GWVeOIIQLEMy6/AjGdIk+x04wSxlBx1bWd+t2\np8KUTowJPEpui3xyNMIhBDRO9VJaHYQbfueh0Jl4XQ4+u/pI3poyOBJ9l8jepLyMyOa5JLeDl84b\nAKh2Mn0jYNP02Im6pSRiRxvfXa1j6+xk0uLs80lupdp9kpLiR7E+sU9zAPZUB/GHrBFx9TNPtpdG\nNoLqgRanaEyNjtLq2N3oHqeDrOTIYrS1tDrGCUQPFLoHdeH7VBnGbaEpNHI7Y/JAbOywXeV+flQ6\n83l4IM+YVF5FFX6jPcUagdGv2/neoq3vLWNfVCAsad841u5mRlayh8tHtkcIQausZIb6n+amkLrf\nZntpDTvJYXeU9I0nhTXubnypDOSswK309L3E4o2JvRZ3lfvi7tW6daLqADEv5yw6+t80JGy9Xea2\n+UMKVf5Q3OgAAJ2bqd/5+N5qv29J6aHNBVgnWxLGiS+oEAgpxhjcVu1i8S71u4/o2JicKAJTKlNj\n1N9/BmwCs7fQNlWWO7MNTqSu8A3JHhcel4PB7bK5OHgDj/b6Lx1z06hqoYrpb3V8gsDtpQxwzeLW\njp/CxfNhwBRLGSFcLCWymdCPR4s7Jhih6ZY/kUdxR4eIR8lrofG83i7+2Wwb//sIHoIUOtVF0F9L\nhGhfMMyEHnmcN7QNU8YP5Jbrb+L2tu9zw6QhMXkX9HkS70WzadcklQ1FlcaeIQCJg2qS2FHmixtu\nJ9Xrok+rCIHr3ybLImmYkZ5kney655wuXbgdDrI1AuN2qr9LqwMoiiQYlnhcDro1T0+4H6C5icB4\nXU7GdsvlvKFt2Lq72vhWSe7YIIobilQj+J3HdaOb5jq+pzpgEGgdpw9oBX3OhuOfYkXz07R6WsdR\nvtb20uqAJcBqWJGGJ+O20moCIYXL3/rJiAvXIrMRWclWaTMablfk+wBs3V0Tc+BY49T4nK6ZwJjz\nDG6bY8lXozEtU4PXskrmG+m7KvzGkRfF2k55nctXcCBx0FQjXJW+kEWCiYcxpr1jrbKTkVoZkNhx\nxhcM49OiRyxSelBBMteO65TwHYXl/rghgHJS1HoWVljjrZm3HJj77qqZy4yIEtGY3LcFb1w0iL+N\n6aiVGd+rdENxpUFgCiv8fLFKZTqzkj1GfXRM7JXHaQNaxZRxoGETmL2AElbY/qVqDK1xpdeROxZn\nD25DiyZZTBiiHpuT3/soevheojh3GB6Xg4H5WSzeuFvVxB73GExUxfoHg2fSs0UGP4ZU/e6/81X/\n/V+3qy6c22lCvm8mdzquwpWRx1PKafwnPIRCMllemWX1hjnlFchozZTA2wBs1ozKMdEJTJjQI48k\nt5N7T+xB07QkmqYl8coFAzl3SJuYvA+e3ItOzdJp3ySF0uogO/aoHHZL0wJbXOmnwldLYEcT9EXW\nHPDy+1tGM66bVbLRJ7QenNLlFDTPbGSkZSV7KK0KGAubJ9GpUhpyMyITVVd1tc5OpsIfMrhTj9PB\nGQNb8cDknrxzyRA65aayvkjd1JjscZKtSQiXjmhHi0wrgUlNcoHTBf3Pp7GmGjFLUvOuG0GHJqrL\ncUllwBJKZ2d5hEBvKqnmgc9WM2flTq54e5nR9mV3jOPLa0dwx3HdLO9N0tqiE10dW0urjWi7OqIJ\njK4Ca+SJL8Ec2ytWoo2H7aXqop+e5KIqEKY6EIphcHSHFF8oTG5arErpg6lDuXBYPpseOpaOuRHV\nUqts63feXBL/aIIbPljBL9usbvZd86yBJ8d3j4yxXeW+uCetZmsSgzkkEahjPGCSXHR89XsRa3ZW\nxIwHUDdej+jUhFxNMtbD40SjoMxnxNMrqvAbfXDGoFYWCearG0ZyzuDWdGuebjl24M+AbeTfC/y2\n6FN6rFd3V3tSs+vIHYsmaV4WXD/SuD6iQ2OemzKSgflqWUe0z2HOyp1s2V1Nm5wUGHQJq5P78+Jb\n27mkfQ7PbJ/EHplKi/YT8axdz0qNwHTLS+e3gnIcQrULPBiIeLZsLKlmfngyCoJpnv/QqNtkNWbR\n988AsCKUzxDAF1IY3qExr1wwkGBYoftd6s7ledcdRbs6VBTx0F5bHFftUB0UujRLZ1tpJKji5pL4\nnCWoxEjPqxOmK0a256kF68hJ8ZCX0Yhzh7QhGJbcN1sNW1KoTfCgKVCi/qwiISfVQ1UgbKgnzGF6\nAJ45q69lf5PX5cTtFATD0oi40DJLtTvoUorb6eChkyNn7PVvk22EpmnkceJyOoyJfd371mOT0kwS\nWDNtQTFLe80yGhlqsB17aizEZ3NJlUFgNhZXsbFYC/2iEW2nQyCEoFNumkWF9uipvXlwzhp8QT9u\npyDXZEPaujuiInvwpJ54nI6YBTg/J4XfCsqtKjITEcpOqVu3n57kMvq2RVYy5QXlFFcEYo7AePfS\noTw0ZzXXjOloccHXMTA/25g3ZkQTo60mCaZd4xQ2aN9q9i/qfqzcdK+xkEerwJ4+sx97qgMMemA+\nu8r9cc/5SfG
4SDO1SYeUsLPMR+uc5Lhx9sZ1y+XuE7rz+cqdTH3rJyDCYKR6XTRyOw3mLBo7y3wG\no1RU6afCF8TpEDRyOy1MQarXZdGu/G1MR3q1zIgp70DAlmD2At2GncgLyZfxXbg7nbom3jfREBzZ\nsYmhahnaXlXXzFq23Zhw5cltUHAwtH0OVTTi+fDxOD1e8hsnG+qggfmqHUUIEaOK0SfUjPAkTkx/\nDxwOGHs3M5rdS1vfW/xWpk4qfzBMsseJx+UgxTTRmqR5405wHYkiQnfRuMEfN6kqxZ4trANbXxTj\n4ZMrh/HltaqR1u10sO7vE7h2XCc+uXIYs68ebrR1yvC2fHvTKAa1zWZtoSo5hBXFWGCNuE+oiyPA\nb9pxBtESzHG9mnNqlCpB5/D1/mmtladLKdERc5tnRBa36O+SnmRVWaWZrptlxHLobqcgLzMJIWD7\nnhrLArzVZAeKB7O0Z3ZgaJOTTEYjl5bHYfk+20prjJMk+7XO4uT+LSMHyWnQ1ZVmCSYz2c3kvi2Y\nefFgw+ZlRlLUkRQtspINdZLOxRdV+gx1lY5mGUk8cUZfmmpE8OYJXThBs03UhuixqqvILjginznX\nHMmsy4+w3O+WF9FERBMYj8tB0/QkspLdFJTVxI3h5XSo7YgmMKCqL0FVkUU7QOjvOqZHM0Ma1yUd\nIQRN070Gc9Ayak4XlPmM/g+EFArKfKR4nAghLHa16DF47bhOjOkabdM8MLAJzF7A4RCcOPVelo9+\ng9OGdd3v5bdvkkKyx8lT89cy5bWlBMMKPm3QpSW5maQZfQvL/RzRPmI76K9xcrurAhbR27zQpCe5\nInpip5vFnqFIHMZi6Q8peE2c6dB2qj49Ol5aNN65dDDH9sqLSc/LaETr7GRDndSzpVWluLPcR+fc\nNJ46sy8XDWvLyf1aGvdyUr10Mqk9XE4HQgh6t8q0GN9B1bl3zk3jj50VKIokrESMzboUBdApV/29\ncpsq9dXnsDedI9e/ga5+2aB9M3dUGWZbQfQhadGLl/m7miWJYzTHALfDgdflpGmal22lNRY1y+aS\n6pgjB8xwmc4iaZ4ZKTs1yWV8P6dJwgOo9IeMIKn6sGmaFp/AmBdaIQSPn96HIzpYDcw64xytCmpm\ncozQ319Y7scfChtBKONh6lHteerMvrxzyRCeOzt2w6EZ5nG/TuurYR0a43U56dDUevZMvkk6T4lj\nxAfV+J7IAcDpcMRVd4GqdgR1bjWPpyLV0FUz7ptPys1NS2K7JsFcPLwt7182lFP6t6RpmpedZTWW\n/t9QXGUwLGbGqc4jAw4gbAKzl8hNT+LKUR3iGnj3FUIIpmsxghauK+bOT1YZody9LgdXjVbdZfu0\nzrR4Ch3VMbL5zCzBmCdTi6xkzRtJLU+3uWwoUk8k9AXDJJkG54vnD+DDK46os53922Tz7Fn9mHfd\nCGZeYo2HNdbELfU2uRvrxuy0JBcn9G7Oncd349HTrDu5G4IB+VlU+EOMevRrduypMRYY84KV3zgF\nt1OwcodKYOqywQD0ba3WWTfKpiW5yUp2G9JX9EFoR5r6Idpgf8agVuTnJBvcqtkGZV6gnjijD9/c\nOMrgxFtnJ7OxuMpQszRO9RiebF2aRYjwTcdENpKa3aObRKlMdIJTWh2IISCbtXbp7zZLOI+e2pvW\nOep1zHEVGpqa1FNdmqkMhX6K6JguTTmyY2POGtzGlCcNl0Ptk0BIsajeEmFo+xwm9IxlaMzQCXb7\nJils3a0u0np/RzuHmMd3ImZqRKcmRp9fNqIdZw6KeH+6HLFag+vHdcLrcrB2l0rcfMEwqV4Xz54V\nIYxmhuPK0R24+/huHGdi1Jqkew31nsvpYFDbbB45tTd5mY0oMKnIAP5IEI28Pt/zQMEmMIcozj8i\nn40PTmTqUe15Z8kWLn1T1c96XQ46NE1jwwMTGdW5Ka2yk1l082g+uuIIMkweQ41NenCzWkpfxArL\nrW6h1YEw2/fUUOUPWcT4VK+Lfq3rPkdDR4emaRapClTjNqiqo5xUL12apXFyv5bG4tPIs38mwJiu\nuaQnudhcUs2nK3bgNOmdp43uwCn9W+J2qt9v4Vo1Km19CMw1YzsyrEMOE3pGjNcdm6axqSQy8c0w\nE7Ro9UTLrGS+vnEUN47vTJrXZTEgNzVJMElup7GQA/Rrk8Uv2/YYtqN2jVOZ/UsBZTVBi779dJN6\nz2xfMuvg05LctNOkuppA2HCL1SVjvV36SZKtTQTm5P4tjT0WVf74DiFmN2U9+GrTNC9fXjuC58/t\nz5tTBjPIZDfJTHbTo0UG81cX4g8peFwOXrlgAB9dcURM2Q2BLq2aiZneH9Een2aX4UTeoCM7RbzU\nWmY1MmxUoEoJZgbhu5tHM21MRzrmpvL7LvVoan9IwetycGyvPMMGYiYIXpeTC4a1tYynzrlpxh4y\nM7OSl57EzjIfYUXSWZPya4Jhi0T07Fn9OKlvi1pV2wcaNoE5hCGE4MbxnRnbNTKwdRHbPGiaZzai\nr0YEvr1pFP+5ajgOhzAIS2cTh6tzroUmt9A2OckIAc//bwPlvlCMGL+vaJaRxKaHjmXRLWqYlM+v\nGcGjp/VmgGYzMquw9gWpXhdzrzuK2yaqakuzquP6ozvzyKmqdHTWoFamSVv3FGialsTbFw8xjPsA\n08ZENl3GO8pZd4hItFh1yk3j13vGGwu9jodP6cV1cdxkh7bLIRiWLFijRjQe3E5doNfsrMDlFIY7\nstleEk08LxyWD6jf6YIj8rl6dAfOHNSavIxGLL9zHA+e1Itkj5N52qmierPyomxDbTQ71o56HI43\nsrMqzW0srqZTbpqxeJqZIa/LydmDW7NmZwWfLN9BitfF6C65xpjeW9x9QncuHJbP2YMjkkY8huKc\nIa2ZNjo2ong0ujVPZ0IPlcnQVYy6O70/qNDFZMfRx0Tn3HR+36kSmCUbdxsuzrqasdJfuxfl4LYR\nQmw+frlZhkpgQmFJVorbGG9m5vDYXnk8dvr+sRHvLWwCc4jD6RC8eN4AVtx5NPOuOyqhflhHq+xk\nemrc0WhtX4BZQtCJhx6x2R9S6NUyk2O6N+PNH9TTCM17UA4kLjmyHaO7NOWcOG7Oe4vc9CQuPrIt\nb04ZxHuXxe7PAZWj1dVK0YtnfXFkxybcN6kHHpcjrnFej9yQ3qhhjpqnDmjF1dr+BzOGts+hZVYj\nwznh2rGdGNBGXYDDiuSbm0bx8x3qjvGOmkrULHmAuifnj/sn4HQIktxOrju6szGeMpM9NPI4LepM\nHS6ng1Svy3if7ihRW7/dN6kHmcluxnbNZWzXXC4anh+Tp79WXjCsMLlvC0PyG9Fp/5xR0iYnhbuO\n706S22moks3MwKsXDmT6MV24f1JP8jIacd+J3Y1NrYnw+Ol9eO3CyIGAz5zVlynD29K9ebrxfSAi\nxXXNS6Owws8b328CIraeaaM70Cw9iaM61R4ctbdpLpqPQG6lucov2bQb
RYEjOqi20mhHgIMOKeX/\n27/+/fvLvzL8wbCcuXiz3FMdkFfNXCbbTJ8tZ6/YIdtMny2fWbBWSinl0AfmyevfXy53ldXIcY99\nLYc+ME/6g+GDXPMDD18wJH/fWb7P5YTDSsJ7W3dX7XP5ZizZWCJPnvGdvOjVJVJKKRdvKJFtps+W\nU1770ZJvxdZS+dGybXv1jjUF5XLUw1/Jy95YKoOhyDjYUlIliyp8xnUwFJaKkrjt9cH364tlm+mz\n5frCCimllM99vU62mT5b/rC+eJ/KjYdQWJHf/lG038uNRpvps2Wb6bPl7kq/lFLKTcWVRlr/++bK\nPdWBBpe5oahSXvHWT5bxtKEoUm6b6bNlpS8o31i0UZZW+fdbW2oDsFTWY40V8s84N/MQxYABA+TS\npfHPLPmrocof4p0lWzhvaD7HPPENLbIa8eaUwfS7by4Tezbj/kk9CYYV/CElrqHQxqEHKSWfrtjB\noLbZMV51hwuklIYaUVEkvxWUGw4BhyNeXriR+2b/xh/3TzDUcaMf/ZoNRVVMGd42ZsPrvuC0579n\nycbdvDllkMWx5M+AEOInKeWAOvPZBOb/B4Ex47Evf+epBet49YKBXDlzGWcOar1fB74NGzYiWF1Q\nzsc/b+facZ32q9dpKBzZ6/Vno74E5pC0wQghThVCrBJCKEKIAVH3bhFCrBNC/C6EGG9K7y+E+FW7\n95Q4GF/9MMEVozrQOjuZBz5bTXUgHLMJzoYNG/sPXfPSuWVi1/2+pUHfF3Yo41BdWVYCJwHfmBOF\nEN2AM4DuwDHADCGE3mvPAZcAHbW/Y/602h5mSHI7uWVCF2PXe5Lr4PnJ27Bh46+LQ5LASClXSyl/\nj3PrROBdKaVfSrkRWAcMEkLkAelSyh80A9QbwKQ4z9vQcEyPZtw3qQcuh7AECbRhw4aN/YXDzZrb\nAvjBdL1NSwtqv6PTYyCEuBS4FKB168RH0P7VIYTg3CFt4kZCtmHDho39gYNGYIQQ84B4cb1vk1J+\ncqDeK6V8AXgBVCP/gXqPDRs2bPx/x0EjMFLKOs4JjovtgDnUbUstbbv2Ozrdhg0bNmwcJBySNpha\n8ClwhhDCK4Roi2rMXyKlLADKhRBDNO+x84ADJgXZsGHDho26cUjugxFCTAaeBpoAe4DlUsrx2r3b\ngIuAEHCNlHKOlj4AeA1oBMwBpsk6GieEKAI270NVGwPF+/D8oYK/SjvAbsuhiL9KO8Bui442Uso6\nd3cekgTmcIEQYml9Nhsd6virtAPsthyK+Ku0A+y2NBSHm4rMhg0bNmwcJrAJjA0bNmzYOCCwCcy+\n4YWDXYH9hL9KO8Buy6GIv0o7wG5Lg2DbYGzYsGHDxgGBLcHYsGHDho0DApvA7AWEEMdo0ZzXCSFu\nPtj1qQ+EEJu0aNPLhRBLtbRsIcRcIcRa7X+WKX/cqNUHod6vCCEKhRArTWkNrvehEG07QVvuFkJs\n1/pluRBi4qHeFiFEKyHEV0KI37So53/T0g+7fqmlLYdjvyQJIZYIIVZobblHSz94/VKfU8nsv8gf\n4ATWA+0AD7AC6Haw61WPem8CGkel/RO4Wft9M/AP7Xc3rV1eoK3WXudBqvcIoB+wcl/qDSwBhgAC\ndZ/UhEOkLXcDN8TJe8i2BcgD+mm/04A/tPoedv1SS1sOx34RQKr22w0s1upz0PrFlmAajkHAOinl\nBillAHgXNcrz4YgTgde1368TiUAdN2r1QagfUspvgN1RyQ2qtzhEom0naEsiHLJtkVIWSCmXab8r\ngNWowWUPu36ppS2JcCi3RUopK7VLt/YnOYj9YhOYhqMFsNV0nTBy8yEGCcwTQvwk1IjSALlSDbMD\nsBPI1X4f6m1saL1bUM9o2wcJ04QQv2gqNF19cVi0RQiRD/RF5ZYP636Jagschv0ihHAKIZYDhcBc\nKeVB7RebwPz/wXApZR9gAnClEGKE+abGqRx2LoWHa71NeA5V3doHKAAePbjVqT+EEKnALNSQTeXm\ne4dbv8Rpy2HZL1LKsDbPW6JKIz2i7v+p/WITmIYjUUTnQxpSyu3a/0LgI1SV1y5NHEb7X6hlP9Tb\n2NB6H7LRtqWUu7RFQQFeJKKKPKTbIoRwoy7Ib0spP9SSD8t+ideWw7VfdEgp9wBfoZ7se9D6xSYw\nDcePQEchRFshhAf1COdPD3KdaoUQIkUIkab/Bo5GPZb6U+B8Ldv5RCJQx41a/efWulY0qN7yEI62\nrU98DZNR+wUO4bZo730ZWC2lfMx067Drl0RtOUz7pYkQIlP73QgYB6zhYPbLn+nl8FfLmo9BAAAD\nUUlEQVT5AyaiepusRz0g7aDXqY76tkP1FlkBrNLrDOQA84G1wDwg2/TMbVr7fucgeFyZ6vEOqopC\nP7V0yt7UGxiAukisB55B22R8CLTlTeBX4Bdtwucd6m0BhqOqWX4Blmt/Ew/HfqmlLYdjv/QCftbq\nvBK4U0s/aP1i7+S3YcOGDRsHBLaKzIYNGzZsHBDYBMaGDRs2bBwQ2ATGhg0bNmwcENgExoYNGzZs\nHBDYBMaGDRs2bBwQ2ATGhg0bNmwcENgExoaNvYQQIlMIcYXpurkQ4t8H6F2ThBB37odyHhFCjN4f\ndbJhoy7Y+2Bs2NhLaMERZ0spe9SRdX+8axFwgpSyeB/LaQO8KKU8ev/UzIaNxLAlGBs29h4PAe21\nA6keFkLkC+0wMSHEBUKIj7UDnjYJIa4SQlwnhPhZCPGDECJby9deCPG5FuX6WyFEl+iXCCE6AX6d\nuAghXhNCPKeVs0EIMVKL+LtaCPGalsep5VupHRx1LYCUcjOQI4Ro9ud8Ihv/n+E62BWwYeMwxs1A\nD6lGr9UlGjN6oIZ/T0I9a2O6lLKvEOJx1PhOTwAvAFOllGuFEIOBGUC0CmsYsCwqLQsYCpyAGspk\nGHAx8KMQog/qwXgtdOlKj1GlYZmWf9beNduGjfrBJjA2bBw4fCXVQ6wqhBBlwH+09F+BXlqI+COA\nD0wn0nrjlJMHFEWl/UdKKYUQvwK7pJS/AgghVgH5wP+AdkKIp4H/Al+ani0Emu9r42zYqAs2gbFh\n48DBb/qtmK4V1LnnAPboElAtqAEyEpRtLtcoW0pZKoToDYwHpgKnARdpeZK0Mm3YOKCwbTA2bOw9\nKlDPcd8rSPVgq41CiFNBDR2vEYVorAY6NKRsIURjwCGlnAXcDvQz3e5EJPy8DRsHDDaBsWFjLyGl\nLAG+0wzpD+9lMWcDU4QQ+lEKJ8bJ8w3QV5j0aPVAC+Br7fjct4BbwDhcqwOwdC/ra8NGvWG7Kduw\ncRhACPEkqt1l3j6WMxnoJ6W8Y//UzIaNxLAlGBs2Dg88ACTvh3JcHCbny9s4/GFLMDZs2LBh44DA\nlmBs2LBhw8YBgU1gbNiwYcPGAYFNYGzYsGHDxgGBTWBs2LBhw8YBgU1gbNiwYcPGAcH/AQ2m9xVJ\n6bsOAAAAAElFTkSuQmCC\n"
,
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from bmtk.analyzer.cell_vars import plot_report\n",
+ "\n",
+ "plot_report(config_file='simulation_config.json', gids=[10, 80])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "anaconda-cloud": {},
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.13"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/bmtk-vb/docs/tutorial/05_pointnet_modeling.ipynb b/bmtk-vb/docs/tutorial/05_pointnet_modeling.ipynb
new file mode 100644
index 0000000..6d373ac
--- /dev/null
+++ b/bmtk-vb/docs/tutorial/05_pointnet_modeling.ipynb
@@ -0,0 +1,997 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Chapter 5: Point-Neuron Network Models (with PointNet)\n",
+ "\n",
+ "In this chapter we will create a heterogeneous network of point-model neurons and use the PointNet simulator which will run the network using the NEST simulator. As with the previous BioNet examples will create both a internal recurrently-connected network of different node types, and an external network of \"virtual\" neurons that will drive the firing of the internal neurons. And we'll show how to drive network activity by using a current clamp.\n",
+ "\n",
+ "PointNet, like BioNet and the other simulators, use the SONATA data format for representing networks, setting up simulations and saving results. Thus the tools used to build and display biophysically detailed networks in the previous chapters will work just the same. \n",
+ "\n",
+ "Requirements:\n",
+ "* bmtk\n",
+ "* NEST 2.11+"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 1. Building the network\n",
+ "\n",
+ "There are two ways of generating a network of point-neurons. Either we can take the existing biophysical network created in the previous chapters and make some minor adjustments to the neuron models being used. Or we can build a new network from scratch using the BMTK Builder.\n",
+ "\n",
+ "### Converting networks\n",
+ "We want to take the BioNet V1 network and change parameters so that the individual neurons are using point models. Luckily there parameters are stored in the node and edge \"types\" csv files, thus we can easily change them with a simple text editor (emacs, vi, sublime-text, etc). Here is an example of the old *V1_node_types.csv*: "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
"
+ ],
+ "text/plain": [
+ " node_type_id ei morphology_file model_processing pop_name location \\\n",
+ "0 100 e NaN NaN Scnn1a L4 \n",
+ "1 101 i NaN NaN PV L4 \n",
+ "2 102 e NaN NaN LIF_exc VisL4 \n",
+ "3 103 i NaN NaN LIF_inh VisL4 \n",
+ "\n",
+ " model_template model_type dynamics_params \n",
+ "0 nest:iaf_psc_alpha point_process 472363762_point.json \n",
+ "1 nest:iaf_psc_alpha point_process 472912177_point.json \n",
+ "2 nest:iaf_psc_alpha point_process IntFire1_exc_point.json \n",
+ "3 nest:iaf_psc_alpha point_process IntFire1_inh_point.json "
+ ]
+ },
+ "execution_count": 2,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "pd.read_csv('sources/chapter05/converted_network/V1_node_types.csv', sep=' ')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Changes:\n",
+ "* **model_type** - PointNet will not support the \"biophysical\" model_type and only support \"point_process\" neuron models.\n",
+ "* **model_template** - nrn:IntFire1 and ctdb:Biophys1.hoc are special directives for running NEURON based models. Instead we replaced them with the \"nest:\\\" directive (note we can replace iaf_psc_alpha with any valid NEST model).\n",
+ "* **dynamics_params** - We have new json parameters files for the new NEST based models.\n",
+ "* **model_processing** - \"aibs_perisomatic\" is a special command for adjusting the morphology of biophysical models, and since our NEST-based models do not have a morphology we set it to none which tells the simulator to use the models as-is (note: you can implement custom model_processing functions for PointNet that will be explained later)."
+ ]
+ },
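+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Here is that sketch, done with pandas rather than a text editor. The input path is an assumption (the original BioNet files are taken to live under network/), and dynamics_params is left alone since each row needs its own json file name:\n",
+ "\n",
+ "```python\n",
+ "import pandas as pd\n",
+ "\n",
+ "node_types = pd.read_csv('network/V1_node_types.csv', sep=' ')  # path is an assumption\n",
+ "\n",
+ "# Point-process models have no morphology and need no NEURON-specific processing\n",
+ "node_types['model_type'] = 'point_process'\n",
+ "node_types['model_template'] = 'nest:iaf_psc_alpha'\n",
+ "node_types['model_processing'] = None\n",
+ "node_types['morphology_file'] = None\n",
+ "\n",
+ "node_types.to_csv('V1_node_types.csv', sep=' ', index=False, na_rep='NaN')\n",
+ "```"
+ ]
+ },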
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "We must also adjust the *edges_types.csv* files:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "scrolled": false
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
\n",
+ "
edge_type_id
\n",
+ "
target_query
\n",
+ "
source_query
\n",
+ "
syn_weight
\n",
+ "
dynamics_params
\n",
+ "
distance_range
\n",
+ "
delay
\n",
+ "
target_sections
\n",
+ "
weight_function
\n",
+ "
model_template
\n",
+ "
weight_sigma
\n",
+ "
\n",
+ " \n",
+ " \n",
+ "
\n",
+ "
0
\n",
+ "
100
\n",
+ "
pop_name=='Scnn1a'
\n",
+ "
ei=='e'
\n",
+ "
50.00000
\n",
+ "
ExcToExc.json
\n",
+ "
NaN
\n",
+ "
2.0
\n",
+ "
NaN
\n",
+ "
gaussianLL
\n",
+ "
static_synapse
\n",
+ "
50.0
\n",
+ "
\n",
+ "
\n",
+ "
1
\n",
+ "
101
\n",
+ "
pop_name=='LIF_exc'
\n",
+ "
ei=='e'
\n",
+ "
50.00000
\n",
+ "
instanteneousExc.json
\n",
+ "
NaN
\n",
+ "
2.0
\n",
+ "
NaN
\n",
+ "
gaussianLL
\n",
+ "
static_synapse
\n",
+ "
50.0
\n",
+ "
\n",
+ "
\n",
+ "
2
\n",
+ "
102
\n",
+ "
model_type=='biophysical'&ei=='i'
\n",
+ "
ei=='i'
\n",
+ "
50.00000
\n",
+ "
InhToInh.json
\n",
+ "
NaN
\n",
+ "
2.0
\n",
+ "
NaN
\n",
+ "
wmax
\n",
+ "
static_synapse
\n",
+ "
NaN
\n",
+ "
\n",
+ "
\n",
+ "
3
\n",
+ "
103
\n",
+ "
model_type=='point_process'&ei=='i'
\n",
+ "
ei=='i'
\n",
+ "
50.00000
\n",
+ "
instanteneousInh.json
\n",
+ "
NaN
\n",
+ "
2.0
\n",
+ "
NaN
\n",
+ "
wmax
\n",
+ "
static_synapse
\n",
+ "
NaN
\n",
+ "
\n",
+ "
\n",
+ "
4
\n",
+ "
104
\n",
+ "
model_type=='biophysical'&ei=='e'
\n",
+ "
ei=='i'
\n",
+ "
50.00000
\n",
+ "
InhToExc.json
\n",
+ "
NaN
\n",
+ "
2.0
\n",
+ "
NaN
\n",
+ "
wmax
\n",
+ "
static_synapse
\n",
+ "
NaN
\n",
+ "
\n",
+ "
\n",
+ "
5
\n",
+ "
105
\n",
+ "
model_type=='point_process'&ei=='e'
\n",
+ "
ei=='i'
\n",
+ "
30.00000
\n",
+ "
instanteneousInh.json
\n",
+ "
NaN
\n",
+ "
2.0
\n",
+ "
NaN
\n",
+ "
wmax
\n",
+ "
static_synapse
\n",
+ "
NaN
\n",
+ "
\n",
+ "
\n",
+ "
6
\n",
+ "
106
\n",
+ "
pop_name=='PV'
\n",
+ "
ei=='e'
\n",
+ "
0.00035
\n",
+ "
ExcToInh.json
\n",
+ "
NaN
\n",
+ "
2.0
\n",
+ "
NaN
\n",
+ "
wmax
\n",
+ "
static_synapse
\n",
+ "
NaN
\n",
+ "
\n",
+ "
\n",
+ "
7
\n",
+ "
107
\n",
+ "
pop_name=='LIF_inh'
\n",
+ "
ei=='e'
\n",
+ "
50.00000
\n",
+ "
instanteneousExc.json
\n",
+ "
NaN
\n",
+ "
2.0
\n",
+ "
NaN
\n",
+ "
wmax
\n",
+ "
static_synapse
\n",
+ "
NaN
\n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " edge_type_id target_query source_query syn_weight \\\n",
+ "0 100 pop_name=='Scnn1a' ei=='e' 50.00000 \n",
+ "1 101 pop_name=='LIF_exc' ei=='e' 50.00000 \n",
+ "2 102 model_type=='biophysical'&ei=='i' ei=='i' 50.00000 \n",
+ "3 103 model_type=='point_process'&ei=='i' ei=='i' 50.00000 \n",
+ "4 104 model_type=='biophysical'&ei=='e' ei=='i' 50.00000 \n",
+ "5 105 model_type=='point_process'&ei=='e' ei=='i' 30.00000 \n",
+ "6 106 pop_name=='PV' ei=='e' 0.00035 \n",
+ "7 107 pop_name=='LIF_inh' ei=='e' 50.00000 \n",
+ "\n",
+ " dynamics_params distance_range delay target_sections \\\n",
+ "0 ExcToExc.json NaN 2.0 NaN \n",
+ "1 instanteneousExc.json NaN 2.0 NaN \n",
+ "2 InhToInh.json NaN 2.0 NaN \n",
+ "3 instanteneousInh.json NaN 2.0 NaN \n",
+ "4 InhToExc.json NaN 2.0 NaN \n",
+ "5 instanteneousInh.json NaN 2.0 NaN \n",
+ "6 ExcToInh.json NaN 2.0 NaN \n",
+ "7 instanteneousExc.json NaN 2.0 NaN \n",
+ "\n",
+ " weight_function model_template weight_sigma \n",
+ "0 gaussianLL static_synapse 50.0 \n",
+ "1 gaussianLL static_synapse 50.0 \n",
+ "2 wmax static_synapse NaN \n",
+ "3 wmax static_synapse NaN \n",
+ "4 wmax static_synapse NaN \n",
+ "5 wmax static_synapse NaN \n",
+ "6 wmax static_synapse NaN \n",
+ "7 wmax static_synapse NaN "
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "pd.read_csv('sources/chapter05/converted_network/V1_V1_edge_types.csv', sep=' ')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "* **model_template** has been changed to use a NEST based model type (static_synapse)\n",
+ "* Use different **dynamics_parameter** files \n",
+ "* It's important to readjust **syn_weight** as values appropiate for NEURON based models are oftern wrong for NEST based models. \n",
+ "\n",
+ "Notice we don't have to change any of the hdf5 files. The network topology remains the same making it a powerful tool for comparing networks of different levels of resolution."
+ ]
+ },
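+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Here is that sketch. The input path and the scale factor are illustrative assumptions only; appropriate NEST weights really need to be tuned per edge type:\n",
+ "\n",
+ "```python\n",
+ "import pandas as pd\n",
+ "\n",
+ "edge_types = pd.read_csv('network/V1_V1_edge_types.csv', sep=' ')  # path is an assumption\n",
+ "\n",
+ "# Swap the NEURON synapse templates for NEST's static_synapse and rescale the weights\n",
+ "edge_types['model_template'] = 'static_synapse'\n",
+ "edge_types['syn_weight'] *= 1.0e4  # illustrative factor only, not a recommended value\n",
+ "\n",
+ "edge_types.to_csv('V1_V1_edge_types.csv', sep=' ', index=False, na_rep='NaN')\n",
+ "```"
+ ]
+ },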
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "### Building a model from scratch.\n",
+ "\n",
+ "We can use the BMTK Network Builder to create new network files just for point-based modeling\n",
+ "\n",
+ "#### V1 Network\n",
+ "\n",
+ "First lets build a \"V1\" network of 300 cells, split into 4 different populations "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "from bmtk.builder.networks import NetworkBuilder\n",
+ "from bmtk.builder.aux.node_params import positions_columinar\n",
+ "\n",
+ "net = NetworkBuilder(\"V1\")\n",
+ "net.add_nodes(N=80, # Create a population of 80 neurons\n",
+ " positions=positions_columinar(N=80, center=[0, 50.0, 0], max_radius=30.0, height=100.0),\n",
+ " pop_name='Scnn1a', location='VisL4', ei='e', # optional parameters\n",
+ " model_type='point_process', # Tells the simulator to use point-based neurons\n",
+ " model_template='nest:iaf_psc_alpha', # tells the simulator to use NEST iaf_psc_alpha models\n",
+ " dynamics_params='472363762_point.json' # File containing iaf_psc_alpha mdoel parameters\n",
+ " )\n",
+ "\n",
+ "net.add_nodes(N=20, pop_name='PV', location='VisL4', ei='i',\n",
+ " positions=positions_columinar(N=20, center=[0, 50.0, 0], max_radius=30.0, height=100.0),\n",
+ " model_type='point_process',\n",
+ " model_template='nest:iaf_psc_alpha',\n",
+ " dynamics_params='472912177_point.json')\n",
+ "\n",
+ "net.add_nodes(N=200, pop_name='LIF_exc', location='L4', ei='e',\n",
+ " positions=positions_columinar(N=200, center=[0, 50.0, 0], min_radius=30.0, max_radius=60.0, height=100.0),\n",
+ " model_type='point_process',\n",
+ " model_template='nest:iaf_psc_alpha',\n",
+ " dynamics_params='IntFire1_exc_point.json')\n",
+ "\n",
+ "net.add_nodes(N=100, pop_name='LIF_inh', location='L4', ei='i',\n",
+ " positions=positions_columinar(N=100, center=[0, 50.0, 0], min_radius=30.0, max_radius=60.0, height=100.0),\n",
+ " model_type='point_process',\n",
+ " model_template='nest:iaf_psc_alpha',\n",
+ " dynamics_params='IntFire1_inh_point.json')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "We can now go ahead and created synaptic connections then build and save our network."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "from bmtk.builder.aux.edge_connectors import distance_connector\n",
+ "\n",
+ "## E-to-E connections\n",
+ "net.add_edges(source={'ei': 'e'}, target={'pop_name': 'Scnn1a'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 0.34, 'd_max': 300.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=5.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='ExcToExc.json',\n",
+ " model_template='static_synapse')\n",
+ "\n",
+ "net.add_edges(source={'ei': 'e'}, target={'pop_name': 'LIF_exc'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 0.34, 'd_max': 300.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=-1.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='instanteneousExc.json',\n",
+ " model_template='static_synapse')\n",
+ "\n",
+ "\n",
+ "### Generating I-to-I connections\n",
+ "net.add_edges(source={'ei': 'i'}, target={'pop_name': 'PV'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 1.0, 'd_max': 160.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=-1.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='InhToInh.json',\n",
+ " model_template='static_synapse')\n",
+ "\n",
+ "net.add_edges(source={'ei': 'i'}, target={'ei': 'i', 'pop_name': 'LIF_inh'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 1.0, 'd_max': 160.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=10.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='instanteneousInh.json',\n",
+ " model_template='static_synapse')\n",
+ "\n",
+ "### Generating I-to-E connections\n",
+ "net.add_edges(source={'ei': 'i'}, target={'ei': 'e', 'pop_name': 'Scnn1a'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 1.0, 'd_max': 160.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=-15.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='InhToExc.json',\n",
+ " model_template='static_synapse')\n",
+ "\n",
+ "net.add_edges(source={'ei': 'i'}, target={'ei': 'e', 'pop_name': 'LIF_exc'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 1.0, 'd_max': 160.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=-15.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='instanteneousInh.json',\n",
+ " model_template='static_synapse')\n",
+ "\n",
+ "### Generating E-to-I connections\n",
+ "net.add_edges(source={'ei': 'e'}, target={'pop_name': 'PV'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 0.26, 'd_max': 300.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=15.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='ExcToInh.json',\n",
+ " model_template='static_synapse')\n",
+ "\n",
+ "\n",
+ "net.add_edges(source={'ei': 'e'}, target={'pop_name': 'LIF_inh'},\n",
+ " connection_rule=distance_connector,\n",
+ " connection_params={'d_weight_min': 0.0, 'd_weight_max': 0.26, 'd_max': 300.0, 'nsyn_min': 3, 'nsyn_max': 7},\n",
+ " syn_weight=5.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='instanteneousExc.json',\n",
+ " model_template='static_synapse')"
+ ]
+ },
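+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "For intuition, a distance-based rule like distance_connector decides for each source/target pair whether to connect, and with how many synapses, with the connection probability falling off with the distance between the somata. A rough sketch of such a rule (illustrative only; see bmtk.builder.aux.edge_connectors for the real implementation, and note this sketch assumes source and target expose 'node_id' and 'positions' keys):\n",
+ "```python\n",
+ "import numpy as np\n",
+ "\n",
+ "def distance_connector_sketch(source, target, d_weight_min, d_weight_max,\n",
+ "                              d_max, nsyn_min, nsyn_max):\n",
+ "    # No self-connections; no connections beyond d_max.\n",
+ "    if source['node_id'] == target['node_id']:\n",
+ "        return None\n",
+ "    dist = np.linalg.norm(np.array(source['positions']) - np.array(target['positions']))\n",
+ "    if dist >= d_max:\n",
+ "        return None\n",
+ "    # Probability decays linearly from d_weight_max at distance 0\n",
+ "    # down to d_weight_min at distance d_max.\n",
+ "    prob = d_weight_max - (d_weight_max - d_weight_min) * (dist / d_max)\n",
+ "    if np.random.random() < prob:\n",
+ "        return np.random.randint(nsyn_min, nsyn_max)  # number of synapses\n",
+ "    return None\n",
+ "```"
+ ]
+ },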
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "net.build()\n",
+ "net.save_nodes(output_dir='network')\n",
+ "net.save_edges(output_dir='network')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "collapsed": true
+ },
+ "source": [
+ "### Building external network\n",
+ "\n",
+ "Next we want to create an external network of \"virtual cells\" with spike-trains that will synapse onto our V1 cells and drive activity. We will call this external network \"LGN\" and contains 500 excitatory cells."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "lgn = NetworkBuilder('LGN')\n",
+ "lgn.add_nodes(N=500, pop_name='tON', potential='exc', model_type='virtual')\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "We will use a special function for setting the number of synapses between the LGN --> V1 cells. The select_source_cells function will be called during the build process."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 8,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "import numpy as np\n",
+ "\n",
+ "def select_source_cells(sources, target, nsources_min=10, nsources_max=30, nsyns_min=3, nsyns_max=12):\n",
+ " total_sources = len(sources)\n",
+ " nsources = np.random.randint(nsources_min, nsources_max)\n",
+ " selected_sources = np.random.choice(total_sources, nsources, replace=False)\n",
+ " syns = np.zeros(total_sources)\n",
+ " syns[selected_sources] = np.random.randint(nsyns_min, nsyns_max, size=nsources)\n",
+ " return syns\n",
+ "\n",
+ "lgn.add_edges(source=lgn.nodes(), target=net.nodes(pop_name='Scnn1a'),\n",
+ " iterator='all_to_one',\n",
+ " connection_rule=select_source_cells,\n",
+ " connection_params={'nsources_min': 10, 'nsources_max': 25},\n",
+ " syn_weight=20.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='ExcToExc.json',\n",
+ " model_template='static_synapse')\n",
+ "\n",
+ "lgn.add_edges(source=lgn.nodes(), target=net.nodes(pop_name='PV1'),\n",
+ " connection_rule=select_source_cells,\n",
+ " connection_params={'nsources_min': 15, 'nsources_max': 30},\n",
+ " iterator='all_to_one',\n",
+ " syn_weight=20.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='ExcToInh.json',\n",
+ " model_template='static_synapse')\n",
+ "\n",
+ "lgn.add_edges(source=lgn.nodes(), target=net.nodes(pop_name='LIF_exc'),\n",
+ " connection_rule=select_source_cells,\n",
+ " connection_params={'nsources_min': 10, 'nsources_max': 25},\n",
+ " iterator='all_to_one',\n",
+ " syn_weight= 10.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='instanteneousExc.json',\n",
+ " model_template='static_synapse')\n",
+ "\n",
+ "lgn.add_edges(source=lgn.nodes(), target=net.nodes(pop_name='LIF_inh'),\n",
+ " connection_rule=select_source_cells,\n",
+ " connection_params={'nsources_min': 15, 'nsources_max': 30},\n",
+ " iterator='all_to_one',\n",
+ " syn_weight=10.0,\n",
+ " delay=2.0,\n",
+ " dynamics_params='instanteneousExc.json',\n",
+ " model_template='static_synapse')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Finally we build and save our lgn network."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "lgn.build()\n",
+ "lgn.save_nodes(output_dir='network')\n",
+ "lgn.save_edges(output_dir='network')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 2. Setting up PointNet Environment\n",
+ "\n",
+ "#### Directory Structure\n",
+ "\n",
+ "Before running a simulation, we will need to create the runtime environment, including parameter files, run-script and configuration files. If using the tutorial these files will already be in place. Otherwise we can use a command-line:\n",
+ "```bash\n",
+ "$ python -m bmtk.utils.sim_setup -n network --membrane_report-vars V_m --membrane_report-cells 0,80,100,300 --tstop 3000.0 pointnet\n",
+ "```\n",
+ "\n",
+ "The network files are written to *circuit_config.json* and the simulation parameters are set in *simulation_config*. The simulation time is set to run for 3000.0 ms (tstop). We also specify a membrane-report to record V_m property of 4 cells (gids 0, 80, 100, 300 - one from each cell-type). In general, all the parameters needed to setup and start a simulation are found in the config files, and adjusting network/simulation conditions can be done by editing these json files in a text editor."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### lgn input\n",
+ "\n",
+ "We need to provide our LGN external network cells with spike-trains so they can activate our recurrent network. Previously we showed how to do this by generating csv files. We can also use NWB files, which are a common format for saving electrophysiological data in neuroscience.\n",
+ "\n",
+ "We can use any NWB file generated experimentally or computationally, but for this example we will use a preexsting one. First download the file:\n",
+ "```bash\n",
+ " $ wget https://github.com/AllenInstitute/bmtk/raw/develop/docs/examples/NWB_files/lgn_spikes.nwb\n",
+ "```\n",
+ "Then we must edit the simulation_config.json file to tell the simulator to find the nwb file and which network to associate it with.\n",
+ "\n",
+ "```json\n",
+ "\n",
+ "\"inputs\": {\n",
+ " \"LGN_spikes\": {\n",
+ " \"input_type\": \"spikes\",\n",
+ " \"module\": \"nwb\",\n",
+ " \"input_file\": \"$BASE_DIR/lgn_spikes.nwb\",\n",
+ " \"node_set\": \"LGN\",\n",
+ " \"trial\": \"trial_0\"\n",
+ " }\n",
+ "},\n",
+ "```\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 3. Running the simulation\n",
+ "\n",
+ "The call to sim_setup created a file run_pointnet.py which we can run directly in a command line:\n",
+ "```bash\n",
+ "$ python run_pointnet.py config.json\n",
+ "```\n",
+ "or if you have mpi setup:\n",
+ "\n",
+ "```bash\n",
+ "$ mpirun -np $NCORES python run_pointnet.py config.json\n",
+ "```\n",
+ "\n",
+ "Or we can run it directly"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "2018-09-19 17:25:44,305 [INFO] Created log file\n",
+ "2018-09-19 17:25:44,338 [INFO] Batch processing nodes for V1.\n",
+ "2018-09-19 17:25:44,355 [INFO] Batch processing nodes for LGN.\n",
+ "2018-09-19 17:25:44,379 [INFO] Setting up output directory\n",
+ "2018-09-19 17:25:44,380 [INFO] Building cells.\n",
+ "2018-09-19 17:25:44,395 [INFO] Building recurrent connections\n",
+ "2018-09-19 17:25:45,524 [INFO] Build virtual cell stimulations for LGN_spikes\n",
+ "2018-09-19 17:25:45,682 [INFO] Network created.\n",
+ "2018-09-19 17:25:45,849 [INFO] Starting Simulation\n",
+ "2018-09-19 17:26:19,515 [INFO] Simulation finished, finalizing results.\n",
+ "2018-09-19 17:27:17,135 [INFO] Done.\n"
+ ]
+ }
+ ],
+ "source": [
+ "from bmtk.simulator import pointnet\n",
+ "\n",
+ "configure = pointnet.Config.from_json('simulation_config.json')\n",
+ "configure.build_env()\n",
+ "network = pointnet.PointNetwork.from_config(configure)\n",
+ "sim = pointnet.PointSimulator.from_config(configure, network)\n",
+ "sim.run()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 4. Analyzing results\n",
+ "\n",
+ "Results of the simulation, as specified in the config, are saved into the output directory. Using the analyzer functions, we can do things like plot the raster plot"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAELCAYAAADOeWEXAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXt8VdWZN/7dCZcEAoQAAnJpQA83Caho0XZqRUEdNWYU\n6+iEis5Yp21o0xrtiM7byPtqsSpOY7H2Yo20Wv3p0BG1nRqwWnrxAlIURTmHq4AICCbcb8nz+2Od\ndfZzVvbeZ1/WOgm4v5/PTvbZl/Wstfbea63nbhERYsSIESNGDL8o6OgKxIgRI0aM4wvxxBEjRowY\nMQIhnjhixIgRI0YgxBNHjBgxYsQIhHjiiBEjRowYgRBPHDFixIgRIxDyMnFYllVoWdbfLct6Mf27\nzLKsxZZlpdL/+7JrZ1uWtdayrDWWZV2cj/rFiBEjRgz/yBfHUQvgffb7dgAvE1ECwMvp37AsaxyA\nawGcBuASAD+xLKswT3WMESNGjBg+YHzisCxrKIDLADzKDlcBWJDeXwDgn9jxp4noMBFtALAWwOdN\n1zFGjBgxYvhHPjiOHwH4HoA2dmwgEW1L738MYGB6fwiAzey6LeljMWLEiBGjk6CLycIty7ocwA4i\nesuyrPOdriEisiwrUNwTy7JuBnAzAPTs2XPSmDFjItf1RMFmfIid2AkAsACcigR6obfr9YdxGDuw\nHSdhILqje2B6Xvevw1q0oAUAUIYylGNE4PJz0T2EQ9iLvQCAruiKCkyIVC7vPwDohm4Yj4qcddmG\njwAAg3GyYz/K+h7BkUyfFKAAYzEOALAD21GKUjSjOfKzaManOIpjAPT3u4qN2IDd2A0A6IouqMDE\n0GXtxR5swRYMxVDXd3YV3s60bQAGYBiGh6YXtD5RvhW3e/nxjdiA/dgPwPxzA4C33nrrEyIaEOpm\nIjK2AZgLwTVshOAsDgB4AsAaAIPT1wwGsCa9PxvAbHb/SwDO9aIxadIkimEjRUnqQQVURKAiAo2m\ncs/ra6mGighUSzWh6HndP5rKM/WYTpWhyk9RkmqphlKUzDp+A1VTEYHKqGeGRikVh6Kh0uvHyhxJ\nQ3LeI/vAqx/lNRU0KnNtEYFuoOrMuUlUoeVZ8Od/A1WHKssv5HOQdKNAtn8SVbhew9umvhO6MT79\nrMbTKCKK9q243cuP96TCTNsuoQu0tMELAJZT2LE97I2BCQHnA3gxvX8/gNvT+7cDuC+9fxqAtwF0\nBzACwHoAhV7lxhOHjRQlsz5kPwOf28Ds93qv+y+hCzL1OI/OCd4gcv/gZDuH08AMjf7UKxQNjhQl\nsyajz9PErHNObZX9fgNVu/ajvHcBNWY9n0toSubeJdQU6Fk40VCf/xJq8qx7VKQomaHVkwojlbWE\nmmgSVWTq7IRisqiIQMVkRaLlB9PpivSi5woiitaHXu+OPN6Lumb6cjgN1NIGLxyPE0c/CGuqFIAl\nAMrYdXcCWJfmSv4xV7nxxGGDr3z5wGSChnzZvT6G8+icTD3KqEcoek40+EB9Hk3O0NAxmKh9WEpF\nmXNyUI6yilfLH0lDInEZucqX5fpZLfO+9jtI8omjiBC4vkEH4yi0giLqZCvf0+lUmdm8Fhd84uhH\nPY3X87iYOExt8cRhI0XJLPEQXy3ppMEnDS/2u4x6BBL5+P0AOF3e3qiiElmHgdQnU2Yf6p45p2Pi\ncBIl6uQE+EBeTJYvzlCC96tfsQyfqMJM3EHFP1Kc05MKjXFRfhCkP9XNra19qDvj0Cf7rktYEVo8\nccTIQBVVmJSV5uI4TqZ+mQFlATXmLM/PwKyKhThXE2SV5gUuThpEpVm0dQxUE2kMFRGoN3XzFMuE\nQYqSGXFOL+oa+N6gHMcSasr0VQl18V1+WJp84lAHzHxOJH45OMlxXEJTaDSV03S6wrV+A6hXqAVf\nrna7nY8njhgZqLLSET5W+qbq0ZeKc66yOPxMHOoHO4pxHFFl7Go95GCoeyCSHE1v6qa9bL7K5dyS\nG6IOtpxerokq12DrZzDmiyK17jo4Qr/w6jenc37axpXjYY1JnOBGO544YmSBr8L9iIhMgA8oJdTF\n18razyCmXsMV8AMZdxAFKtemS/8gsYAaM5O7CVFib+rme2IKK+aQz2EJNWVEb72pm697gq6MOaTh\nQpkDd5nPicMLTn3qp22DqNSIbjLmOOKJwxc+T6eHkpVyRF2JLqDGjMhE9wqKgyvHT6Z+Wsrkoqoe\nVBCJK3Dqx/tobqb8UTnMpcPQ4/0uRXpBVsd+rJvk4KhyZ1Hq7WfRINvm9Fx0i6rClhdG5EdE1J+J\nqnKZ0futh5e1XzxxxMhCCXXJvIAVaRv0oIjq3yFt8k1zPlwu3FeDHwdRdt2jKtyd+pE/H90ch6qQ\n5X4ifp+lH38KOSCq3FnUenvV0c1izBR06FCC9H22bjI6x8H7y4l+PHHEyEDVcdxHc0OVMZ0qqYJG\nhVbeLqEmOomx3qZ0LVwsZ4LjCKpgVuE02PDnY0I5LiemXtQ18KqXyB/Hwa/VMXH45ThUHUfQMqLU\nKcxiKkidsh0Ao08cMccRTxy+oa7KxofgOHKtVHJBfixD0lZV4kMIZt3l94O7hKZo13FIx68iAg3Q\n4FSoIkq/5AI3982lc9ABVZcVFEEHe9k2J04wKpecC1yvY8J6S3UANGUlJstFMVZRPHHEIGrvJxBG\nFJJrpZIL8gOOouPwOwjwEB66rKrGszJ1+IaoGJF2+isiUH8q0Vo2H8hN1F0F5wLC0As62HtxN/ky\nxzU1QfHvtox6GKMjy7XKsYNCjrtxBsATDKciga7oCkAE0bsXD/i6by1S+A5m4WUsxj2YAwC4E/U4\nFQnH69Yi5VrWLNTi31ETKUibLGMWaj2vuxWzM/vF6BGaHsdtrMwuEeOAOvXXMAzL7J+FsyOVr6IS\nVZn9HuiptWwnbMLGzH4xigPfX4kqnIaKrHq7YS1SKIRIz1OILu3ewVORwI8wv907GwT8O3B7z/2+\nm2q5N2IGbsSMTJnqu9GbBXY8FaNC0XFqi9oGWS5tx/ZQBQMxx3EiQjreBZH5y1UIVww7mRNKZaif\nVRD36vYrsw26auTKWV3K8elUqU1v4qRglR71ZdTTqB9HVIswP4hqSBBkVZ0P5bj6Heii4ST+Vds+\nkOkEw4iY3Wi6tQGxjiMGh5Sh96CCdgpONzEUl986Kca5+aXfgV16SEurKj/ir6DsOTfH1RHkkIgy\n8aOKKNtzPAzcFKxSlOfHoz4oPS4ijKKj8iPL58rxMBMH758l1ETjaZSrdzUXi/FwKmHh5cm+hJoi\niWtVOtOpsp3nuEqfP7cwRi1OdL2eXzxxxMgKb8Blpao5rh/Fd1gHJhXc7JRHsfUazILS4ZFso8r0\nJW1eV916ghQlqZSKsnQcuuXy/PmHsdoKsupWLZ1ywev5qtyu0yJHp44j1yLF63wQWn6NTXjbgvpx\nhHGujCeOGO3YeDeltB/Fty4lI3do8
stxBMXJzEIpqumsU6j2KPGe3M5xqypJS6fYhfe7Do7Di/NQ\n37tc8BqMVY7DacDlq3J1UvTDrQZxzvM675czlm06j87Jad5eykL0BPV7CjMJdtqJA0ARgDchcmy8\nB2BO+vhdALYCWJneLmX3zIbINb4GwMW5aMQTh0CKku2SBBXlEIUE9SgOUpcbqDpr5Rs2H0cu8EF+\nIJVGqreOicPrA5bnZEgQqfuJYuLp1F4eZVWHyMOtTVIEw8VHYerrds5pkcMHV9VB0c+zd+OmJbfu\nd2Hj9z2TXNTJVOY5sKsixqDfS65+dVq0deaJwwJQkt7vCuANAOekJ45bHa4fh+xETusQJ3LyjRQl\n23lseynZ/AxyYVasTtyPjhAKTuCDfF8qjlRv+fFxHYcJjoNPHJIjDFtvp/u4iPBkKgtUnhPc2uT0\nnKPATx9wWmHEcE5tMal0l86UC6jRc6JR66Az0oJbv3baiSOLENADwAoAkz0mjjh1bESkKOk7BaVp\njsNpgNRRPgefOE6mfjkVnn5ocgdAExFmedRgGZAvbH843cc5Pd3Kd05TGlIE4Tj8lOv17ExkAJTv\n6yU0JUucpOsdVWk5lanqinQutNxoduqJA0BhWhy1D8AP08fuArAJwDsAHgPQN318PoAZ7N5fArja\nq/x44mgPbs3kN8ihH91HUMhB3cl6SJfikU8cg6iv4zVBlL1E2SFH/HijB5WtT6SxVERC8a5jYFcH\nV7lwiBJ00Au8vXylrCsfigTnoFUxrC5nTw71OTpF2406wXuZs3NOUYeIMRc69cSRIQSUAngFwHgA\nA9MTSgGAewA8RgEmDgA3A1gOYPnw4cMNdOnxDW5p5Ddkhl/LjyDgimt1BaVD8UhEviyggnIcfJDy\n4xsSVLbOMyN6BRL0C3VizNUfUaEqmHVxHCp4uzhnY4IWUfvn6DRxRBUpepmz50O0y3FcTByinvi+\nKqICUA7g3fR+LKrSgJNDxEIywXHwATLIhxBkVcedDKNaVUmYiH/F28StqkxwHDJisK6654IJ8RGR\nswUUD6tuGl6iMx0iRRUDFCtE0+i0EweAAQBK0/vFAP4M4HIAg9k13wXwdHr/NEU5vj5WjgdDiuzM\nez2pUHv01SCQkWuLydLGeqsfIBcr+dFH+AH3RtdVpkSKklmWQToVsRKSCxtOA7WXrSJF+Q2q2C/N\nTesWi6nQsZAKOslw7tmUFSJHlInDdKyqwQBesSzrHQDLACwmohcB3GdZ1qr08SnpyQNE9B6AZwCs\nBvAHADVE1Gq4jicM1iKFazEdB3EQANCKVjyCH3dgjSj9l/BL/ExLifPRgJ/hYcxHAwDgefw2c64r\nukUqW8b2GYfxmWNtaItUpop7MAeH0s8HAM7EWVrK5XGJ9mMfAGT+6y6f4x7MyfSRjJFmCqJt+wEA\n+7EfL2OxMVrz0YCn8SSexpOZdy1MGfxdzYUDOJDZ346PQ9HMF6JFcMsBInoHwBkOx7/qcc89EHqP\nGAExHw14D6uyjv0dKzqoNsC7eFd7mTLgWyWq8B3MwjK8mTl3kH14YSA/9D7okznWCrPrlgcwF9fj\nhsjlyLoDyBpcdYGX/yPMd7zmQIj+X4sU5qMBs1CbMzihOgD/B+qwHO8EpukHs1CLFjRn9oNAtqkS\nVWhBM1rQjLVI5Wwfn+hHBAwQGqQftSAsq9JZtlhUZUOyxmNpZCQlm25TWRMiM6ls5F7SUWNVyXbz\n5FC69QRclKhTJMGfmSxfV9BHtXz1uLTicsoDngtBlM0pspOUOcVh6yxwsjrz074B1DuwbtKJpl+g\ns+o48rF9VicOr8GdO7C56RaCWDWFnUjkACyT0uiErNN/UJ12fQRXjuvKKsjBzaV15xwnooz/TD6i\n4xJR1mBuQmnMr5XKcV2GELrq5nZfkDJ4dNygyvEwdY0njs8gvFYYXMnm5jkexI8izGomRUnqTyWZ\negRN5OQX3KrK78SR6yPjZaoKXx3cGHfQ1J1znCjbrNOE8l1F1Gi8fsFNxk2Y47rRC9umoO8Kfy/6\nUrHxST+eOD6D8Hop+cThxvIGXekFHSxNhlDgUGNVBamb24DAOQ7V7DPqYEJkO3rpCA3uhKjRcYOA\ncwGmORyTPiNO4EEXw8QSC/qucLFrPib9eOKIkQXux2FqpZ8LS6ipXVj1XAgzQfG2+jUHlaaWalA7\neZxPRqrcXgfHwXNLm8jHwQcf7rxmAtx0uaNzjusGDz0TJrFT0Lbx90JyHCbCnkhEmTiMWlXF6Bhw\n65ZeLB1lPvECFuEYjmV+F/lIK+rHckcFbyun54VTkUAflOJpPAkA6INS/AjzMyaYHNx0Vt7rt25u\n4JZa38N3tFhVSYQ1HQ2LvdiT2ffb/xxBn7k0/dVtJu2EVWmLrRL0wg8xDy9gUSALq6DvCu+/vijD\nqUjgO5gV+JvIB+KJ4wRET/TIDHhTMLVD6lCJKizAYziEgyhGMX6Cn+e8R36UQT5O3tYgk6STuaU8\n9kcswY50OmaZ41on+mNApvzPoVxr2bNQi1/gEbShDYUoxJ2o11q+F8LkZw/6zAtRiFa0GnkuEmuR\nwj2Yg2M4CgD4MqbgQkzDhZhmjCYg+u9omuYZmAQg3DeRD5h2AIzRATjGVrT3Y65RWm6OYU9gQWZA\nP4IjvsqSK7Qgdujkv6rtaDXiCTTiiQw9eWwcTstc1xM9Q1Jwx3WYkdn/BJ+EKsOt33nfmfZBAbIn\nawtWu/rkAl+V34gZuBEzHMuQ7ZVoQ1tgWn4hOc+PsBWnoQL34oHQZbk9JycQe5uvwJUAwn0T+UA8\ncZyA2Ie9mf3RGGOUlh/v2Fa04rvso9eJQziU2eftjoINWJ/ZP4zDWsrk0OFF79XvXIxjWnQ1AzMz\n+0dxNJKXtZentmyvnAwJhHswJ1ylc2AWanEtqnEtqvE0FkYatIN4j/OJ/gGNC74gk5dvhFWOdJYt\nVo63h7QH703djMfZ8XIM680y0V1CU0LVIxfOo8naHAAleCRWv9GFneDWN36s3sKWTRTNryIouPVc\nFHq5YkOlqH2GS9OKf5mEKYplWhDlNleO+0mH4LdsN+suxFZVMTjkwBTFFlyHDXs+Jo5L6IIsSxQd\nWEJNrua4QeDWh9wz3cTgJ63ZTOXj4ODe3Kaj8XILriKC8UlRWlJFDX3vd4DnE4efiA9+v1E3+lEm\njlg5fgJCWmccxEF8C1/H/+LlwGVEVcrNRwOOMDHPBmwIVY4X1iKFN/FG5nd3dNdS7iPMeiWKEtat\nDzeyvuABFXVgLVKZ5+9m5aQ7rpEUsUSNFZYLd6K+ndWbSfwQ8/AfqMMPMS/U/bKft+BD/A4voAXN\naMQTrtdbsDL7PF6aG/x+ozosAdsh7IzTWbaY42iPUipifgg9OqQOakwmE/4kqpOhrjAU45lIxMSq\nnYdV15ETnEPtE69rdDiYqaIqk1B9VPLhFR8Fsm9kJIJc30AfxqHn47tFZw2rbllWkWVZb1qW9b
Zl\nWe9ZljUnfbzMsqzFlmWl0v/7sntmW5a11rKsNZZlXWyyficqejBLoFL09bjSHE5FAhekTYE/h3Lc\nG3LV5oVZqMVlqMz81mVFdD1uzOyb8BfogR6Z/bma+2UWajMrV7cw57NQi39HjRYTz1mozdAx7TOk\nKpg7m4mqCtnP4zEBQO7+6YmSzH4JehmtW1SYtqo6DOACIpoI4HQAl1iWdQ6A2wG8TEQJAC+nf8Oy\nrHEAroVI6HQJgJ9YlmXOYPsEhcX2v4JrO6wek/EFFKIQO7Adf8NfjdBQzUF14P/DbzL7xT4cF/1C\nWrfIyag7ivAFfDHn9UGtYaR4rQhFjud1mnieikTWQiUogrSxElWZ/TA+I0HrshapjInwy1jsq568\nDNnP38AsnIpR2Iu97e7nNKQPBwCU+wirbsRayi/CsipBNwA9AKwAMBnAGqSzAEIke1qT3o9Tx2oA\nF4XosjQKg4HUR4t1khtUsYyTaCZMyAaeVTBKTCTVUkjWlwez81KOhxEp5VN0JBElDEiuNvLnpz5v\n3aIqXpcUJbNyuPsNOeLUHl5v9X41cKPc9xPbLarIEZ1ZOZ7mGN4CcCqAh4noDcuyBhLRtvQlHwMY\nmN4fAuB1dvuW9DG1zJsB3AwAw4cPN1X14xOLF6PstMP46GTxs3cHhRwBgDP2j8GrPYXyukR3PVIp\nzPrph3j5292x9nNCCd+9tStUXXagkBapFNDQgBU17wNjxaGCNis0X85DmPRBKWZtrAI2LcX/TN6M\nHUXNOe8PY6Awa2MVfjbsYaAQsNqQF0+t7q1dcbDwsGP/50KuNvLnN2tjFR4d+jBauwCFbQWYVaBX\nVMXrIpOinfpxH5zV48uY0XuWr5Aj7dqTSmHWb5rR8t1KoHfvdvfzCAZNB57D7h4i+VYXH9kUO9Sr\nPOyME3QDUArgFQDjATQr5z5N/58PYAY7/ksAV3uVG3McCsrLacmFoFFrQaM3dWCym2SSUucPoen/\nA6rY3FN/PWpqiABKnQq6pAk0cDdowSPtbd8DcRyyzDGFdN5fQL0Pgu77zcTQVWznm5Auf8mlXWn8\n+6DpfyrVb1JaXU333QbqcwB0389G6i3bCU1NtOBrXenknaAFc8doLz7r+VVX04LrYYxWO7qvVlDq\nVIjnFgbJJFFFBRF8lJFM0pKLLPHdrjcf1ZgoGseRt4lD1BPfB3ArYlGVOQwZIh4rQNQzeEY2baiu\ntusxyjknSCQkk6JcSQMQNKOWKT90uV2hMV+GWueuBpIRXXGFXX6vPIgpy8tten36mKXF21ZSYpYW\nkXheNTXifxikFwpUUZG7DHktQGSZDxlPFG3iMG1VNcCyrNL0fjGAaQA+APA8kIlVMBPAovT+8wCu\ntSyru2VZIwAkAJZUOkZunHmmvT9sWMfVQ6KgALjxxtzXBUUiAbz4ItA97btRWAjMnOl9j58yFy4E\n+vWzj1HYaFg+cMop+svk9R3STsqrH0dthS5uusksLd628Xr9XxyRSADz54v/YVBbC9TUiHcqVxln\nnWXvd80tpupomJaADgbwimVZ7wBYBmAxEb0I4F4A0yzLSgGYmv4NInoPwDMAVgP4A4AaIjIfqe1E\nQSoF9O4tBmsA2LSp4+oyc6b4ANragJ9Fj83kiEQCKC0V+62twL336imziwHVXyoFTJ8OJJN2+Xv1\nxNZyxebNZssHgFGj7P2FC83TkxgwIH+0wiLIxPPgg/Z+r85tigsYnjiI6B0iOoOIJhDReCL6v+nj\nu4joQiJKENFUItrN7rmHiE4hotFE9L8m63fCoaEBePJJMVgD9v98I5UC6urs1ejBg97XR6HT0mL/\nflMTc7rHzjGB3pqU+g0NwKpVYqDVtaJMpYBZs8R/JxQ5m+NqhcVMoDn3oQu8jfv2OdPVTcfrmI5y\nna7p3z88jaB1SP8uRvhQC3HIkRMJtbXA0qVigAKAkhLv601BDpIFBWLyOhY8wU9OyBX8ITs6Lj7/\neT1l9+ghJruCgujiL4natOVLc7PgOgBg6NBoZTY0AA8LiyPMT1uM8YnOBOekYssWe9+EaJS3cQML\nW6NbhOjUl07HdJTrdM0rr9i/D2gO3aLWIf17oG3NGhxhlSOdZYuV4wqSSaKiIqFkG5g7XauxOtTU\nCOW8KSWtVCZalv2/SZMlCjcwCGtR44ZkUiiRdRgNOClv8/38m5qIunQR9IYYyCvP21hZaT+XKZqD\nZrr1ZRTluCyjulpsbuU0NYl3QfZjv37h6bnVgbcj/bsYWEWdUTkeowOQSNicRmsHq4cuuUT8/8pX\n9JctFY8jR4rfRMCCBdHLTaVsDqmwMFtpGaYsVUyRSAglckEBUFnpfq8fOMnQN240IzLqKPA2zpsH\n9Ex7qUuuzQQdr2Nhyi0tFSLkBpecHIsWifZI8ZtuEbOOdqgIO+N0lu0zzXG4rZIKCsTKpVu3/NKW\n4KaFJsyCOW3J1QBEF4TLbZFBU5PgjnjdKyKE1HYyx0wm7ZWliX4pKbHrng+Og5sXn3OOWVrJJFFx\nccdy027w4lgaG8U74MYRy+s492wS6feyHNhBIcfdDh/4o26f6YlDDkxcnMIHbRN+Al60iWzWnNv3\nd+9uhnZ1tZgcdfmLqD4chYXRxF9ODmDcv6U4Qv4Qp4GKl52vwXXyZJueTlGVk3iFT1KdbeLw+hZl\nvUeNar/Qku1sarLbpnPi8JjQooiqOnzgj7p9picOp5eisdHcgJ2LNpH9sXA9QanGBD9cZswHyoIC\nf4O8F6fU2Ji9Ytcha1bpcSe2sWPDl+s0UHXExDFwoE2vvFxfuWr7VC62s00cbgO0uohSF1qyXfya\nLhpD+bst8IiOH89xE9tneuJwAl+V6Ryw/UJ+QKY4Dv4hJJPZg4mficPjQ8qck6I+3ROvumrWwXE0\nNdl9kUxm97tuJasTpOgIEBOvLjhxHFLEl0/Fv5eIyQ/kO9Wnj1g0qAsWSYNPwKY5jjTiiSOG/YJc\ncIGZATtoXbgFjM56qB8C10n4EVV5cRzynBR/FRREs6hRoa6ao0wcaplOorB8hJzhA17UkC9BaOVj\n4pBixih6Li6udAo94sRxmBQxM8QTRwz7BeQDtknluJ+6yAHYJOfTr5/d3kpNWQZLS80MhmosrInh\nAyhmoK6Kk0m7T3SUnwsmTWRVTJli0zJh+qtCB8dB5B3skHOOUjl+HEwcsTnuiQJpnsodwAo7KAeW\nrIs0Lzx82Pv6KJDOUpYFfOtbesrk5qzbtrlfFxTSpFR6jnOv97BYtEg4Wy5aZB+TTpGffBK9fC9I\nM2PZnq1bzdLjDq081IkplJcD550n/keBjIFWU2M7gvJz8+cLGvJ7Kej8w3Lnr2EMf5AvYH29HaQv\nHx+XV13kh96jh/f1USCDHBJlD55RwD2g16/XU6bEggX2xCR9UKJATtJyQGpoAPaLnA5GPPY5GhqA\nF14Q7enWDZg92yw9vijSHXLECdLj2s3/Ighy+VI0N
Nj+G8X6sk6aQjxxnIiQK3wdK9ookCEvTIa+\n4NxMFGc9Dh4cUsfg7gai6GWoA1JtrT1hmx6Aamvt1fiRI8Dy5Wbp1dfbbdM9oTtBnZRN05KchqnY\nbhphOqz6MMuyXrEsa7VlWe9ZllWbPn6XZVlbLctamd4uZffMtixrrWVZayzLuthk/U4YcA/lhgY7\nGNygQWZpBTlngnYqle1lO3du8DLUc1dckf3hDh4crc4qZs60J9Io3s9u7di4EejTR0RYra8PX34u\nWvJdkxNrr15AVZXz/VFpz5ghto0bbc/xgeHDLPmmOWeOGNB1ely7IZGwJ46OjvjgB2GVI342iLDq\nZ6b3ewFIAhgH4C4AtzpcPw7A2xBRG0cAWAeg0ItGrBynbMuapibhuGZKgejHnLW6WuxLJbMus1DV\nFFd12PPjOe6n/kFNfIOAK5OjWAa5tYNb5+hS7Hs5t3F6uowTnGhzRzrTynFOU1esMj9xr6QZeEF+\ncsWjs+YcJ5FXfFt6f69lWe/DIYc4QxWAp4noMIANlmWtBfB5AK+ZrOdxD8lK19aKVZJcsfDIsSZo\nuZ3bvFnE5pErKB4OWxdtGYE3ShlO5x5/3NYRAEInMW1acDpuWLPG3o+ig3BrB+eWPvoofPm5aMn9\n1asFJwA/Mhh1AAAgAElEQVQAK1booafS/vBD0W9c9GZSf1NbK6IYy/0oSKXEN/nXv4p+am4Gnnii\n/TUNDbbeJh/6m6gIO+ME3QCUA/gQQG8IjmMTgHcAPAagb/qaOOd4FCST2R7bHeXHwVfVpswLnVLH\n6vBc5n4wgN7UsUTZZpc6/DhUcA4gHyar/Fmboic5gB498tu2MOCcRVMTUVlZ9vvkxAWqnO5xwHHk\nRTluWVYJgIUAvkNEewA8AmAkgNMhOJJ5Acu72bKs5ZZlLd+5c6f2+h63aGgwbxKpwkn+ra6cdJoX\nSnoAcPbZ2ecqKqKXr+o0SIMCm2PaNKCsTOybsDb7+c9tM2zTVlVAtqVT1PwibpBK6r597WOdIS2y\nCpkjRlpi1dUBu3cLc+ULLgCqq531TrJ98rnF5riAZVldISaNJ4notwBARNuJqJWI2gD8AkIcBQBb\nAfA3Ymj6WBaI6OdEdBYRnTXgeEghmS/U1gJTpti/85HISZosTp9uTx5ysJUfgs56cHpTp9rHCwqE\nj0RU1NdnZ+gzITbQbW3GJ+9p08xmk1MhUwQDwPbtZmhIy7G777afRz7S4gaFFJ+WlQlDgXnzhKHC\n0aPeVmCyfXLCOA5EVaatqiwIcdP7RPQgO86XdVcCeDe9/zyAay3L6m5Z1ggACQCa8oGeoOCDRiIB\njBuXX/q1tWKlv2qVbe8uV6Fy4tBpjsvp8TzNhYV6rF8SiexVdND8z7mstmbMsFPT6uoX7m/AU6ya\nzgC4eLHItSL9UkxaOgHC3FcuSkzo71QEtRKU7+bu3cKnaNo04PLLxbmNG91zckg6urlbkwgr4/Kz\nAfgHAAShy1iZ3i4F8GsAq9LHnwcwmN1zJ4Q11RoA/5iLxmdex6FavJjM9+AGlwxj2q2qVHpcX6Az\nrpTMoFdYGLzMIFZbukKC8P7nsaomT9ZTvhtUqzbTegceetyEfkiF17N0g9O3UF0tdEFuWQDV98J0\nPo40EMeq+gxDfTGbmvIXq0hNi6l+NDJyatSPXI0Ey/9LE8ao5qe87v37i/L6949Wjnq8ulrEW5KT\nnY6Uuio9rtz3M5BHSY/a1CSU8bI9JgMPdkQ+jqipY/3eL98N2Y9hY8wFrG88cXzWwVcsMp91lI/L\nb/5l1d5d9eOQkWujBjnkmfT4f25BFHXicEoO1b27Pi7Gye9hzBh95cpVcdCw6mFW1Rz5sKoiap9r\nxDQ3RRR94gjSt7x9YRcUAZ9llInDsBA0Rl4g7c6XLcv2Rpay9KCQMnNAKO3cjrnZuzc3i2uljiNq\nkENZdlWVkB3L/7/7XfZ1M2dGp9HcLMJnAKLec+a0t7uPUv5zz9nHdCh4VR+LESNsvwoZADLI/UHB\nfWnyaelkShHP4fTOB0HYvuV+RPmgFwZhZ5zOssUcB4Mudt4vx5HrfikyMyVW4DJvXV6+yaTgNEz5\ncfAVuolESzzDYD4SOfHUsVFT93ohmczOvWI6v7mkGYXjCEpLtk1nBkAPoLP7ccTII0aPFpFKgdzh\noN2sRpwieXpF91TLSSTEqkd6sHP7e52YNs2OldS7t56VViIBlJbav0mzpcu3vhXO2syvhU9QK7Co\n2LLF3r/xRnN0EgngoYfs3xs26C3fVJw1v+UmErZZ83FgjptzZgHwAoTlk+MWdsbStcUcB4NqnZFr\nxRlVvu1VDpfZFhZGK98L0oLMsvStDPOVyCmI7sfvs8q35ZGpnONO4O+U7rZ5xeMK+n1wTsVPGfw6\nueUBMKzjeCD9/yoAgwBIge91APIgaIzhG1Ln8OST4rfUP3hdz/9HoSv/y7g769bZ5016wpaUiHYS\nCbphZNEqeGytKHoTFWp8rb17/d/r91nxnCQm86BIcK6JJ8AyAa6z0902r3hcQb8PqRuRMbauuMK7\nDK5LAU4MjkNucJidnI7le4s5DgXJpIgNpcOaSS3Xj7xXrpy4PNqkrF1yB926+ec4crVlzBi77rqi\no0q61dXRn49X/Xl0ZJ26JTeaEyfafRUldaxq2u0Ebmqso228TW6+SGG4WHmv1DfKXONeZtqqhaAO\neLUhmaRyYAeFHHeDTBzvAxjJfo+A8AiPJ47OBF1hu1X4Zdvly6r7I3cD9+Pw+5HnagsPFDl3rr66\nctpRJlSv+nNxTj6evy6FtZ9Q5kFNjf3S5KbkkrYOMS7PWe5WnpOZti7Rbg5n1EligjI+cVwCEd32\nVQB/ArARwMVhCeva4olDAX8B87Hi9Lq+Z08ybt8vnaaCfOS52sLl9n366KurpC05grByeq/6m5o4\n3Ghyq6ooDo1+OI5zzrFp6YiKYIrjyEXL6Ti3htPlOd4ZOA4Sk0d3ABPTW/ewRHVu8cShwBTHEQZT\nplBkEUYuSNNZHSFH5IfGJw6djmaqqMqECM+UqMoNfMAzHamAL4pOP90sLaL8muNyo4bjIORITq2l\nZVkXpP9fBeAyAKekt8vSx2LkC7lM+1IpYZYqFZZRHe+i1keahpowEZW0TzpJ/B4xInqQQ6mk3L3b\nPibL14GGBmG4IJXIJoIQLlhgm0GbDqueSgFvvGH//vhjMzTkO8aDKJpIp7B4MTBhgvifSokAhQ8/\nLJxAw9RX7i9e7Pyd8PM332wfl+banRh+3twvA/gjgEqHcwTgt1prFMMduTxZ5cAkEcRqx0R9JH0T\n9VAtUaS3dBTU1gJLl2ZbPvFIuTrKf+IJoKVFX5kqeF+bjiDb0JDtwa0r06NKQz7nTz+1j5uYFOvq\nxLOvqwPOOy9cTnj1vXz4YeCpp8RiZOlSYOFCe4Ejr126NPv9bWuL1Iy8ICyrom4AZuoqK8j2mRJV\n5WKdo/gJ
mKiPVDLr0HG4yaCl2CdsYDgVPOJuEEstv2hstMs34ffARZWmvauTyWyrKhOiKv7c5861\naZnQm3Flth+dS6768u9RZgLkejh5bWNjtkGG6e82DXSGIIcAVjgcGwbgFQCrAbwHoDZ9vAzAYgCp\n9P++7J7ZANZChFXPqXz/TE0cuZBMioFDDkwmzWD5R+Z0btQoe1DXIWvngQ7lh9zYaH9sBQXOdfEL\n+RFz5bKJiYOXX1enp0w+WHFrNtOBAJNJopISs4O5Gy3T+hRdkN9JY6P7Ikt1/tMRNdkHOsvE8XeH\nY4MBnJne7wUgCWAcgPsA3J4+fjuAH6b3xwF4G0IJPwIiL0ehF9144mBQI4jqzDmurvh5pFoVap4G\nXTb3sly5auORgN3q4hc8Oi4vU6fnOJFtMACIHNo6wCdVvnI1nY9FHfBMKuPV55KvXDNRIevt9R6p\nMeaEmaxxRJk4dLr0UrsDRNuIaEV6fy+EL8gQAFUAFqQvWwDgn9L7VQCeJqLDRLQBgvP4PGKEg06P\nbZ5lDhBpMSsqnNO1ypSZOpFICPlwTY3thTtpUnu6YSHzPtfXZ8eq0q2f4Tnhqd0nEw48KyKHrpS9\nbkYQtbXZCuviYjP0Uingr3/NvqaoSA8tv3WIimXL3MuSceCOI+icODz95C3LKgdwBoA3AAwkom3p\nUx8DkG/fEAA81vSW9LEYflBfnz1g68w9LQdWOWhPmwa88474r2LaNODZZ+3ffsJ7+wH/wGbMAHr2\ntC2Tok6SbkEcdQ3uEvPn28HsPvc5feVOmABUVmYrjQcN0lO2mpqWpyrmlmHTp+unJ3/rMH6IUgcV\nuVIEy3NTp4qgo8mkt3XWggX2fpgJ2FSQRjeEZVXUDcB8j3MlAN4CcFX6d7Ny/lNZBoAZ7PgvAVzt\nUN7NAJYDWD58+HCNzNsJgA6wB3cEFy3orocqIpGbrrDeMnAioDesuhT3ScdIXbJsNdGV3EpK9JTv\nFbSPi8Z0tcfJEEIV5ZgO4JjL8COHV3bmHH8mXuIqVRQXFCE83WFSxwHgFq/Nx/1dAbzEr4VQfA9O\n7w8GsCa9PxvAbHbdSwDO9So/1nEo4LGqevc2T8vt4+KOYbrrwS1euMI0yiAv29LUZC46rvy4Zfk6\nMgASZdedD+QmrKrUZ246vwiny9uWj1wjuerj4ZWdOScNRa64wtvQImo+jhDOiqYnjnqvLce9FoBf\nAfiRcvx+ZCvH70vvn4Zs5fh6xMrxYMjXh0xkr5JGjWr/wpryYFc/Ss7VRLGqkoM6X9l26RKtTBWN\njUKhL+NrmVDwcqsqkx77Evn0VA/zTuXT+9uJdhCTXrmg+CyZ4zoWDvwDhNL8HQAr09ulAPoBeBnC\nHHcJgDJ2z50Q1lRrAPxjLhrxxKGAh2Xo2tXfPWE/Ls5eSxZZlsXjCpkKtqeKL6IEpHMKzqib41BF\nSSYGCFOxytzArcRMmuMShQucGUSEozteFRepcjNyN8hc97p8knIgysSR03PcsqyHvM4T0bc9zv0F\n7krzC13uuQfAPbnqFcMFEybYikQZeiIXwuZWrq+396XSXJbFLXp0WdtwOrW1It8B9+6tqgpfrsxa\nePnl0ernhXnzgMsus0OOmAgJU1FhP/+wOeeDYPlye990znGebdAvguTUUL8DHTnHm5uFRdWqVbnz\nxcj3wnReEx3INbMAmOm1hZ2xdG0xx6GA6zj8rmj9rLT8rr4ke87l0f/6r+Ha4qfeXJFdWRmtLJUj\niJpV0KnP+ApdN8chHUBNcjQqPa4PMs1xcM9xE9yU6vUdxnPcqczqavFc3MqS10TRcYQA8imqAtAj\nLDETWzxxMOiKvho1jaZqIaLzQ1AHYz5wRQnhwS2TeKh2HXoT3md8YNdlGeTk9Q7odQB1gmrdZnri\n0J2Pwws68nHwclTnVadr+IIlD8jLxAHgXIjQIR+mf08E8JOwhHVt8cTBoL6AOvM9BJH3qgOYH11L\nrnwF8rj6QUvFLCBk4GHB6fDJKIo3uhqWJZk0M/jJPqmszO4P0xNHMplNz2QYkGRSvM+SlinPcW6h\npkOp7qe8piZbv3ECThxvQMSe+js79m5Ywrq2eOJgkByH/JjzFPPGsR41NcIMFyDq2zf3PbkypKnK\nd/kB8gx0UURVHDIfh2UJS6iwUOuuTqi6Jg43jiMfJqs8yKHJMCDqosjUJKWL05Dws+By8kvKA6JM\nHIHcbYlos3LIp/Y1hnEsXiw8d6dOtZXi+/d3TF0SCeCss2zlrB/PcdUz3e049/BOpbLbaHkGL/AP\nqbQmApYsCV+OW5skdIUhl30ycybQo4eeMv0glQLWrrV/S4/4sGWpns/8WG0tMIQFkdCZ+4PnxWhu\nBqqrxfs7erQwuAjqjc3rLRXs55wjyneC2jYTeVp0w+8MA+C/AXwBwAoIp75bIeJKxRxHZ4Aavlnn\nyiWMWSIPQGhKZKKu1KKazsp2cvGLLi5Gls9NiHV6Pzsp9016V+um50evZkI/xOlwPQRvW9B3gNc7\nmbS/SS+xJ/9eTjBRVX8ATwLYDmAHgCfA/C86avtMThxOA7mUp9fV2cpdXZ7JYdh3U34cHNyCLGpY\ndSJb1MOV41EmI9VKp6ZGWAbJ8nWKW+Qz6tPHbGpalV5RkZ7n7EevZtqptLHR1klFyZui6jV4uW7X\n1tXZbfPrfxUR+Zo4FiA7b0ZfAI+FJaxr+0xOHF4DOV/R6hqYwnAc3OzUpNJU5hzXwXGoOoLCQn1W\nVXKfc4Q6rZA4B5APT275TvD87KYTR5lOUuX0vKLQ4qH6c+Xi4BxOnkLGR5k4ggjTJhBRJncjEX1q\nWdYZUcRkMUIglbLlsE6y86FDbae4997TQzNM2GcePnz9ej31cILONJv19cBHHwGvvCJ+t7YCixY5\nRwD2A9X5rLlZ6ARknm6d6U8TCeFgeO212TnTTUG+E889Zx/brKpANYPrsEzQkg57zc1CXyTT/Ibt\nT/7c3RwJ5TVnnQXceKPY7yjdZAAEUY4XWJbVV/6wLKsM/nKWx9AJmVe8tLR9CHAAGDzY3tedEyMI\n5s8X4aQBYORIc3R4OPWpU6OVlUgA48Zll+3H49irPKnITyTEM5OThgksWiQGucJCczRUDB1q70dR\njudCKgV88IH920TOcfmMnnxS9OWzzwpP/LC5MuTzr693N5KQ13AP/BNMOX49gA8A/L/09gGAr4Zl\ndXRtnzlRVS6xEY8iGsWvISqSSTt8uEnHsH79bBY/is+FBPcXKC7WGxxPmktL8ZpuUZIqPjLtkEdk\nv2u6A0KqkCIdGSnAVNu8UiKbRDJpB788DmJV+eY4iOhXAK6CUI5vh8it8WvdE1mMHHBLOATY5n+9\neonfu3aZrYtX8piGBpvl1rU65GaTTnRvuSU6jUQCGDVK7B88CNTVRS+Tl/3EE/bz8RtLLEj5tbV2\nf+tM5JUL3bsD5eXB7/ObgEiaNp96qvhtqm2LFom4UosW6SkvSIIls
UC3/3diBOKJiGg1hPd4p8bR\no0exZcsWHDp0qKOrkl/s3i2C9PFAfe+/3+6yoqIiDB06FF2jiha8gsCddZa9r2vikPSWLhUf94cf\nZk+OS5YAN9wQnQ7/yNV0rDrK/jStKjQhy25osPuEB4A0hdGjhT5r/34xyT7/fLD7/QYSlAsmGTyT\n+4/oRJCgiH7gt31z5py4E0dQWJb1GIDLAewgovHpY3cB+BqAnenL7iCi36fPzQbwbxCOhd8mopfC\n0N2yZQt69eqF8vJyWLqcwo4HHDok5MByoLYsYOzYrEuICLt27cKWLVswYsSI9mVIrqW21pmr4fD6\nyB580N7XlTpW0qmqEivC3/0u+3zYaLBqm7t1s+scVj+jlrl4sRhYR4ywOQ3dznqplJhMJXQaDrjR\nW7PG/r1iRfAyggzUixcLLhAwn3N840b7+QH+vwkVXu2T70hVlYigK2H6uaVplwPDQ98fVsblZwNw\nHoAzwUKTALgLwK0O145DdhKndciRxIlcdByrV6+mtra20LK/4xotLUTLlolt+XLHS9ra2mj16tXO\n9+sKudDYaOseTIU+4Sa/UZz11DZzE9+wWQXVMqW5ZXm5OXNZ1SHSdHRclZ7p/B8mgxyqYVu4M2CU\nb8JLJ+lkiivNqU2jpoYmCQfhUGO7UY6DiJZallXu8/IqCE/0wwA2WJa1FsDnAbwWhvZnitPg6NZN\nWAO1tblaZ3j2jS5WnYfqkKtEXZArNbUdvXuHK8+rzWHFBmqZ8+bZHIfMl6EbtbViVS5FVCbyfaj0\nfvUrYO9es3ScoIuLlZAipepqoUuRXC1/J8J8E1y8unBhNsfCOei6Olssqlv35YTaWux6+OGduS90\nQdgZx+8GoBztOY5NEFkBH0PaqRDAfAAz2HW/BHC1S5k3A1gOYPnw4cPbTaauq+nPAjZtsjmOd991\nvcx4H3FnOl0e7BJOaV51eI5LcEst3SFHKitt6xkTVlWVlbbHcz6CHPIcGSatuJJJosmTzXMculPM\nNjXZTp9uHEsymc095yk4KfIV5FATHgEwEsDpALYBmBe0ACL6ORGdRURnDRgwQHf9tKCEZ8BL4667\n7sIDDzwAALjhhhswYsQInH766Tj99NPx0EOeiRZz49AhId8uLbWPmV5xeoH7VEhlsA5IOX55ufAh\nkH4cbW3Aj3+sj45EWC7GCQ0NwAsv2DJsHUYDakC9F17Ir3K1sdHeN5kBsKEh2wdGt6+Dl7WiXzhZ\n/S1YIIxWRo1y51gaGmynUyA/Oo6IyLunCRFtl/uWZf0CwIvpn1shwrZLDE0fO2Fx//334+qrr9ZT\n2EcfiRe0IycLjrlz7X1dUWABe3AEhMiHOwByRW0YyMFX1rewMDs9blRIz+Tf/EYM7jr6hVvt1NYK\ngwEpCsuHVWFZmb1v0nNcbVtnhGr1B9gGG6NHu09KtbXA6tX25HGCeY5rgWVZzLUZVwJ4N73/PIBr\nLcvqblnWCAAJAG/mpVKtrcAPfgBUVor/nWzGb2pqwrnnnoszzzwTX/nKV7Bv3z60tLRg9OjRWJMe\nLK+rrcUv/ud/gH378Ie//Q1nzpiBidddhwsvdEztbh7co7iiQl+5tbVCDl1ZKf5zjmD27Ghlyw//\nyBHxu1evaCtQFYmEmIh06t946PZEQuSc5/RMgHM5OsObeyGRAH7+8/x6xQfxwQDsZzFvnv1M5LPO\n9cw/+cTez2cbwyKsjMvPBuApCHHUUQBbIExtfw1gFYSO43kAg9n1d0JYU60B8I9+aLhZVQXCPfdk\nWzXcc0+w+x3Q0yFQWX19Pd1///1ERDRz5kwqLy+niRMn0sSJE+mdd95xLGfnzp30pS99ifbt20dE\nRPfeey/NmTOHiIiampronHPOoaeeeoouvvBCohUraEdTEw096SRa/9xzRG+9Rbt27XIsN7SOw4/n\nek1Ntsx2ypRwtHLVoakpOytcVEswNex5mHDwufK3c92PCR0EDwRoSufAg/fx3PKmghw6vVNR2xYk\nwVKQ90rNVS6fR3m5O60wGTM1AHkKchhmUrrO4fAvPa6/B8A95mrkgtde8/5tCH5EVa+//jpWr16N\nL37xiwCAI0eO4NxzzwUATJs2Dc8++yxqamrw9u9/D7S24vVVq3DeGWdgRDoxTBkXJehALocmeV56\nXwPAO++YqcPSpbbFVkGBsE6JgkQCOPvsbKukxYuDBTlU+0f9zX1NTFjPcA7MlKiKB+/jwSw3bDBD\nT/Yhj70WVZ/ixzGvqkq8Y0HeKxlLDhD6Rvk8pF9I2LhXnQzHQTStPODcc4EXX8z+3UlARJg2bRqe\neuqp7BOHDqHt44/x/nvvoUePHvi0rQ1De/YUoR8kTIjccpnrSjn+Rx/ZA3Bzc3h6qiOdjA5cWSnO\nS1lyW5tQRIaNZCvpzJxp6yAAIaoIojtR+0f9z+XXJsxYp061By7dJqsSUpGcSgF//autdwjrgJkL\nsu+GDrXFkVH1KX7Mznn4Ea/3ir+j8v3fs8d2xpwyRUyqmzeLa1UR4syZgobUeR09Gr5d+UJYVqWz\nbFpEVa2tQjx1+eXif2trsPsd4EdU9eyzz+YsZ8eOHTRs2DBKpVJERLRv3z5as2YN0aZN9EBtLX3t\nuuto6dKlNKmigo689hrtWLzYFlWtWKFfVOUHOh3RVHGB6jTFky5FMZ3ldGQiJN3muETZTmwmHPS4\nqC0f5rhcfGTaAZC/V6ZpEfk30VXfUTU7It93Enup38txkAEw5jgAIea44w6tRR44cABDmYL4lpAB\n+AYMGIDHH38c1113HQ6nLabuvvtuUHk5Hn3xRbz52mvoNWAAzjv/fNy9YAHm/Nu/4ed33IGrvvc9\ntBHhpM99Dovdch2bQlUV8Pjj9uo6irKPrwx5LpKpU4Xl1tq1NncQxXSWO2M98oh9/FvfCl+mEyZM\nsFfoJpSgo0fbnF4+HMl4PKxBg8zS4vHP8gG/eWhU7qWhQXAqo0YJ0efUqSIWFeAs9qqqynbcPJHC\nqnfWTQvHcaJg3Trb+c8j5AiR4T5SlX268kPzlZ1KA4juAOiURzuqwl1FU5Nddo8eessmEm2QXFiX\nLvrLV8GV4yYz1yWT2Xm58xEy3qsufgxE5HnOUfjhOMIYZYQAYo4jBgDg5JOFXFsqRWUipY6GrmB+\nfGUnV3AcUbL1AfZK0bLEJ1xcrC9KqsSCBfa+CXPZjRvFivXoUTsEuUmMGmUryB2cXrWhoUFk4+N0\nOwq5FOsqpyL1HnJfhRoqprhYb30NoCM8x2O4YPLkyRlPcrmtihLWW/ojmISTrXt9vV7fDQnu3Vtf\nL0RW3AGQ+46EQVWVsNyRoq8ePcz5QgDAli36y5w1y1au6vTYdwP3T3CKtqwLVVVmJyYnuPlxcN8Z\nP5B5WJ54wvl9SiSEcY7sy+PAATDmODoR3oiaVnTHjmwTTDkAmoTT6kvmv774YlEHE2ahMs0ntxy7\n917g9tvDl7loUfaq1oRV
Erd64rR0geecN2XlxMHzyZt0BuRWR0B+co24cRZ+dR9BkEjYnG4+dFMR\nEXMcJxJOOimbzS3Iw+N1W30tWmRPXKZEJrW12ebHkyZFL49nsWtr8+817Bc8FIuJfunZ097Xne/D\nCTxfiUmOQ302JnKOO9EMwllEQSplf6/8GXZSxBPHiYQjR7I/qHxMHE7B4aT1k8wwuGmTfrrSdl7q\ncbp1A37602hlJhJA2tESgHAA1Jk6FhBWTxImREncsiwf1jmDWQQh3eHzOdRwKvmYOHQEPgT8hS5p\naLDblA8Rc0TEE8eJhM2bs52H8sHyOn0U0ntW1sWESGbOHCFGkE50x47p0UfMnJktt9edOnbePNsM\n14QoiUclzgc4vXffdb8uKlIp4K237N/HweCa+Tbkuzp9uvvkweNadbJYeU6IJ44TCcOGZVtS5SOZ\nlZQDNzTYx6qqhHJc2vWbFGHwNuoQK3ERG6BXyZ9KiUFEThwmREk8ZW8+8J//ae+PH2+OTkNDdniT\nm282R0sX5LexbJmwAlu1Kvs74ZApi4H8SAoiovPX8DiFznwcl156KZpzhO04/7zzsPyPfxSWJzIf\nM5f/m4KTHHjBAvGRSG7AxAqqvl7QlfGL2trcP0q/kCI2ObB36SI4BF2QnJhcLZsQJc2bZ5ebD1EV\nfy9POskcndpaIB1/DYAdWr+zgXPgtbVi4ZFMCkfAXPoSOWbk23osBIxOHJZlPWZZ1g7Lst5lx8os\ny1psWVYq/b8vOzfbsqy1lmWtsSzrYpN16wy4//77sXLlSqxcuRLf/va3Xa/7/e9/j1KeoMkJR48K\n0cfu3fbA1NFyYFkPHpxON/ikFNWzWA7sskzdKz9VwRt1YFfFhKmU4Jik0r1//2jl+8Hkyfb+9u3u\n1+lAvhX/YcA58EQCuOUWkbNk6tTc+hIpWjYhYg4aIj4HTHMcjwO4RDl2O4CXiSgB4OX0b1iWNQ7A\ntQBOS9/zE8uy8hKYvhWtuA8/wHRU4j78AG3oXDLG8vJyfPLJJ9i4cSPGjh2Lr33tazjttNNw0UUX\n4aBUSHbtimdffRWfnzkTo668En/++987rsL19YI1lzqOtWv105AfKNcTRBXTyBWiFFUdORKdi+FQ\nle9RB3ZVTCh/y6CMJvpdxe232xPs6tXm6DQ0iJW7FE3qtnbTBSmmraoSdayrE4u5urrcdZbvsgnd\nl7d7BowAACAASURBVJNIOQKMThxEtBTAbuVwFQDpPrsAwD+x408T0WEi2gBgLYDPm6yfxDz8EPW4\nE7/Hi6jHnXgA9+aDLG677bbAjn6pVAo1NTV47733UFpaioULF4oTBQU41rMn3nzmGfyorg5zfvEL\n80o2r1UMd2LSzXrzCLlcxxFFrCSttP7lX+xjOkK1SyxeLKyCxo+3B1ruAxEGqphQcjRy4pMiS5OY\nP99+z8aNM0fnrLPEe5TPtoVZpfOIug0NYtIoKBD/vZTjqZTdNhMxzDSbFneEjmMgEW1L738MYGB6\nfwgAHit5S/qYcbyB1zx/mwIXVVX4VMJKvQgATJo0CRtZKs2rzj8fOHgQp42egNS2T7CehgfzvQv6\noUhrETX8R1qRmSo4FTN6/AozevwaqXUB6pELUqS0cSNAhMVdLsSE3u9gcZeI4UYefhi491673kWP\nI3X/Qj11rqsTA8r99yOFkaL80mei9YsqJkz/z9QfP9Pb70544w3R/73exuI+miZZJzz4ILBvn00r\nyrP2g1RKDPRBV+l8gJYcbFubEFd5KccbGpCyThHPrfvj+p+bLtPiNDrUc5yIyLKswO7NlmXdDOBm\nABg+fHjkekzGufg9Xsz63VnRnSm8CwsLbVEVgO6DBgEnnYSd+/riaGsbdlv90PoRkBjpVJID/CS3\n8YN0fug5O76PJ7t9FfgU2Pt/gEW/CV9ku/IBoKoKqStvxfTChdhb0Ac3fwfY8HbEMlevRsPrV4l6\nA9i2ciRejl5jwQ3V1QG33II5t/fFk4ergL2a+wUAJkzAv++YhVe6TQNagdKfAvPv11i+igMHcGPx\nY9haOBw3bhgDA0FUBObNA6ZPx41I06KHzNEC7LhlFRXBVumqV/nChaKsqirBhXjksalrvAgvdLsC\nALDnP4Hnn3K+tDOgIyaO7ZZlDSaiben84zvSx7cC4Gm9hqaPtQMR/RzAzwHgrLPOihxX41ahZsEb\neA2TcW7m93GH7t2B4cNx7CNbSXk4CMfhJ7kNR329CPuhXp9IAE1N2Hv5nszTfWtlgHrkAvs4Gy5/\nAXtfEsr35ijuIiw5UdWs/8ZP3mwFoRBvFEzOfa8fTJuWyYS457f7gD+Lw2/pVkU98ADePGcY0AZY\nFqHqUsMm2Q89hN3f7QcA+Hh3N6TWAYlTDNCZNg146y3snjwQIODjY/3M0QKyv4Uoq3Q+kXgF4Ewk\nsGLAyUD6HV4RdgGUJ3SEqOp5ADPT+zMBLGLHr7Usq7tlWSMAJAC8mY8KFaAA38MdWIgX8D3cgQIN\n3SLzccjtwTza1x86astI+/ULcGNQdtbr+kQC24dHDAHiA7X32Bxniw6dYiKBRRNmgyD6sM3Sv7ba\ncdDW+XTVHcA4kUDXEiH/J7KwwPSq9YYb0KVEWDu1tgJzfmiQViKBLj0Fx93aVmCclk7Rjh8cKrSt\nxg4dzhvZUDDKcViW9RSA8wH0tyxrC4B6APcCeMayrH8DsAnANQBARO9ZlvUMgNUAjgGoIaLOH+3L\nBW05FNOPP/6477KkHqN///54l3nn3nrrrZn9V199NbNfVtYfz/9O3LPzE2DwQHQIVn9g748yFK6K\nrzh1xXSs/Trw8KNi/6CB+Iy8X+q/p7/8w3l2qj5gMNJIR9LKN7g9icnoLTpg2qrqOiIaTERdiWgo\nEf2SiHYR0YVElCCiqUS0m11/DxGdQkSjieh/TdbtRMYAZuWZjwC5bhhZbu+fbDA5nG5/t8QptuGT\nCSfekcyRfsmf9JffM+3iYAGY+mX95asoSqvdunQB6v/DLK0e6RieXbuap5VvlDA3FVMLLV2Iw6p3\nIkyePDmTHlbi17/+tW+LK4kdO+39jszlxEVH48eaoZFaZ/s5trZCi9w7tc62MDVh0cyjj28z4DMn\nFwsEYO5/ATdU66fBIbmyfPibchGOMf1GB4Ev8rYZjFCvA3HIkU6EN954I2OeG9RMl4M7nnZkLLiB\nLALF/T82Q4PLuYmAhogBctUyTWDPXnt//Qb95R9g4rWyMv3lq+CTq47+94J8t48eBRa/YpZWvrGf\npX/h70hnRDxxnIAo5E+1A0VV23fY+7d9Kz80a7+utzwTvli9e9n7Iw3Ef+zBfOM+NhwFBMiOzae7\n/1X0Zr6kdf/pft3xiOMhoopEPHGcgCjqJCmLR3zO3n/3fTM07v7n9fjxgTtwaut6FBToEV9840t2\nmSZ0RKcxsV3PoANEaj0w6w7x3wVdutr7FQaduSV6pQe8vn3Mi4+6pyfFwkJg3t0GC
fnoZ90Ya9nv\nXT7iU0ZBJ69eDF84dFgoNk4aABR1Rw/rMPq27cR2awCoIECE3NR6YE46bEd9nX/PQX7fzGuABc8A\nH23HU8s+xGuHxuK24noAfr0QA9S14VGUv5/CrCN/wdC2rfj6oAW578tRHmpvQte58zDryG/Rh1pw\n2+CHI5WV6UPZR3v2Yc77JfiwtQ5rC0dmcR++0PAo8HAjsPhPwNmni+cEZMpG7xJMPXoNzjnwEn7Z\n9ybMuztiv8t6f7Qd2PAhMGEs8EC9aFe6nZfgYpxz4CUsHHgTAj9n3leyfVUXi3co3Z5MGxsexVjr\nJmzHSAwbAkybErI9ksail+z/8lnJ8x9uBV5oEs5BT0R4B9TyPc7/32PzMCX93n2/KCLNBc+IY/V1\nwMbNQN1dwLy7gGmarCWI6LjeJk2aRCpWr17d7li+UVBQQBMnTqTTTjuNrr76atq/fz+df/759Ic/\n/CHruv/6r/+ir3/969GIbdpMtGyl+H/wELWu+oBo2Ur65O+bqaXF+RbHPqqZTYRBYquZnX0uuU4c\nS67zvq/ifHs/vb00bjYl10ZroivNPgkiDKL1RV+gpj9qKK9mNu298HoiDKLFfa4PVyYri4hEnyn9\n8tK42VT9NQreL2pZNbOz+x+DaM8IcX5T1ezc5fltC9/ke5CuRyR6vK/kvvoOsXNbL/wm/WbQbPrL\nrxzewyD0JA35Xz4reX7UF8T/6m/qo8O/nepvZtOpmU0tVeLYc2XfjPbeqe8HrwMDgOUUctyNOQ5D\nKC4uxsqVwl26uroaP/3pT3Hdddfh6aefxsUX2xHjn376adx3333RiJ00wP6/YycKDh0CiovQ75QB\nQJBYcLU32e7XcgUoIVe6S18DFv4ye/XE75Mcx5KlwPZPgCGDcNFzNwG6RRiyfkXdgfmPYUT9tRgR\nZgXKy2tuAZpbUNImDOqn9n0fGLoegVfRVReLfqpKP+eGR4FVHwCjRgKjhZ3lRb1bcNFt64FTApad\nGCn6X3J4sh+aWzIr9F4zrwEWvYTh6jMMA9kvr/4N2PoxUD5MHGNt6jV6OPCFcRheH4KerCOvq8px\nsHMnN7fgupcbgTcAfPUH4ek5cRzqeblqT63PzX2rnFNzC1B9lfge6u4SfdXwKDBfqfPoUwUXUHsT\nem/cDKxfjap51wBh3mWnutfeJH5LjkMXws44nWXrrBxHz549M/uPPPIIfeMb36Bdu3bRgAED6PDh\nw0REtGHDBho2bBi1tbWFI3LwUIbL8DzmgMB9xFe6KjfihEqxaqfK64PRCQqX1VQoqKtNv211K4dz\nHNXfFJvk3MKW3VHgHKdsT+X1WSvmvNfDNJ0g77sT58Sfv1pnp2Mm3gsP2sXAKgo57nb4wB910zFx\nHDtGdM8DRJf/s/jf2hrodkfIiePo0aN0xRVX0E9+8hMiIrrsssvoueeeIyKiuXPnUl1dXXgiiogq\n89/H5BFqcvX6AJpezT7HWXHdHzkfiBufEjSuuD46HdmWqGXmGhSCDn7qoO12r65BNVc5ucQwYcrO\n14TglyZvo586Nb0qrm18yp5Uq7/Z/rsIWx+/UMuQ3yEXuaWPnYLCXRRPHDaCDor3PECEUnu754FA\ntztC6jgmTpxIs2bNynAZTzzxBF177bVERDRx4kRavnx5eCJ8guCTCN93gTauTB1EnOT6ulejqi5G\n90otXyu/MPXxqpuueucqhy8WOCcVpeyO4MK8aAZ9Xm66kyDfgI6JQ22ToYkj1nEAeG2Z9+8w4DoO\njqqqKnz3u9/FihUrcODAAUya5CMQoGI1lUFRd2D4ULFf2gfYu1/8l4b1UvehE6rF0FkTgLJS4F+u\nArZsE8flNbf8O/Dgz2xZf1R6gG01ImX6q5PC2ueKi9rrZcK0J7VelNmnt2ibDjhZWQUB079g5jX2\nMV627JPqq8L1g1oWp6EiMdLWc+zdBzzfJI77sT5y0mkA7fVCvD7y2jAWfyp4mbwuqmWg1H84PTen\n5ymf0UfbhS7rX64CGp8GBvQTz6TqYmHeq1racSsoqUf6cCvwfAgLwdT69u9AfZ0YE/h3OfMaoLQP\nPnp47kfhOhExx0FkhuPgOg4V11xzDU2cOJG+//3v+yssFwdx8BDRu8KSitZt1K/j4CshdUXjpGNw\nsBgJRYvIe7WtWnMFgaQj6+pUZtkYPaITHavpXCt1/hyCiEfC1lNeW342adFlOdHmx+RzikorVz/y\nviwb0/5azkmXjRF97VSGqifzap9qRVZ+tt62uZxHbFUVDbd/V/x/bRlw7tn2b1O47rrrcOWVV+Lp\np5/2dwO3mnLiPnbsFAGDirqL8KGHDgO7m4GRn0NgRwGn1ZS0qAKyLTdm3SG4irk/BkYMF74FC54B\n/ppm2fYfEKuvIBwHpzX/B84rVF6H/34B2N0iVnhBIOlUXwXU3Ni+zJ27RR86WcIEhdqGMByIWsbi\nPwlLmVv+XdRfWs6s+kA8l+T64D4I6gq84VHBdT34M0Fn+Tt2neUKWz7r3hHTA7s9Z8lpsRwzWPq6\nP0snJ6icDW/nqJHA0MHChfvwEdGHFWOy6yStybp3E+9H3V3AO6/Y9V38J3Hf6FPFtmatoFU+zKY/\n645sDlG1RBwRMjkd7y/puFg3B1j1vvC/mfWv7c+HRdgZJ+oGYCOAVQBWIj3zASgDsBhAKv2/b65y\nOqtVlXZIPUZqvc1ZyOPrNtrbspVEy98W/9/9wLU41z7iMlE3xTdR9mpJ3sNXTn1GZa+4/CKXnNeJ\n+wnDcUhlplwxyvKuuN4us9uw7BVlEKjt4L+d5M5B4cTpybIlFxCmfJUTk6vutL9MlrGD7H915a0T\nkkbJKeJ/12HRuDdV6e2mm6j+Zvt3nxtlzH1ItLvxqezycz1nN50gkR5LRDcuzYHzwXHMcUwhok/Y\n79sBvExE91qWdXv69wkWPDkkduwEduzK1nEcOgys2yi4jZP6CVnmwUNC57C7GRh2cjSa6uqfo/Ym\nsXJb9QEwYZy96pWr3fPOsVehQWTuiZHeK/w584AnfyvkwL1LgClfBLZuC26jvuglUfdFLwk7etnW\nUWwVe+SofT4o1L7jfjAyzshHEQJJzburvW2+XIFv3CzaIT2uw9RbcmKS4xjQH/jjX8SzlVwYfwfC\n9lMucBoAcMkUYPiQ8HocXh7Xn7h5ks+6w36OgHj3am4U+rzdzYILu+Fa+3yu91elx9shv5co3Bvn\n3KTOpqQncMEX23NzD88NTaajJw4VVRCJnwBgAYBX8VmfOKRoqkcPoHg/MHCAHTrzo49tEdWxVuCT\n3eL30aPAaaPD0Zt5DfDOalsJ66WwlIPXzGvEoJFaL8JgyFAYYRWYfrBmrRjERo0U9MqH5b6HQxWN\nSCX/jdcCf1tus/dhByhVJFJ1MfDU/4gBSyb52LotXNmA6G8pIpFIrRcKViA71KpfcOUqf343XJut\nPJZ9snGzGDx7legzJHDChHFA/37CCGL/
/mg5gqUDJRcVyoFevsPNLaKt9XXeDoq5DBG4YprTl/TK\nh2WFisHUL2V/e1Gh0pdtjip6BTpUVLUBQkz1FoCb08ea2XmL/3bbTnhRlVSMS+U3N7eVinApopK/\nW/aE9+PIZfrpR1lt0qSSi8/U0ApRwEUVJpTZ8nf34fZ/3eIdLroLU/+g7eb9r8MB06tOTmFITNLz\n8uHQ9Z6roiQdhga5lP9SBJdcd9yKqv6BiLZalnUSgMWWZX3ATxIRWZblGJvUsqybAdwMAMOHh1Qk\nHS+QivHSPmIlxE1spYJ8z17BaRQXAbs+BT7dI7iOY61CQR4EXspoud/cIkRFgL3qWvwnsYU1i/UD\nvuoFgPLhwI5PRAKKqCteLvrZuk1wH1HKVPuRm2tu3SZWfTpEO6p56Ydbw3FLcqVdeZGtPM3FMc67\nC7j5NvGujRgeXmHtBdUYQ3KZftrmxwhBvUY+p2Ur3cOEOBk7cM5BcmvcgIEbFjhh1Eg789mKd3K3\nzQ1u5s48FAoXvYVF2BlH5wbgLgC3AlgDYHD62GAAa3Lde8JzHH4gOZC/rxL/5SYV6A6I1EeqAx5R\ne2WtCU9gJ5NJbv6oCzrDmHiZMuuAWmbYfvdS2nqh6VVns1UTUA0acsFPfztdk1xnh1PxQ8uN0/My\n65V0uAPlwPHiuoHj/bUvDNj7gQgcR0dNFD0B9GL7fwNwCYD7AdyePn47gPtylfWZnzi4VdXOXUSr\n3idanRT/W/a43ubYR16WQOp18oWXVieNT2V/1Kr1Shg41YfTrf6m/bFNmR69fImgA5RX+XwgNjGZ\nNr2aHR4lbL97WdC5XZtcZ/srRLFAU8t0Ox60bX762ys8jNuk4/ReVl4vRE08TI18NhdMd/auV8sZ\nWJGeOCpyty0oHMLWHHexqiDCjb6d3t4DcGf6eD8AL0OY4y4BUJarrM48cdx99900btw4qqiooIkT\nJ9Lrr7+urew//elPdMYZZ1BhYSE9e+/9RMn1zuFHXOAZVl2Vy8sPVX35qr/p7OTndS4IVHNGp488\nimkrl/vyFaauAV7Wrfxse+DQPXlwOXn52WIbcnr4PiHyH6uqZjbR5Eu9B1m/yCWbl3GgpFlwFFNm\nJ6jvttNEIK9zitGVq/5OfSNNv69I6zTkcxtyut62qfU4XkOOENF6ABMdju8CcGH+a6Qfr732Gl58\n8UWsWLEC3bt3xyeffIIjGhOADx8+HI8//jgeqJ8jDhw+bMtIucNgEDjJ5bnpItDeNBEQ+oCqi215\ncXOLODdqZLQQGCpUE1fpxCStgIKCy31fSIfNqLsLOO9cdzPkMOjWVfRHaR/xW2fZEn16CysnAOgT\nNDuUAjczbKeQJBelTVF7lUR7zl6yefkOPvgzoCVtUbh3X3haTlDb3LtE9OfGzcCwIXY/SAfAstJs\nHYibk6dX+JYP1mb/P3OCCDlypgELNScz3QjobOa4Jwy2bduG/v37o3t34XfRv39/AMCyZctQW1uL\n/fv3o3v37nj55ZexcOFCPP/88zhw4ADWrVuHK6+8MpOjo6SkBLW1tXjxxRdRXFyMRYsWYeDAgSgv\nLwcAFJT0EN7hw4cAn+zGvpY9qJr5VXza0oKjR4/i7rvvRlVVlb9KqzboqumiBB9wl60UCstFL4lj\n0gegYoztYRsGTpOC+nE2PCoG5Iox4WjI9qbWC8XmqveFkvesCaLMKDG2ANscUvpCSA9i7r0bVZks\naVRdDPz4MWGmPPtbtjI2DNwGcacJZcJYMbhO+UK0trj5P/B3kCvIe0X0VFfh1mZVEa/mvODPkdff\nK38NIO5J52bJvIO9S8IvgtzAlf/KYuujJ38Sx6riCCyqOnaM6J4fEV3+VfFfQ1z1vXv30sSJEymR\nSNA3vvENevXVV+nw4cM0YsQIevPNN4mIqKWlhY4ePUqNjY00YsQIam5upoMHD9Lw4cPpww8/JCIi\nAPT8888TEdFt372F/t9364Q+I21yO3PGDHr2yd9kzHKPvracWt4V7d+5cyedcsopjvk+fPeRqsDz\nOi/1DpXXC52D9B4PI8LgrLWbrqPyepu9vyKECaMqnsilzAwL2ZZRX7D7yoQy2XRoci+dkw49lp+o\nAVHoeZWvnlN1R166ECfDBK7/cUotkEssrAseJvM4Ts1xOw9+OB+4816x/+Ji8f+O2khFlpSU4K23\n3sKf//xnvPLKK/jnf/5n3HnnnRg8eDDOPvtsAEDv3r0z11944YXo00eIMsaNG4dNmzZh2LBh6Nat\nGy6//HIAwKRTTsHixUuE09XBQ2Lbuw/4tFl4lZeVggaU4Y4fzsXSP/8ZBd26YevWrdi+fTsGDRoU\nvBGp9cD0f7O9dgHboUiuoBIjxbGHG21RjBT7AMFNJ7kHL2BHYZWrN2k2K+tU0lP8f+f94O3jK2gg\nOwaRFL/pQNXFwBP/LVbKyfXC5JXHx4oC3m+yX5pb2j+noOX5iQgbleMD7EgA0qlP7ssYW7x9ktuo\nviocd6PGXOPtUTmERS/Zz2vYEHGPym1xJ0/1W5n/A8FRyDLmzMuOG6bGlaq9CfjdEnF/3Zxw0XGd\noHJSvM4nkOd4x+C1t7x/h0RhYSHOP/98nH/++aioqMDDD7sHnJMiLXnfsWPHAABdu3aFZVnieN++\nOFZYIAa13r2ALR8JXw1AhBw5aQCefPop7Ny2DW81/hpdTx6E8vP+AYcOHQrXAJ7y9OzTxTFVx6DK\ncaX8tFeJmNTOPt3fR84/XP7xyXLlcTk49ukFfPlcEUr+lb8CFWODt88pcGByvdAX7G7WF0Zj0Uu2\nbF5Cl35D7beKMULspg7AQctT6+h0XNWB6dTZqHSXviaeTRTw5+0UTFMNReKURpnv87A1gNgvH2b7\nNHmFDlEXXPN/IHRhgBA36oIqQlPrHBLxxAEA506yOQ35OyLWrFmDgoICJBIJAMDKlSsxduxY/OEP\nf8CyZctw9tlnY+/evSguLvZfaLeuYkAe+Tngwy1i0uhSCPQtzeTlaGlpwUknn4yuvXrhlXfexqZN\nm3KXq672VWcowJa7qiEM1MGEy9zVWDxetKVegTtLSahybjl5DBuSvXIM2k454W3cLH6POdWOsxU2\nHpKaYwEQoSTKhwE7d4lQIGvW6nOWq7pYDFIy78PULwHf/j/iXBgFspeSmq+QN24WHEBZafAIyBxq\nWAw1RAd3ajznTDGhhw3JwQdRtZ3qO8Yd+gDRXrlS5zk4+H/AXnzU3SXK27NPPG+nOlddLLiM3y0R\nUXlb9gBdu4qwN6bAOI/yh+8L7z0dVsbVWTYtOo7WVu06juXLl9O5555LY8eOpYqKCrryyitp586d\n9Oabb9LkyZNpwoQJNHnyZNq7dy81NjZSTU1N5t7LLruMXnnlFSLKzuvx7LPP0syZM4mI6M0//4WG\nDBpEPXr0oLKyMhqXGEXUsod2/v1tOueMM2j8KafSDV/5Co0ZM4Y2bNjQrn5ZfaQ6f7GwBJ7mhDpk\
n3F6OZ07lR6Gp0lL/V15vm32G1RU4hcjg+6b0J7JMTku3ySp/F1QHTN20OEzpntzg5NCnmuC6vR+N\nT4l6zn0oy/Q165tyoiND0ZgM30KUFf14EroQhRx3O3zgj7p1Zj8OY+BOf9xvQ8azSq4X+34dAKXD\n29yHxH9pXy5fdKeBWg2/7fRh+IGb4xlXVksHv4rziS64OnugCqIQVmmp/9VQ4mEGKE5DNRoImoPa\nLz1e9pTpIgT5BVfrV5LzvpY+FdJpLUoI91wOetLHZu5D+vLLe9Uluc6OG1V+tvjNnUK9fIfcFmGq\nQYR8bpMvFc9r4oVE/caKsPFzHzLXNlm/yutpPLocoHjisHHCTxxyopCThQxqKP/LoId+HQC9OA71\nvHpMXisnG10rT1m+dPiSFloyL4MMBOfFEQVFLkuYzo5cHs9h4Da4q88/CnfmVVd+jYn2udXFa3Lg\nGS6dvMGlxV/l9e6On06cqVywROU43GjxySMixxHrOI4XyPDqpX2EbqNPb+H0d/CQkDvL3OO9e4lr\nuxT6dwBUbdPlMZmDu7lFyLFVhyeuPJRK8T372udWDgNZ/p60nH7ffjv44Osr2tc9ioUSD544ZLCQ\nUR85Gr48p/Lr5ghZ9/wfCH1HlBzkbqi9SeRLX7FKX5hzN2W5+nzCINezU3Nob9ws9AGAPos3p7pI\nX4fKi+x6APYzq68T4c+djAKk0vvJ39p5caTyW+0/2Y4Fz4h+/HCLeM+DZrNU4aT4l/opeSy1Hrue\n/NnO0DTCzjidZXPjOJx8F45rSC5j1ft2AEMppjp4qL34ygNtbW3+wqrz3+rKkouSlJWMthWhk/7D\na7UXBqpITIooTOkhpC+HTu6MrzB1BmjkZTuJEnle7TB+NLnAn78aRj9qjnM/dLnI0k/4GwneNzzc\njB+a8h2JAj8+J0SxH4eKoqIi7Nq1C/369cuYsh73kNzD4aOCo5A41ioSOgFidQMIbkNyIAqICLt2\n7UJRUZEzHaewI/I/Xx1zU11lJdPOMiYs+KqMW2i5rfbCQLajTy/bZLbyImFNo8vPorkFGDJIhJMY\nMlh42+uAtODavBV4vklYH5UPF/myg2ZEdIO0ROKZ8GpvEj4L3Dz2A40mpBIyXP+qD0T49o2bga5d\ngKPH9JqsqpBJt3Y32znHb02H9pGWal6Z/hJp8/Xk+uxwM27Xy3ekX1+RFqGsb7T6O9VNB2fOcEJO\nHEOHDsWWLVuwc2d4TqzT4ugx4PB+YF8B8NZW4ccAAF26iI+qoABoLgD273UtoqioCEOHKhOLGppA\nwu0D4WFHZMwqOWHICSSoyEp1MlOzs0ks/KUQLX24FZhR4z/boJPZMRe5yKyCvXo6h4kIA+kkV50W\nP/x1WbTUrhzSeU46Qa563y47aEZECS/TbMA2f171gRCXtuwRg+Psb0VrixP4ACyzJZ70/7d3Z7F2\nVXUcx7+/3vZ2sMQCJdgyeKEDyFhABkENPBgbCGAUSBNCETUEsUYjkhQfsDxoSIgm+GBIo4gmRoIT\nYZRAJGJAhhYppSKEAgINtrRYKKGp3vbvw96bu+/hTPvM+9zfJ7np2WdPa511Tv97rb32WvOTqXc7\n9cxIvpny8kuSZqOnnpl4CPRHa5J0ZEOc1BvqJD//Bkx0j86Gm6klK8fZ6cXc61vaz1P+d1RjeTbM\nbHywGlqtqgzKX7WmqqFVa8iHrFqcH4Zj7NRiw1xXu1HZqEdVvrdJvidWreM1Uq85IDtP1lX23lnS\nZwAACkZJREFU/JUTVfsDji42b0Jl19/svIcsm7gJ34mhHyqH1c6nuRPNVPmRcZeemfSoavf49bpH\nR0z+TmRdT7vZRfbFzRPn0ILON1Plm4jyTWH7LZ7cJFrtt5fvhXXp1RPfnaxrbf6Gfr3PJ/teZPlr\ndz6OypvhlU3K6fIYI9tiWJqqJC0HbgZGgJ9FxI19TtLgqLzpteTI5Mng/Ki0+eE+rlnz4Xmpa6lW\nlc2ulmFyVbvaUB0nHDN5GI1OVI3z5/ngyfEbkqvBpWltYPpIsnzltfDKk83lsdbDiVvSJr+5H+nM\n0A/XrEmulre/DX9+dCLNnahtQHKMbJDJ7Mn+djX6jLLvxDeuSB7UzDfndMOSI+H2W2DFVRNNsZ1s\npsp38rj8kolhTQ7cP2mWevHliSbRWk/Sw8TvZOZo0lSYfSb5BwdryR4yjHTC0yIPBdfKU/ZvVjvM\n0pNb3rrx31tbPkerEacbfyTBYjPJfB2jJPN1HFNvnylb46h2BXT+yog5R0xcMXViIqJaNY5qzyhU\nuxprNX+Vy5U1jmziqAM/MVFbaDefWffJ7AZ5uzcpK9Nc+dxIp27u5z+jbE6OVsq+WrnVK+tOTXiV\nP3f+s6l8Jub0c5Or+l/8pv3z1Prs8+esnAWw1u+vXhfcejW3yv2PPiti2sKIa9a0nr9G+c0tU7YZ\nAGsmBj4FPJBbvg64rt4+Uypw5NXq/ZQ9+9CNh6Rqnb9WWjo90mcm/4PMmhQ62aunk/8hVtPJZ0+q\nHbfVpqpq6arWlJOtz5pYzvlS+4GwWhNZ5XtFp7VtNo/Nbtto3yK/g2rHyp5RmruoeL5a0E7gULL/\nYJB0EbA8Ir6WLl8GnB4Rqyq2uxK4Ml08imSu8illNsw8mJGDt7J3627YMxtmLmRk4Rw0ZxRmAewg\n3nqVva91+/wAlWlZxPRFM2F2N9IwxsjhB6KD9sL4CEzfA7s3M755N+xpvHf/VZZdJ4+7mOmLR2HW\nTuLtzex9pd10Zd8rgB3s2z6Paftn649n+rGjMGsf7J0GI+2UdXbunez7T3YOSL5X2Xv5da1+bkU+\n+2q/sXr7Ftm+2roTmX7CdJgxDv/bwPizreSvoI9HRMHZ3hIDd4+jGRGxFljb73SYmU1F0/qdgApb\ngHw/wkPT98zMbEAMWuB4Clgi6QhJo8AK4K4+p8nMzHIGqqkqIsYlrQIeIOlhdWtEbOpzsszMLGeg\nbo6bmdngG7SmKjMzG3AOHGZmVogDh5mZFeLAYWZmhThwmJlZIQ4cZmZWiAOHmZkV4sBhZmaFOHCY\nmVkhDhxmZlaIA4eZmRXiwGFmZoU4cJiZWSEOHGZmVshAzcfRivnz58fY2Fi/k2FmVirr16/fPqXm\nHM8bGxtj3bp1/U6GmVlbxlbf+8HrV288r+vnk/SvVvd1U5WZmRVS+hqHWaf1+srPrGwcOMysZQ6y\nU5MDx5DxD3lqG/byz/I3jHkrE9/jMDOzQhw4zMysEAcOMzMrxPc4rCt60dY+7O35ZoPKNQ4zMyvE\ngcPMzApx4DAzs0IcOMzMrBDfHDfrA9/YtzJzjcPMzApx4LChMLb63klX8WbWPW6qMusRBzYbFq5x\nmJlZIQ4cZmZWiAOHmZkV4nscZmZdMqzdrh04rOs8+Y5ZdWXtMOHAMSTK+gUsqlv5nCqfXxn1umyq\nna/IRc9U+C45cJTMsP/H2Yt0+Bz9Oe4gnK/Vc9XaLx9Q
Gh27zN+JSoqInpyoWyTtAl7odzq6aD6w\nvd+J6CLnr9yGOX/DnDeAoyJiv1Z2HIYaxwsR8cl+J6JbJK1z/srL+SuvYc4bJPlrdV93xzUzs0Ic\nOMzMrJBhCBxr+52ALnP+ys35K69hzhu0kb/S3xw3M7PeGoYah5mZ9VBpAoek5ZJekPSSpNVV1kvS\nT9L1z0o6uR/pbFUT+Ttb0juSnkn/ru9HOlsh6VZJ2yQ9V2N92cuuUf7KXHaHSXpY0j8kbZL0rSrb\nlLb8msxfmctvlqQnJW1I83dDlW2Kl19EDPwfMAJsBo4ERoENwDEV25wL3A8IOAN4ot/p7nD+zgbu\n6XdaW8zfZ4GTgedqrC9t2TWZvzKX3QLg5PT1fsCLQ/bbayZ/ZS4/AXPT1zOAJ4Az2i2/stQ4TgNe\nioiXI+K/wO3AhRXbXAj8KhKPA/MkLeh1QlvUTP5KKyIeAd6us0mZy66Z/JVWRLwZEU+nr3cBzwOH\nVGxW2vJrMn+llZbJe+nijPSv8sZ24fIrS+A4BHg9t/wGHy7cZrYZVM2m/cy0Knm/pGN7k7SeKHPZ\nNav0ZSdpDDiJ5Ko1byjKr07+oMTlJ2lE0jPANuDBiGi7/IbhyfGp4mng8Ih4T9K5wJ3Akj6nyZpT\n+rKTNBf4PfDtiHi33+nptAb5K3X5RcReYJmkecAfJR0XEVXvxzWrLDWOLcBhueVD0/eKbjOoGqY9\nIt7NqpwRcR8wQ9L83iWxq8pcdg2VvewkzSD5T/XXEfGHKpuUuvwa5a/s5ZeJiJ3Aw8DyilWFy68s\ngeMpYImkIySNAiuAuyq2uQtYmfYQOAN4JyLe7HVCW9Qwf5I+Jknp69NIym5Hz1PaHWUuu4bKXHZp\nun8OPB8RP66xWWnLr5n8lbz8DkprGkiaDXwO+GfFZoXLrxRNVRExLmkV8ABJD6RbI2KTpKvS9bcA\n95H0DngJeB+4ol/pLarJ/F0EfF3SOLAbWBFpl4hBJ+k3JD1T5kt6A/g+yU260pcdNJW/0pYdcBZw\nGbAxbScH+B5wOAxF+TWTvzKX3wLgl5JGSALeHRFxT7v/d/rJcTMzK6QsTVVmZjYgHDjMzKwQBw4z\nMyvEgcPMzApx4DAzs0IcOMzMrBAHDrMcSfMkXZ1bXijpd1061xfqDdEt6XhJt3Xj3Gbt8HMcZjnp\nQHf3RMRxPTjXY8AFEbG9zjYPAV+JiNe6nR6zZrnGYTbZjcCidMKemySNKZ2gSdKXJd0p6UFJr0pa\nJek7kv4u6XFJB6TbLZL0J0nrJf1V0tGVJ5G0FNiTBQ1JF0t6TsmEO4/kNr2bZAgas4HhwGE22Wpg\nc0Qsi4hrq6w/DvgicCrwA+D9iDgJ+BuwMt1mLfDNiDgF+C7w0yrHOYtk1NXM9cDnI+JE4ILc++uA\nz7SRH7OOK8VYVWYD5OF0wp9dkt4hqREAbAROSIfnPhP4bTouHsDMKsdZALyVW34UuE3SHUB+hNZt\nwMIOpt+sbQ4cZsXsyb3el1veR/J7mgbsjIhlDY6zG/hothARV0k6HTgPWC/plIjYAcxKtzUbGG6q\nMptsF8nc0y1JJwF6RdLFkAzbLenEKps+DyzOFiQtiognIuJ6kppINj/CUqCtSXfMOs2Bwywnvcp/\nNL1RfVOLh7kU+KqkDcAmqs8f/whwkibas26StDG9Ef8YsCF9/xzg3hbTYdYV7o5r1ieSbgbujoiH\naqyfCfwF+HREjPc0cWZ1uMZh1j8/BObUWX84sNpBwwaNaxxmZlaIaxxmZlaIA4eZmRXiwGFmZoU4\ncJiZWSEOHGZmVsj/AVl5YizaGjALAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from bmtk.analyzer.visualization.spikes import plot_spikes, plot_rates\n",
+ "\n",
+ "plot_spikes('network/V1_nodes.h5', 'network/V1_node_types.csv', 'output/spikes.h5', group_key='pop_name')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Or we can plot the rates of the different populations"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAEKCAYAAAAfGVI8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAGVlJREFUeJzt3X2UHXWd5/H3N89OBJPGTIgGiHIiGEDQadm0Aqc16AIT\nHjwi+ERCTiCGdVcFhYVF3LgMIyKLw+4ZhCiYgM4oY0ZhcRUx0jwsHaEzBEhoQlCejHkiMUM4kAeS\n3/5R1aTTdN9+St3b6Xq/zulzq+pW3fr2L537ub/63aqKlBKSpPIaUusCJEm1ZRBIUskZBJJUcgaB\nJJWcQSBJJWcQSFLJDSvyxSPiOWALsBN4PaVUHxF1wE+BScBzwFkppb8UWYckqWvV6BF8JKV0TEqp\nPp+/FFicUpoMLM7nJUk1UotDQ6cDC/PphcAZNahBkpSLIs8sjohngX8nOzR0U0ppfkRsTimNyZ8P\n4C9t8x22nQPMARg9evTfHH744YXVKUmD0dKlS19KKY3rbr1CxwiA41JKqyPir4F7IuKp9k+mlFJE\ndJpEKaX5wHyA+vr61NLSUnCpkjS4RMTzPVmv0ENDKaXV+eN64OfAscC6iJgAkD+uL7IGSVJlhQVB\nRIyOiP3apoGPA8uBO4GZ+WozgTuKqkGS1L0iDw2NB36eDQMwDPinlNKvI+IR4PaImA08D5xVYA2S\nVNHmzZt56aWX2LFjR61L6be6ujrGjx/f6+0KC4KU0h+BoztZvhGYVtR+Jak31qxZw6RJkxg1ahT5\nB9d90s6dO3n66acHVhBI0r7iLW95S61L6LehQ4f2eVsvMSFJ3Wluhm99K3schOwRSFIlzc0wbRps\n3w4jRsDixdDQ0KNNFyxYwNvf/namT5/OvHnzOPPMM/nZz35Ga2srY8eO5XOf+xzHH398wb9A9+wR\nSFIlTU1ZCOzcmT02NfX7Ja+44gpuvPHGTkPgqquu4sILL+S8885jy5YtnHvuubzwwgtccMEFrFy5\nkrvvvptZs2Zx0UUXsW7dun7XAvYIJKmyxsasJ9DWI2hs7PdLXnnllYwdO5avf/3rTJw48Y3lra2t\n3H///TQ0NLB161ZaW1u59tprOfXUU5k1axaHHXYYl112GYsWLdqrA9sGgSRV0tCQHQ5qaspCoIeH\nhSq54oorOPLII9+0fNeuXRxxxBHMmzfvjWUrV65kv/32Y/PmzQCFfLPJQ0OS1J2GBrjssj6FwA03\n3MDcuXO55pprul33iCOOYMiQIVx00UVccMEFvPjii3zjG99g0aJFbNq0iUceeYTzzjuPOXPmcMkl\nl7B+/d65MEOhF53bW7zWkKSitLa28t73vrfWZewVHX+XiFja7hYAXfLQkCTVyNq1a7nxxhvfmD/p\npJOYOnVq1eswCCSpRg488MA9xgNqxTECSSo5g0CSurGEZr7Dt1jC4Dyz2CCQpAqW0MwpTOObXMEp\nTOtVGCxYsIC77roLgHnz5rF8+XLmzZvH2Wefzdy5c3nggQfetM3FF1/c6Wu1bV8ExwgkqYIHaGI7\n29nJTraznQdoYir9O5egq/MIAJ599lkAjjrqKM4991yWLl3KzTffDMD8+fPZsmULJ5xwArNmzepX\nDe3ZI5CkCo6nkRGMYChDGcEIjqex36955ZVXMnfuXP70pz91uc7EiRP56le/SkNDA8uWLQPg7LPP\n5oc//CG//vWv+11De/YIJKmCqTTwf1nMAzRxPI397g1A5R5Bm9GjRwMwfPhwtm3bBsDb3vY2YO+f\nXWwQSFI3ptLQ5wC44YYbuOuuu7j11ls588wz93Jle4dnFksqNc8stkcgSTXjmcWSVHKeWSxJGhAM\nAkkqOYNAkrrR/DB867rssTcWLFjA9OnTmTlzJhHB6tWrAdi4cSPnnHNOAZX2jWMEklRB88Mw7Yx2\n967/BTQc2/Pt586dy/Tp03nhhRdYsGABl19+ObfddtuACgJ7BJJUQdODHe5d/2Dvtv/+97/P+eef\nz4UXXsiSJUtIKbF48WJOPPHEYgruA3sEklRB43Ed7l1/XO+2P//885k+fTqQ3X/42muv5YMf/CBD\nhgycz+EGgSRV0HBsdjio6cEsBHpzWKijGTNmcOihh/LUU0/tvQL3AoNAkrrRcGzfAuDcc8/dY378\n+PG88sore6eovWjg9E0kSTVhEEhSyRkEklRyBoEkdae5Bb71v7LHXlixYgWf/exn+dKXvsQ111zT\np11/4Qtf4KijjurTtj3lYLEkVdLcAtM+Bdt3wIjhsPhfoKHbKzsD8Jvf/IZzzjmHk08+GcjuR/z6\n669z8MEH84lPfIIZM2Zw2mmnsWLFCm655RZmzZrF5MmTWb16NWeccQYf//jHuemmm/a4j8Evf/lL\n7rvvPtavX891111HXV1dv39FewSSVEnTQ1kI7NyZPTY91ONNZ8+ezYMPPsjs2bO5/vrrGTFiBN/9\n7ne58MILATjiiCP42te+Rl1dHWvXrgXgvPPO49vf/jaLFi3q9DWHDh3Krl272LFjB7/97W/7//th\nj0CSKmv8UNYT2E722PihHm+6//77c9VVVwHwkY98hOOO2/NstM5uRzl69GiGDRv2xnxH3/ve97jj\njjtYuHAhr776ah9+oTcrPAgiYijQAqxOKU2PiDrgp8Ak4DngrJTSX4quQ5L6pKE+OxzU9FAWAj08\nLATwi1/8grvvvpthw4ZRX1/Pa6+9xsUXX8xBBx3Eaaed1qPXuPzyy3n00UeZO3cu119/PVOmTOGq\nq66itbV1r12movBbVUbERUA9sH8eBNcAm1JKV0fEpcDYlNJ/rfQa3qpSUlG8VWXBYwQRMRH4W+AH\n7RafDizMpxcCZxRZgySpsqIHi/8BuATY1W7Z+JTSmnx6LTC+sw0jYk5EtEREy4YNGwouU1KZvfba\naxR9dKRoO3fu7PO2hY0RRMR0YH1KaWlENHa2TkopRUSnrZ9Smg/Mh+zQUFF1Siq3CRMmsHr1anbs\n2FHrUvqtr18lLXKw+MPAaRFxCjAK2D8ifgSsi4gJKaU1ETEBWF9gDZJU0ZgxYxgzZkyty6ipwg4N\npZQuSylNTClNAj4N/C6l9HngTmBmvtpM4I6iapAkda8WJ5RdDXwsIlYBJ+bzkqQaqcoJZSmlJqAp\nn94ITKvGfiVJ3fMSE5JUcgaBJJWcQSBJJWcQSFLJGQSSVHIGgSSVnEEgSSVnEEhSyRkEklRyBoEk\nlZxBIEklZxBIUskZBJJUcgaBJJWcQSBJJWcQSFLJGQSSVHIGgSSVnEEgSSVnEEhSyRkEklRyBoEk\nlZxBIEklZxBIUskZBJJUcgaBJJWcQSBJJWcQSFLJGQSSVHIGgSSVnEEgSSVnEEhSyRkEklRyBoEk\nlVxhQRARoyLi4Yh4LCJWRMQ38+V1EXFPRKzKH8cWVYMkqXtF9gi2AR9NKR0NHAOcFBFTgUuBxSml\nycDifF6SVCOFBUHKvJLPDs9/EnA6s
DBfvhA4o6gaJEndK3SMICKGRsQyYD1wT0rp98D4lNKafJW1\nwPgutp0TES0R0bJhw4Yiy5SkUis0CFJKO1NKxwATgWMj4sgOzyeyXkJn285PKdWnlOrHjRtXZJmS\nVGpV+dZQSmkzcC9wErAuIiYA5I/rq1GDJKlzRX5raFxEjMmn3wJ8DHgKuBOYma82E7ijqBokSd0b\nVuBrTwAWRsRQssC5PaV0V0Q0A7dHxGzgeeCsAmuQJHWjsCBIKT0OvL+T5RuBaUXtV5LUO55ZLEkl\nZxBIUskZBJJUcgaBJJWcQSBJJWcQSFLJGQSSVHIGgSSVnEEgSSVnEEhSyRkEklRyBoEklVyPgiAi\njs+vItp+2QeKKUmSVE097RHcDfwuIv663bIfFFCPJKnKehoEK4HvAPdFxIfyZVFMSZKkaurp/QhS\nflOZlcBPI+IWurjXsCRp39LTHkEApJRWASfkP+8rqihJUvX0qEeQUnp/u+lXgLMi4uDCqpIkVU3F\nIIiI/03lQ0Bf2rvlSJKqrbseQUu76W8C/73AWiRJNVAxCFJKC9umI+Ir7eclSYNDb84s9ltCkjQI\neYkJSSq57gaLt7C7J/BXEfFy21Nk5xbsX2RxkqTidTdGsF+1CpEk1YaHhiSp5AwCSSo5g0CSSs4g\nkKSSMwgkqeQMAkkqOYNAkkrOIJCkkjMIJKnkDAJJKrnCgiAiDoqIeyPiyYhYERFfzpfXRcQ9EbEq\nfxxbVA2SpO4V2SN4HfhqSmkKMBX4YkRMAS4FFqeUJgOL83lJUo0UFgQppTUppX/Lp7cArcA7gdOB\nthvcLATOKKoGSVL3qjJGEBGTgPcDvwfGp5TW5E+tBcZ3sc2ciGiJiJYNGzZUo0xJKqXCgyAi3gos\nAr6SUnq5/XMppUQXdz5LKc1PKdWnlOrHjRtXdJmSVFqFBkFEDCcLgR+nlP41X7wuIibkz08A1hdZ\ngySpsiK/NRTAzUBrSum6dk/dCczMp2cCdxRVgySpexXvUNZPHwbOAZ6IiGX5sv8GXA3cHhGzgeeB\nswqsQZLUjcKCIKX0INm9jTszraj9SpJ6xzOLJankDAJJKjmDQJJKziCQpJIzCCSp5AwCSSo5g0CS\nSs4gkKSSMwgkqeQMAkkqOYNAkkrOIJCkkjMIJKnkDAJJKjmDQJJKziCQpJIzCCSp5AwCSSo5g0CS\nSs4gkKSSMwgkqeQMAkkqOYNAkkrOIJCkkjMIJKnkDAJJKjmDQJJKziCQpJIzCCSp5AwCSSo5g0CS\nSs4gkKSSMwgkqeQMAkkqucKCICJuiYj1EbG83bK6iLgnIlblj2OL2r8kqWeK7BEsAE7qsOxSYHFK\naTKwOJ+XJNVQYUGQUrof2NRh8enAwnx6IXBGUfuXJPVMtccIxqeU1uTTa4HxXa0YEXMioiUiWjZs\n2FCd6iSphGo2WJxSSkCq8Pz8lFJ9Sql+3LhxVaxMksql2kGwLiImAOSP66u8f0lSB9UOgjuBmfn0\nTOCOKu9fktRBkV8f/WegGTgsIv4UEbOBq4GPRcQq4MR8XpJUQ8OKeuGU0me6eGpaUfuUJPWeZxZL\nUskZBJJUcgaBJJVcYWMEkqTeW0IzP+ZW1rEWgPEcyOeYwVQaCtunQSBJfdD2ht3Kk2xkA5M5jMm8\nh/tpYiSjqKOOTWxiIxs4gOyk2O6mX+ZlnuAxOp5rexs/5NfcW1gYGASSBoz2n4bbv4n25U0VYBtb\nOZfZzGZOl6/dk9fquP/O3rCforXCb9bay+k9bWc7D9BUWBBEdqWHga2+vj61tLTUugxJe0HHQx9t\nb7DDGclyHiN1feWZPjuUyfyRZwp57WoYycg+9QgiYmlKqb679ewRSKqam5nPl/lP7GRnVff7B1ZV\ndX97w/s4hkOY5BiBpMFjCc18hS9WPQSKdiiTeZ3tvMiL7GIXQXAUR3MIk/p9OKtaDALtO5qboakJ\nGhuz+aYm2Lw5exw1CurqYNMm2LAB2q5Y25vpSttPmQIzZkBDhU9lndXX2Fh5mxJ5gCZ2satH6wbB\nkRzNDrb1eYygq4HXjq/dk9fqbP8d37CX0MwDNHE8jYV+ei+CYwSDTPPDcOtP4MmVsGEjjDsA6sbC\npr/snofOp+vSRg5cv5wZ7/o9DRcdn72BNTfDrbfCk0/u+Qa5dSvMng1zevCppbkZrrkGVq7s+5vy\nyy/D44/Drl0Qka1bi7/dY46B/fd/c80jR8ITT8DO/NPukCG7az366M636S582k/3JIgGuCU0cwrT\n2MrWN47VD2EIR/C+Pd7w9+ahkFp8FXMg6ekYgUEwiDQ/DI2nwvbtfdk6vfHBaSTbuPfVaTRM2gDP\nPFP5DXfyZBg2rOs3uOefz360d3QMoo5BUleXrXfggQMyONo+NddxAJvYuE9+et6XOFhcQk0Pwo4d\nfd06IP+gvT0Np2nICTSs6sHFYVflg3Ct7b761lrpa3Tql2XLdk93bOeO8zfdBIccAgcfnM33thfS\ncbp9L7Ctp7h27Z6v1bHn0rYewIwZTG1o8I1/ALJHMIjs1R7BKx+hYeeSvVlecdoOv0yaVMwYweuv\n7w48Zb3A7nqKxxyTPT722J7rnXACXH31gOupDFYeGiqpfo8RPPcIM565koYdD+35wu0PAb388pv/\ng/fEpElv/nTa1XRnb8pbt2aDr08/DX/+czY9Zkx1BmTnz4ebb84GpbuqecqU7LBNUxO84x3wnvfs\nHsjuze852INo+HC47z7DoAoMAvVd+wHirgaFuxpE7uwN7rDD4JJL/I/fH10FUft2Hjly94D6QPf3\nfw+XXVbrKgY9g0Aqo7avsB5wAPzqV51/U6svYwRd9QLbDstt29bznos9gqoxCNqb/yO4+cfwjglw\n8kfh0ceBgPcfBb9aDCv/COPyb1ts2LR7eus2mP1ZmPP5fv8OXWpugaaH4IC6rJY/r8v2edTh2fLG\nD0FDt/+OUvHaDxBD599M6thzaetRQrb8He+wd1hFBgFkb7KX/h3c//v+FTD5XTBseBYQbxxw39R5\neHQ33X77kcPh8dbOu/IR2aevIUPguGO7f+3DDoX3vBuWLYdPTi82vCTtEwyC5hZo/GRfv0Kz77vp\nO4aBVHI9DYLBe4eypof686X6fd+iu2pdgaR9xOA9oazxQ9mgVPseQdtp/+1FwCET4eB3ZvNth1xe\n3gKPreh4mZJ9xyen17oCSfuIwRsEDfXQtAhuvR0ImPGpbHnbwGzbgPGMT3U9GNvckm3/5KrOj/H3\nd4ygbR7gwHG7a7z1dlj70p61VNrn86vhxdVZyA0ZAl+7wMNCknps8I4RlE3bt4/8lpGknNcaKpuG\negNAUp8M3sFiSVKPGASSVHIGgSSVnEEgSSVnEEhSyRkEklRyBoEklZxBIEklZxBIUskZBJJUcjUJ\ngog4KSJWRsQzEXFpLWqQJGWqHgQRMRT4R+BkYArwmYiYUu06JEmZWvQIjgWeSSn9MaW0HfgJcH
oN\n6pAkUZurj74TeLHd/J+A/9BxpYiYA8zJZ1+JiJV93N/bgZe6Xav6rKv3Bmpt1tU71tU7/anrkJ6s\nNGAvQ51Smg/M7+/rRERLT67HXW3W1XsDtTbr6h3r6p1q1FWLQ0OrgYPazU/Ml0mSaqAWQfAIMDki\n3hURI4BPA3fWoA5JEjU4NJRSej0i/jNwNzAUuCWltKLAXfb78FJBrKv3Bmpt1tU71tU7hde1T9yz\nWJJUHM8slqSSMwgkqeQGdRAMpEtZRMRzEfFERCyLiJZ8WV1E3BMRq/LHsVWo45aIWB8Ry9st67KO\niLgsb7+VEfEfq1zXvIhYnbfZsog4pQZ1HRQR90bEkxGxIiK+nC+vaZtVqKumbRYRoyLi4Yh4LK/r\nm/nyWrdXV3XV/G8s39fQiHg0Iu7K56vbXimlQflDNhD9B+DdwAjgMWBKDet5Dnh7h2XXAJfm05cC\n365CHScAHwCWd1cH2SVAHgNGAu/K23NoFeuaB3ytk3WrWdcE4AP59H7A0/n+a9pmFeqqaZsBAbw1\nnx4O/B6YOgDaq6u6av43lu/vIuCfgLvy+aq212DuEewLl7I4HViYTy8Ezih6hyml+4FNPazjdOAn\nKaVtKaVngWfI2rVadXWlmnWtSSn9Wz69BWglOzu+pm1Woa6uVKuulFJ6JZ8dnv8kat9eXdXVlar9\njUXEROBvgR902H/V2mswB0Fnl7Ko9B+laAn4bUQszS+fATA+pbQmn14LjK9NaV3WMRDa8L9ExOP5\noaO27nFN6oqIScD7yT5NDpg261AX1LjN8sMcy4D1wD0ppQHRXl3UBbX/G/sH4BJgV7tlVW2vwRwE\nA81xKaVjyK66+sWIOKH9kynr99X8u7wDpY7c98gO7R0DrAH+Z60KiYi3AouAr6SUXm7/XC3brJO6\nat5mKaWd+d/6RODYiDiyw/M1aa8u6qppe0XEdGB9SmlpV+tUo70GcxAMqEtZpJRW54/rgZ+TdefW\nRcQEgPxxfY3K66qOmrZhSmld/p93F/B9dneBq1pXRAwne7P9cUrpX/PFNW+zzuoaKG2W17IZuBc4\niQHQXp3VNQDa68PAaRHxHNnh649GxI+ocnsN5iAYMJeyiIjREbFf2zTwcWB5Xs/MfLWZwB21qK9C\nHXcCn46IkRHxLmAy8HC1imr7j5D7BFmbVbWuiAjgZqA1pXRdu6dq2mZd1VXrNouIcRExJp9+C/Ax\n4Clq316d1lXr9kopXZZSmphSmkT2HvW7lNLnqXZ7FTUKPhB+gFPIvk3xB+DyGtbxbrKR/seAFW21\nAAcAi4FVwG+BuirU8s9kXeAdZMcXZ1eqA7g8b7+VwMlVrus24Ang8fw/wIQa1HUcWbf8cWBZ/nNK\nrdusQl01bTPgfcCj+f6XA9/o7m+9xnXV/G+s3f4a2f2toaq2l5eYkKSSG8yHhiRJPWAQSFLJGQSS\nVHIGgSSVnEEgSSVnEEh9FBH/IyJO7GR5Y9tVJKV9QdVvVSkNFimlb9S6BmlvMAikHoiIK4DPAxvI\nLvq1FDiS7ASgn0XESWQXD3sVeLBmhUp94KEhqRsR8UHgk8DRZBcNrO/w/Ciy69ScCvwNcGC1a5T6\nwyCQuvdh4I6U0taUXfv//3R4/nDg2ZTSqpSdqv+jqlco9YNBIEklZxBI3ft/wKn5fW/fCkzv8PxT\nwKSIODSf/0xVq5P6ycFiqRsppUci4k6yK1SuI7ta5b+3e35rfte5X0bEq8ADZPcRlvYJXn1U6oGI\neGtK6ZWI+CvgfmBOyu8ZLO3r7BFIPTM/IqYAo4CFhoAGE3sEklRyDhZLUskZBJJUcgaBJJWcQSBJ\nJWcQSFLJ/X+gTbzuE2g/XgAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD9CAYAAACsq4z3AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAD0FJREFUeJzt3X+s3XV9x/Hnyxa0FCcQ7ppazDBbh2MiRa9Mx+biAGGO\n2E7HD7e5mjDrEs10PzC4mbklyyQxc1sWt1Cno8mUgQxpY5ZpVyFsCwIXQRDQ1Qg4ukIvIipZpVDe\n++N8O6/l3p5z7z3n3t5Pn4+kOd/v5/s597zPN72v7+d8vt/zvakqJElL3/MWuwBJ0nAY6JLUCANd\nkhphoEtSIwx0SWqEgS5JjVg+SKckDwLfA/YDz1TVeJITgGuAk4EHgYuq6tujKVOS1M9sRuivr6p1\nVTXerV8O7KiqtcCObl2StEjmM+WyHtjSLW8BNsy/HEnSXGWQb4omeQD4Dr0plyuranOSJ6rquG57\ngG8fWD/ouZuATQArV6581cte9rJh1i9Jzbvjjjseq6qxfv0GmkMHfq6qdiX5UWB7kq9O3VhVlWTa\nI0NVbQY2A4yPj9fExMSALylJAkjy0CD9Bppyqapd3eMe4DPAmcCjSVZ3L7Ya2DO3UiVJw9A30JOs\nTPLCA8vAG4CvANuAjV23jcDWURUpSepvkCmXVcBnetPkLAc+VVX/muR24NoklwIPAReNrkxJUj99\nA72qvgGcPk37t4CzR1GUJGn2/KaoJDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREG\nuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBL\nUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1\nwkCXpEYY6JLUiIEDPcmyJHcm+Wy3fkKS7Ul2do/Hj65MSVI/sxmhvwe4f8r65cCOqloL7OjWJUmL\nZKBAT3IS8MvA309pXg9s6Za3ABuGW5okaTYGHaH/FfA+4Nkpbauqane3/AiwaronJtmUZCLJxOTk\n5NwrlSQdUt9AT3IBsKeq7pipT1UVUDNs21xV41U1PjY2NvdKJUmHtHyAPmcBb0ryRuAFwI8k+Ufg\n0SSrq2p3ktXAnlEWKkk6tL4j9Kp6f1WdVFUnA5cAX6iq3wC2ARu7bhuBrSOrUpLU13yuQ78CODfJ\nTuCcbl2StEgGmXL5f1V1E3BTt/wt4OzhlyRJmgu/KSpJjTDQJakRBrokNcJAl6RGGOiS1AgDXZIa\nYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREG\nuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBL\nUiMMdElqhIEuSY0w0CWpEX0DPckLktyW5MtJ7k3yp137CUm2J9nZPR4/+nIlSTMZZIT+FPCLVXU6\nsA44P8lrgMuBHVW1FtjRrUuSFknfQK+eJ7vVo7p/BawHtnTtW4ANI6lQkjSQgebQkyxLchewB9he\nVbcCq6pqd9flEWDVDM/dlGQiycTk5ORQipYkPddAgV5V+6tqHXAScGaSlx+0veiN2qd77uaqGq+q\n8bGxsXkXLEma3qyucqmqJ4AbgfOBR5OsBuge9wy/PEnSoAa5ymUsyXHd8grgXOCrwDZgY9dtI7B1\nVEVKkvpbPkCf1cCWJMvoHQCurarPJrkFuDbJpcBDwEUjrFOS1EffQK+qu4Ezpmn/FnD2KIqSJM2e\n3xSVpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMM\ndElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCX\npEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIa0TfQk7wkyY1J\n7ktyb5L3dO0nJNmeZGf3ePzoy5UkzWSQEfozwO9X1anAa4B3JTkVuBzYUVVrgR3duiRpkfQN9Kra\nXVVf6pa/B9wPrAHWA1u6bluADaMqUpLU36zm0JOcDJwB3Aqsqqrd3aZHgFUzPGdTkokkE5OTk/Mo\nVZJ0KAMHepJjgX8G3ltV3526raoKqOmeV1Wbq2q8qsbHxsbmVawkaWYDBXqSo+iF+Ser6vqu+dEk\nq7vtq4E9oylRkjSIQa5yCfBx4P6q+siUTduAjd3yRmDr8MuTJA1q+QB9zgLeBtyT5K6u7Q+BK4Br\nk1wKPARcNJoSJUmD6BvoVfUfQGbYfPZwy5EkzZXfFJWkRhjoktQIA12SGmGgS1IjDHRJaoSBLkmN\nMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgD\nXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAl\nqREGuiQ1wkCXpEYY6JLUiL6BnuQTSfYk+cqUthOSbE+ys3s8frRlSpL6GWSEfhVw/kFtlwM7qmot\nsKNblyQtor6BXlU3A48f1Lwe2NItbwE2DLkuSdIszXUOfVVV7e6WHwFWzdQxyaYkE0kmJicn5/hy\nkqR+5n1StKoKqENs31xV41U1PjY2Nt+XkyTNYK6B/miS1QDd457hlSRJmou5Bvo2YGO3vBHYOpxy\nJElzNchli1cDtwCnJHk4yaXAFcC5SXYC53TrkrRkXHzlLVx85S2LXcZQLe/XoareOsOms4dciyRp\nHvymqCQ1wkCXpEYY6JLUCANdWiJaPImn4TLQD+IvjdS+G+7cxZ3ffIJbH3ics674AjfcuWuxSxoK\nA13SEeWGO3fx/uvvYd/+ZwHY9cRe3n/9PU2EuoEu6Yjy4c99jb1P7/+htr1P7+fDn/vaIlU0PAa6\nRsopLB1u/ueJvbNqX0oMdElHlBcft2JW7UuJga6RafXEk5a2y847hRVHLfuhthVHLeOy805ZpIqG\nx0DXSLR84mkxeHAcng1nrOFDbz6No5f14m/NcSv40JtPY8MZaxa5svnrey8XaS4OdeKphV+chTTT\nwRFwX87RhjPWcPVt3wTgmne+dpGrGR5H6BqJlk88LbSWr8rQcBnoGomWTzwtNA+OGpSBrpFo+cTT\nQvPgqEEZ6BqJlk88LTQPjhqUJ0U1Mq2eeFpoBw6C77vubvbtf5Y1x63gsvNO8eA4Ty3+nzTQpzhw\nadi+/c9y1hVf8JdGhw0PjhqEgd7x0rDRMHykheMcesdLwyQtdQZ6x0vDJC11BnrHS8MkLXUGesdL\nw3S4u+adr/WchA7Jk6IdLw2TtNQZ6
FN4aZikpcwpF0lqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQI\nA12SGmGgS1IjDHRJaoSBLkmNmFegJzk/ydeSfD3J5cMqSpI0e3O+l0uSZcBHgXOBh4Hbk2yrqvuG\nVdxi8B4ukpaq+YzQzwS+XlXfqKp9wD8B64dTliRptuZzt8U1wH9PWX8Y+JmDOyXZBGzqVp9MshT+\nptuJwGOLXURD3J/D474crqWyP39skE4jv31uVW0GNo/6dYYpyURVjS92Ha1wfw6P+3K4Wtuf85ly\n2QW8ZMr6SV2bJGkRzCfQbwfWJnlpkqOBS4BtwylLkjRbc55yqapnkrwb+BywDPhEVd07tMoW15Ka\nIloC3J/D474crqb2Z6pqsWuQJA2B3xSVpEYY6JLUiOYCPckfJbk3yd1J7krynGvj5/GzX5fkS0me\nSfKrw/q5rUiyv9vnX0ny6STHJLkxyXkH9Xtvkr9brDqlVjUV6EleC1wAvLKqXgGcww9/+Wm+vgm8\nHfjUEH9mS/ZW1bqqejmwD/ht4Gp6V0BNdUnXfkRJ8uQ0bX+S5A+65auSPNAdFO9K8jsLX+XSMsx9\nmuRfkhzX5/VuSnLYXrc+8i8WLbDVwGNV9RRAVT0GkOTVwF8DK4GngLOBtwBvAo4Bfhz4TFW9r+v/\nZNf/AmAvsL6qHq2qB7vtz0590STHAluB44GjgA9U1daRvtPD378DrwA+APxZkqOral+Sk4EXd9v1\nXJdV1XWLXURjBtqnVfXGhShmlJoaoQOfB16S5L+S/G2SX+iukb8GeE9VnU5v1L63678OuBg4Dbg4\nyYEvSq0Evtj1vxl4R5/X/T7wK1X1SuD1wF8kyVDf2RKSZDnwS8A9VfU4cFu3Dr3R+bXl5VXzkuQN\nSW7ppgA/neTYJC/q7n56Stfn6iTv6JbP7/p+OcmOxa3+8JTkwSQnJjk5yf1JPtZN334+yYopXS9M\ncluXMz+/aAVPo6lAr6ongVfRu3fMJL0gfyewu6pu7/p8t6qe6Z6yo6q+U1XfB+7jB/dL2Ad8tlu+\nAzi5z0sH+PMkdwP/Ru8+N6uG8qaWlhVJ7gIm6E1PfbxrnzrtckROt8zCh6dMD5w2XYckJ9L75HNO\nN4iYAH6vqr4DvBu4KsklwPFV9bEkY8DHgLd0g5QLF+atHDb67tNprAU+WlU/DTxB7xP9Acur6kzg\nvcAHh1zrvLQ25UJV7QduAm5Kcg/wrkN0f2rK8n5+sD+enjKCnNo+k18HxoBXVdXTSR4EXjDL0luw\nt6rWTdO+FfjLJK8EjqmqOxa4rqVkkOmB1wCnAv/ZfRA8GrgFoKq2J7mQ3q2tT5/S/+aqeqDr8/go\nCj+MzWUa64GquqtbPnhQd/0M7YuuqUDvPmo+W1U7u6Z1wP3A+UleXVW3J3khP5hyGZYXAXu6MH89\nA94Z7UhRVU8muRH4BI7OhyHA9qp663M2JM8Dfgr4X3rndB5e4NpacfBgb8U02wYZ7C2opqZcgGOB\nLUnu66Y/TgX+mN48+d8k+TKwnTmOnpO8OsnD9D6yXpnkwK0OPgmMd58IfhP46jzfR4uupjdiNNDn\n74vAWUl+AiDJyiQ/2W37XXqDmF8D/iHJUV3/1yV5adf/hEWoWQvgsDq6zFf3Uf5np9n0GL2PnVNd\n1f078NwLpiwfO2X5OuC6bvl2eneVPPh1HwOO+D91NHW/TbPtBnojyyPZMd2A4ICPzOWHVNVkkrcD\nVyd5ftf8ge5E/G8BZ1bV95LcTO+Kqw+m93cJru9G8Hvo/aWxFgxln7bCe7lIUiNam3KRpCNWU1Mu\nUmuS3Ao8/6Dmt1XVPYtRTwta3qdOuUhSI5xykaRGGOiS1AgDXZIaYaBLUiP+D+QhVrhzpEQtAAAA\nAElFTkSuQmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "plot_rates('network/V1_nodes.h5', 'network/V1_node_types.csv', 'output/spikes.h5', group_key='pop_name', smoothed=True)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "In our simulation_config.json in the reports section, we can see we also record the V_m (i.e membrane potential) of a select sample of cells. By default these files are written to an hdf5 file with the same name as the report (membrane_potential.h5), and we can use the analyzer to show the time course of some of these cells."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZEAAAEWCAYAAACnlKo3AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXWYXNXdxz9nZt03u5tNNrZJiHsIBEugeGlxh0JpX6xK\njfcF3valLdACbSmlghQtFGuBFgsSCARJAkmIu26yrrMyPnPeP869M3d2dEc20vt9nnl29t4755xr\nPxchpcSECRMmTJhIBpYDvQATJkyYMHHowmQiJkyYMGEiaZhMxIQJEyZMJA2TiZgwYcKEiaRhMhET\nJkyYMJE0TCZiwoQJEyaShslETPzHQwghhRBHHOh1HGoQQjwkhPhZgsd+IIS4NtNrMjH4MJmICROH\nIYQQtRpzzErTeNcIIT42bpNS3iilvCMd45s4dGEyERMmBoB0EWUTJg4XmEzExEEBIcQeIcTNQoh1\nQog+IcRjQohqIcQiIUSPEGKxEKLccPwxQohPhRBdQoi1QoiTDPs+EELcqe3vFUK8JoSoEEL8XQjR\nLYT4XAhR228JZwkhdgkh2oQQvxFCWLSxrhFCfCKE+L0Qoh34uRBivBDifSFEu3b834UQZf3O5Sfa\nudiEEC8IIfIM+78qhFijrf1TIcTMGNdFCiG+H2VtFiHET4UQe4UQLUKIvwkhSrWfLtX+dmnX4Fjt\nN98UQmwWQnQKId4WQozpN9eNQojt2tr+LBSmAA8Bx2pjdWnHPymEuFP7Xi6EeF0I0aqN/boQYmSC\nt9/EoQwppfkxPwf8A+wBlgPVwAigBVgNzAHygPeB27VjRwDtwFkoQeg07f8qbf8HwA5gPFAKbAK2\nAacCWcDfgCcMc0tgCTAEGK0de6227xrAC3xP+20+cIQ2Zy5QhSLY9/c7l8+AGm3MzcCN2r452rnN\nB6zA17Xjc6Ncl1hr+6Z2nuOAIuBl4GltX6322yzDWOdqx0/RzuWnwKf95nodKNPmagXONFyHj/ut\n7UngTu17BXAhUAAUA/8A/mU49gN93ebn8PqYmoiJgwl/lFI2SynrgY+AFVLKL6SUTuAVFAEG+Brw\nppTyTSmlX0r5LrASxVR0PCGl3CmltAGLgJ1SysVSSi+KwM0hFPdIKTuklHXA/cDlhn0NUso/Sim9\nUkqHlHKHlPJdKaVLStkK3Aec2G+8B6SUDVLKDuA1YLa2/XrgYSnlCimlT0r5FOACjolxXaKt7Urg\nPinlLillL3ArcFkMk9uNwK+llJu16/ArYLZRGwHullJ2aXMtMaw7JqSU7VLKl6SUdillD3AX4dfE\nxGEIk4mYOJjQbPjuiPB/kfZ9DHCxZnLp0swrJwDDkxhLxz7D970oLSLSPjQz2/NCiHohRDfwDFDZ\nb7wmw3d7v7X/uN/aR/Wbrz+ira1G+9+4LwulzUXCGOAPhnk7AIHS7OKtOyaEEAVCiIc101o3Sjsr\nE0JYE/m9iUMXJhMxcShiH8psU2b4FEop705hzFGG76OBBsP//Utd/0rbNkNKWYLSjESC8+wD7uq3\n9gIp5XNJrK0BxRiM+7wohhmpPPc+4IZ+c+dLKT9NYN3xyn3/GJgEzNeuyUJte6LXxcQhCpOJmDgU\n8QxwthDiDCGEVQiRJ4Q4KUVH7s2ac3gUcBPwQoxji4FewCaEGAHcPIB5/grcKISYrzmtC4UQXxFC\nFCextueAHwohxgohilDM7QXNVNUK+FH+Eh0PAbcKIaYBCCFKhRAXJ7juZmCkECInyv5ilIbXJYQY\nAtye4LgmDnGYTMTEIQcp5T6Uk/g2FLHchyLkqTzP/wZWAWuAN4DHYhz7C2AuYNOOfTnRSaSUK4Hr\ngD8BnShH9zVJru1x4GmU6Wg34EQFACCltKP8Ep9o5qtjpJSvAPcAz2smpw3AlxNc+vvARqBJCNEW\nYf/9qKCDNlSAxFsJjmviEIeQ0mxKZcLEwQohhAQmSCl3HOi1mDARCaYmYsKECRMmkobJREyYMGHC\nRNIwzVkmTJgwYSJpmJqICRMmTJhIGod9MbnKykpZW1t7oJdhwoQJE4cUVq1a1SalrIp33GHPRGpr\na1m5cuWBXoYJEyZMHFIQQuyNf5RpzjJhwoQJEynAZCImTJgwYSJpmEzEhAkTJkwkDZOJmDBhwoSJ\npGEyERMmTJgwkTRMJmLChAkTJpKGyURMmDBhwkTSMJmIiXC4euGf34QP74XBLosjJSy6BeqWp39s\njxNeuRE6Ewp/jw1XDzx7GSx/MPWxjGhcC6/9APz+9I679Dew5c30jtkf29+FJb/K3PhSwpv/DXUr\nMjfHYGHZn+GFr0Ff+4FeScowmYiJcLz+Q9jwEiy5C3a8N7hzb/oXrHgQHj8j/WMvuRPWPgd/mJn6\nWK/dBNsWwVu3gK0+9fF0PLwQVj0Bq59K35i2enj/Tnj+8vjHpoK/XwQf3gOtWzMz/vZ34bOH4fHT\nMzP+YEFKePs22PwaPH/FgV5NyjCZiIlwdNUFv/s9gzu3PYOSWU9T/GMShW1/8HsmrpGjI31jeZ3p\nGysReOyZGdfZlZlxBxt+X/C7bd+BW0eaYDIRE+Hwe4PfLdmDO3cmzWfpHFsazE0iE69RGluTyzSb\nxg4UDpfzML5fwnrg1pEmmEzERDhCmMggPyIZJRTpZCKGsTLCRNKIw6Xdw+FyHiFMJI3CwgHCQf70\nmzggkAZ1e7AJpFHVTzeMRChV5+yBvEYDxmFCfA+X8whhIgf7sxMfh/4ZmEg/jIR8sNVtmUkmYtBy\ntr+T2lghzC4D0mQ6JdTDxQx0uJyH8dmxmOYsE4cjQsxZg81EBsmclaoz3EgIMrHmtPpvDhMJ/rBh\nIoZnz9RETByW8BkJ7CDbbAfLnOXqTW0sI6M96InbIDOR127KzLiHCzM0zVkmDndI3yCH9erzSskX\ndWkMbQXe2tDItuYefYbgDncamUgmiPQhZM7y+vw89vHu4IbGtZmZaLDDzTMFMzrLxOGOPqchr2AQ\npewVuzt4f1Nj2sZr6XFy4zOrOf33S9WG/1RNJMMS/NPL93LH65syOgcAPm/8Yw4F+A+loIz4OPTP\nwETaIUI0kcEzITTaHOSK9EmbS7e1AZBliSDVu3vCtw0EmfaJpBWGe9iyOe2jt/W6wjdmQps9LDUR\nM8TXxGEIizwwUnZHn4dc0kco9rT1AXDOrBq1wXgu7hSzqkM0kUww2jQSFyPD+9e30jeuBqcnwjPi\n6Ez7PBlhTP3w6Y42vqjLwNqNOJCBKxmAyURMhMFqlPgyxEQ6+tzc+fom2g1SbEefizzcaZtjY4MN\nAEskTcTVndrgmWYiS3+bvrGM97B9V/rG1eDyRgiGsKfXtwWEXvPelrQPX9/l4IpHV3DVY5+lfewQ\n+MzoLBOHObLILBNZtbeTX7y2kUc/3s0jS4NE7c9LdqaVidR3OQDw60TeSOydtpTG9mfaJ5Kquc0I\noybidaRvXA2uSJpIJmqgGYnvx/enf
fidLcpP1uvKrO/F6zbcA5OJpB9CiJ8LIeqFEGu0z1mGfbcK\nIXYIIbYKITJQ5tUEgMVoQ8+AlH3D0yv595oGAB5euos7Xt9Ej1MRiHT6RHqcihjop+A3JjI6U9NE\nfN5B8Bul69obz9uXPiatw+72EXYN0llAUodx7RkoxthoSz+DjQSPy8hEDn1zVtaBXkAU/F5KGaLP\nCyGmApcB04AaYLEQYqKUmUxxNpFuJuL3S9p6QwnZYx/v5qPtrQChPpGGNVAzO+m5ejUmomsiLV19\nDNN3eh3gdUFWblJjS1/6HetSylBPiKsH8kpSH9ifWcna5vBgpd81yIQmYjyPDPhH6ruCUYkOt4/8\nnMwQeI/LQb7+j6mJDCrOBZ6XUrqklLuBHcDRB3hNhz96m9M6XIc9yEAmDC3i9e+dAMC25l5GlOWH\nmrO2Lkp6Hr9f0uvWmYjaJj39JM0UtBFplLzTRDC3NfcLO07Tte+xR4ieSiMiMhFHBsq2G5lIBiK1\nGrqCz0enPf0am44Qc1Zfa8bmGSwcrEzke0KIdUKIx4UQ5dq2EYCx+P5+bVsYhBDXCyFWCiFWtrYe\n+jdpMCGlZIe/JrihdUtax2/pVgRtXFUhd184g+kjSpk5shSAr84cjh2DZpCdH2mIhNDr9gbNWBoX\n6cwfA4BX5KgdKfhFvrBMD/6TpuS6W19eF7ohTUxkT2s/ZpnmqgA2hwdLfyaSid4fWXnB7xmobLCh\nPvg8dPRlkIkYzVnWQW61kAEcECYihFgshNgQ4XMu8CAwDpgNNAK/G+j4UspHpJTzpJTzqqqq0rz6\nwxsdfW58WFjkOwpHwYi0ayItPcpk8JuLZnLkmCEAXLdgHCdNquLrx9WSi4d1/rG4LXkpSWm6PwSC\n5iyfX9Iiy3ht8j1qR5KEzu+XdHiy2ekfrhhSGppdSSlZo2XrL81eoDam6drvalFM5PPiU9SGNGsJ\n3U4PWSiifqfnSiisyowm4vNAThGMPi7tIcQ+v2RHSy9H1SqZNZOaiE/TiJf6ZkBfW8bmGSwcEJ+I\nlPLURI4TQvwVeF37tx4YZdg9UttmIo3Y1+mgWtjp8RfgzKskP53dAIGWHqWJDC0OSpVnz6rh7Fk1\nOD0+8nHjIJe+7ApyUphbd9QDLNrQRGuPC+l14pLZiPwytSNJQrSjtZds6cYh1DpL00Dst7f0Bgjx\nfr9irvSkh4nUtatIL3t2hdpgb4fCirSMDUoTKdY0ET8WZF4ZIhOaiM8N1hwoGALtO9M69JItLXj9\nkpkjy/h8T2dqmoiU6hOlF4/PpQSp/QwF+0alVR3C+SIHnTlLCDHc8O/5wAbt+6vAZUKIXCHEWGAC\nkOGA7v887GrtpYQ+uinAlTc07fH4rRoTqSoOd2gLAfnCiUPm0pNVMWBJ3Mg4jJoIwDef/Bw8Tpxo\nRAiSYiJSSq56bAV5uHGSQ2/2wNcZCc+uqCPXoghxo7dIdZRMkybSYlOJlTarzjzTGzklJWRpTMSL\nBfLLMpMn4nNpTKQirY57KSXX/m0lAFOHq0CGziSYSJPNqfwq7/0S/jI/alCKXyu5s18MU0EZh7hf\n5GCMzrpXCDEbFTO4B7gBQEq5UQjxIrAJ8ALfMSOz0o8XV+zhAuGiRxbgyM2BzpVpHb+l20lJXhZ5\n2eGSl0UI8nHTSA492UXQm5ii6fb6Of6e92ntcfHglXP58ozhgcgsHevrbTDEgZNsHNZitTEJJrKl\nqYfmbhdVxX463Ln0ZA9Ji8awuq6T+WNKoBHsXoEsrUakgYl4fX46ehyQDTaLxkTSSOB1id1q1EQK\nqxEd6dUUAPC6ISsHCisVE5EyLWVDjAENp0wZCkCnfeCO+4se+hSH28cq331qg6MzKLAYYW/HKbNp\nsGjyck8TFA8LP+4QwUGniUgpr5JSzpBSzpRSniOlbDTsu0tKOV5KOUlKmXzojgkAuuxu1u+38fjH\nu/H5JS6vjy11inD3UECDr0S9CN70Rfe09LgYWpIXcZ9VCPJx4SCX7qzEifPutr6AhvPiShV7YXMo\nIpCTFXzEPU47TnJwWLWw2SSYSF2HkurHlFhwkaOYSG9qJj+H28fGhm5m1RSqdWJFFg1NiybSaHMG\nQpC7Rak2YXqYyJ62Pube8S5AwLHuxYq/qDrlaxIRPhdYc5UmIn1pc96fcb8q0PnXq+dRVpBDYY51\nwAmHHp+f/Z0O2o0aTBSTm7S300Exe12aMJNmv+Ng42DURA4p2OwesrMEBTmH1qX0+vwsvHcJ3ZrE\n/uxndfzg1AkUSuX0s5NHh9CkqN5mKBudlnlbelwMjWDKAlWepKYQnL25dGdVgMsGHkfcKC1jaOae\ndkXkuzXT1os3HMv25h5u/uc68Dpxylx8WCG3NCmJvKVb2bOz/U5cogSZVRFktEnmnOxo6cXnl0yv\nVszVTTaysBq698X5ZXzUddixar6WDosW6JgmU9AHW4OmzjvOmQTvgA8L/sLUr0lE+DxqvALdt9MB\n+eWxfxMHTk/QmDG1RgkXxXnZIabRRKALMSFo3w6jjgrbbHF00CmLaZGaZniIM5GDThM51PDNpz5n\n6v+9zU//tX7Q5rS7vVz56HL+uWp/0mM0dTsDDAQUIfvus19QKBSR9GQXKW0A0uoXaelxRmUiAFk+\nxTS6rQYGFgM2h4dvPPk5AF87ZjS72/rocXqwaeaIqcNLWDhRRejpfgyvXyozQxLEtKNPjWv1OXCL\nXGxZGkFL4RrtalPmlNoyJYi4ZRa+wqFpifra2GAjR6j7bKMIsvLTdj+NUveXJlYC4JcW/EWaaSbd\nxNGr+0TUXOlghru1Ip33XzqbEWVKWCnKyxqQJqJHdgX+t2pCz873Ix5vdXbSIYtx5GnnkaYAigMF\nk4mkiFV7lUnkmeV17O9MsTJsgli6rY1PdrTzk38kl5/Q0efm9XXBvh3XLRhLvuajmFWlHgmXpQCb\nVSOQaYrQklLS0h3dnIWU4LHjFnnYrIlFKC3fFSQkJ05U9uxtzb3YHB7ys63kZFkCTEtnIn4pg3b1\nAaLT7qY4LwvhdeIkN2FmFwv7O5UmNbxQXXs32fgLhqr1pZCZLaXk4Q93UZmvjSuzlO29Jz09Wxq0\nDO8/XDabbKGcyF4s+ArUfUg7cQw41rVrnobwWP3aj60sDGwrys0KBGb4/JKuCOG+9V0O9nXY2VBv\nY/xtb3L14yrGx4Ifq0/TjPd8HHHOXHcnnRTjEzmQV5YZ098gwmQiKaB/9dJlOzNQ6sGA1XWdrNrb\nyc7WoNRT3zXwej8/fnENdy9SSYQv3nAst501hQ2/OIN1Pz+du786FgC7KKArQCDT85B3O7y4vP7o\nmojPDdKPS+QmPPc+zUdx3uwaxlcpQvDm+kZW7u1kTEUBAEIIfvqVKVRm2bHJQnx+qaTZJKJiuuxu\nygtywOPAI3LSwmgbuhyUFWSTZ1GEy00W3oIqQKYUubO7rY/2PjfHj1VmGpffCsXDB0zcF61vpP
aW\nN3j4w1Abf1O3gzmjyzh39ohANrkfC/5CjYmkmzjqjvUibfy+1DSqLrub67SorBHlQZNpcV6Qidz5\nxiZm//LdgKbh9vr5yT/Wcvzd77Pg3iW8sT6UIZeivZtF1YpZR2h+lufpokMWK2GmeJhpzvpPhp59\nffcFMyjNzw5oJZmA3y+5+KFlXPjgpyGq82WPLMPhHliQ2s7WvsD36SNKEEJgtQhK8rKxetTYTpFP\nj6UUEGmTKPVEw0jhvQB4FENwWfIMTCQ2odjf6aAoN4vfXzqbUUMU03js492s2dfFpGHFgeOuPWGs\nFrqsMZEkJfIOu4fy/Czw2HGJ3KDGlCTBfOjDnfx9RR3DSvIUkURpIl5dmk+BwGzXnpNxQ9T1dkkr\nFFcP+Ly/9ffVANz79taQ7Y1dTmpKNeKrFUf0kBVce5pzjFSeSK4i0AjoTk2jWrE76BOrKMwJfC/W\nzFlSSp74ZA8Ap973ITc8vZI/vb89xIz84Ac7OWJoEROGFgEwKk89467qOeqArr39zsFLob+HTlms\n4k+Lqk1z1n8ymjUn67DSPI4cU87nezIQG69hV5tyvgK88kV94KHd1+Hg+c/rAGWmMjoKI0FKGSDm\n88cOCQ8I0CQnp7UQD1aVfZwkIfvFaxs57b4PA5FSOlEzJhqGQGsU5RF52ESJKk4XhxDt73Qwoiwf\nIQTZVguTDYzju186wjB2H8LvpZtCJQGW1Chz0QAjz7rsbqoKVFipW+TSk1UGiKT8DE02Z0Aj3N3W\nB16NAJGNJ19nIslL21/UdZFlEVRq6/WiaSIDvJ/ZVvV7iyDwDLq9KhpppC7Be9TaneTgzatU9y7d\nErbPrRzr1mz1XKZoltMDMr590niEHiosJdN8m3E4XGxpCi3H//bGZh54fwdWi9Jsi3LVu3Pk6HKe\n+ubRvPrd47l2roqAs5VNVT/qx+i62tU16cCoiZjmrP9YNGuayLDSPObVlrOztS9yq9A0YN3+0DpP\nI8rzuefCGQAs3tyMlJKTf/cB5/35k5jj2BwenB4/P/vqVF644djwA1zqxXFa8hXBKKpOmhg88cke\ntrf08vbGJrw+P9/WJNqhJdE0EfVSuyy5+LBA4dC4L1hDl4OasiBTeuqbR7P4Ryey6ZdnMKE6yFB0\n00c3hSoHTI/LH6C03Gl3U52vwlmd5CjCXFiZlNT9yY6gTf/ieSPBrZhsj8zHk687XQc2rs8v+cYT\nn3HH65tYsbudOaPLyNWis9xkqfvp7g3c50SQbbVQnJeFxycDIc6LNjTi9vmZM1qLjtK0SIfMRQqL\nRuTTbc5yBWtNpcG302hzkpNl4eYzJgU3bvo339n1Hb7sWhS4P+/+cGHI72aOLOXaBeN44HLljD91\najU1ZfnMHFnGpBKlkbUXTVQHd4fmOrW0qmuSVThElfAs0hJ6M9Idc3BgMpEUoGsi1cV5HDNO2cbn\n3bmYLU0pds2LgD1tfVgELJigiMvw0jwuPWo0154wls/3dFLf5aDL7mFLU09MRtZocwZ+HxEaIXOJ\nAs3sU50UMTCa2Batb2RPe9CEpkfBhMGjjvGIvODccSTxRpuD4YbxqkvyOGJoUbiGVbcCgA1ynCYB\nakUmB8pE+jxU5Com4rbkaiaJ5Oza21t6ybYKVv/sNH5+9rRAVeFe8nHnazXfBqiJbG3qYcnWVh77\neDdf1HUxraY0UPHWIy1KE4GETSi9Li92t4+TJinN6Eu//YB/r6nnjXWNDC3ODSTn6VqUk2xVNTkF\n4SMq9DwRUJpkGjSR4aV5QS0EoG07ACfKz1hd18mIsnwmVBdzwdxgrdfJw5SP6eTJ1Xxyy8mcNrU6\nsK9UqnvYmDtWaWPdDSFzdnYov2luUZl6DouGqWuXYpO0AwmTiaSA5m4lyZQVZDNzRGlg+0MfpD9b\nd1+ng+Gl+Rw7XjErXXA5YUIlbq+fV9cGH9YPtkZ3xjbZgia4iHD1qGqp1mx8+kM+AGLg9flxenxs\nb1GS7oiyfJZub2PNPvWSvP69EyJmqwPgVkzEaSnA50ezF0cn8g63j067h5po52KETdmxdzFSSYAB\nTaQh6k/6o8vuptflZUShuvgucjWGNHBGu7utj4c+3ElBThZDCnPIsloCLXt7ZIEq7DiAyJ1PdrTx\noxfW8OG20Hs/eVgx+Nx4yUIiDOedGAHWBSVdeAF4+MNd7GjtZc7oMrKtGgnRNRFy1XNTPCwDPhGP\ncqxDWsZvtDnDhSntes+w7GbVno5A7sh9l8xm0U0LOGNaNVcdMybqmEU+9Zw3yTL1/PbTRHq61P0p\nKBkSqhEfws51k4kkic4+N402J9UluQghyLJaeODyOVQW5fLelhbc3vS2TN3XYWdkeT6nT1UP3Qna\nSz1/bAW5WRZe+DyYmLZka/T59TDkgEO0P1w9kFuM1SLw+QzagD+x8/nvf67jqLsW87dlyqH4vZOP\nwOeXvLtJvZy68zsiNCbituRpUlpsaXafdi7Do52LEd31UFCJW+QEfSKQECHa0dLLnF++w2WPLAdg\nbLHGRESeZpIYNmCN4YcvrAFghkH40CvfBvw2CUbu/PzVjVz56Ape/qKee97aQoGhmdKZ04eBz4NP\nZCH1MSFhoqUHj4wsy+fHpykTzabGbna19jFluKFhlsEn4k/RDBoN0utiQ7NWn6p4uIpc8yZfKLGu\nw86o8n7PoxYNVyb68Pa0hvjYpgwv4eGr5gUYSyTke7twyBzanFb1jBk0kU92tPHKMuUDyy/WmEiR\npsWkm+EOIg6tNOsDhNV1nVz71ErmjCqjweYk2yoCPgq9dDTAObNqyM+2ct3fVvLq2gYuOnJk2tZQ\n12Fn4cQqjhhaxNrbT6c0X9mG83OsHDe+giWa9nHSpCreWNfIG+saeeKao/jS5KEh4+xs7aMwx0p1\nNL+EuxdyirBIoWki1arEhL0diuKX1X9zQyNOj59/rtqP1SI4dWo1vLyetzc2U5SbRUlejEdOU+md\nFgMR7WuNWuX0TS28MoSYRUN3PZSOwNKDMkHllyvTSHd8TeTtjU102j2BekoTSxRhtVnKNHPWUOVz\n8fujVm41YtXeTtbs6+LkyUP54+VzgjscHXizClSEk08G7eUxIKXkyU/3hGw7c9ow8nOsONw+ygpy\nwOfBK7KUmWmAmogehDG0JJfvnTKBEeX5/OhFlZ90+dGGKgYBn0hOUMKOce+Sgd/rYtneXpb/awOP\nzdDMcr3NUDYq9g8joM/lpbXHxdiqwtAdhlI4R4gGjhs/sC7cVmcnXRSrUvIlNdC6LbDvpdX7KRZa\n8EhWEX7ZF2QiKQRQSCm57m+rKM5TUYqDDVMTSQCL1jfS0efmvS0tbG7sDnFy11aEPoQLJlRSXpDN\n/7y0jk0N6fGNuLw+WnpcAV+CzkB0nD4tWLztyvlBVfsbT37OM8tDQwyX72rniOriUDtwyGQGTUSX\nKCEhs0pbrwunJ6ix3HTKBCqLcpk3RjHaMPtzf2hlS
HqsZUG7uvRHTSpbvLmZuaPLYkqGAXTVQckI\nBEIxKCESNoms3tvJkMIcjhk3hAcun0OZX91/m6UkKN37vQknLz7xyW4AfnfxLApzDUzV0YknR12r\nwLWPs75uRzCz+uxZSruaNaqMu86fwX06QfF78JGlzju3RGWtJyj56ppIlRZRd8Hckdx/6Wx+eOpE\nqo1Jo4HIshyD8JHeCrXC61YM1i8Nvp3k/CK633JcZT8mYu/EVzUNgNOruzhmXIQCirHQ10qPpURV\nAS4dBbZ9AdvzZ7s7KEZjItnFmlk18fcrGl5d28Dizc288kU9H25r5cTfLOGCv3zCxobB8bOYTCQG\nOvvcPPXpHj7aHk7EfvqVKeRYLVxyVKgUlJdt5a0fLMRqEfxt2Z6U5vf6/HxR18mkn74FhCZEGXGG\ngYksnFjJuKrCAKO5e9EW7G4vfr9kX4edLU09nD1zeMRxABXim1uMRYhQ80cCjtiVe5QU9+jV8/ji\nZ6fx/VMmAHDuHOWUjNvoRyPCdktR0CQCUV+wJpszJBckKvZ9rjo0lo3GIgyBMKUj1UseBw02J3NH\nl/H89cdyzqyaAFOzidJQk0QcQuDx+Zl++9u8vq6R/Gwr5YbcBADsHXhylHlLopuEYkfuNHariLY/\nXzGX+y4k/rtcAAAgAElEQVSZxSNXHcmV8/vVOfO5g5pIgHkmronkZVtCNMjz5ozgplMn9Ds5g0/E\nLwcU/baztZdbX14X2wQsJRbpwU0WlUW5UKo5uhO4f/2xod7GhQ8uAwgExATg6MBaMxNffgXfcD+P\n8A3AXOb3w/6V7M8ZR4fdo+rNeexg78Dt9VPf5eCUsXkgLHitBUkx9UgwVm34+uOfsbfdzuq6Lm58\nZhVeX3rN6pFgmrNi4OGlu3hIy9I9d3YNLo+fo8cOYfHmZi47ejTXLhgX8XfVJXmcM6uG5z/fx1G1\nQ7gwSbPWdX9bGTBTQXQ/xpDCHG5YOA6rRZCbZWXxD0/EYhF8vqeDix9axmtrG3h9XWOAGYa9OEa4\nuqGkBkufstQEfAfd8et06XWI5o8bQnFeUFs6e+ZwfvavDZxwRGW0nyo4OiCvFGHVpGbdTNFVB8Nn\nhRzq9vpp63WHSsORICW8dYsquHj8TYjl6wM91ymvjVrfyIjWHiezRxl8F/Y2sGRhF0WK2Jfq69wH\nw2ZEHee1tQ2BmkxPfCO8MB+OTry5qihfQBPzOtQ9ySsNP55gtN2w0jyyrZYQrTQAVy9uS74SCkAR\nt849Udfp8fmxOTzc8tJ69nfaGVocR4ME8DiRWFRuEVpABiREHH/52iY+3NbKlfPHMH1E5PPU83nc\nMpvygmwo1zTujt1xxzfi4Q938mstN+ea42qVuc8IRycUVGD90q3w5k+gfhWMOS6xwdt3gKOD3RWz\ng5oIQOcemgvykRIqrA7ILUZYLEo2EEJpIwmYVaNhf6eDWaPKQErW7rdxwZwRnDChktfWNtBp90RP\n7k0TTCYSA1/UKcm6rCCbX5wzLfDAffOEsXF/e9UxY/jnqv38+B9rGVdVGIynTxBOjy+EgUwZXsK0\nGGabW8+aEvhusagXft6YciZVF/Obt7cFwn7vPG969BcVlF+iajIIoQhZyUjlO0igk9ze9j4qCnNC\nGAhAWUEOH/zkpPgPs6MT8ocghGZKK9euc8eusEN1W/2weExk/+dQvxKO/wGU1CDEekX4QY3f06iS\nHHMiO/z9fklHn5uKQsPa+9qgoBJhESreoEITJuL00Hh6+V5K87N54YZjAmGiIXB04ClUzutAYAEo\nLTAKE2mKF7IN4OrBYSkMKjQV42HDyxEP3dLUzTl/+oSi3KxArxDdHBkTHge+rDxAe250Ih+DWelY\nr/U2V/c0yrOp5bX0kE8OQE6huj4DZCLPawEol84bxc/PmdbvHJxKc8gvhynnKCbSuDYxJtJVB2/8\nCIDmsrl01Lth+EwV5rv6KRpn/ByAEtEH+eXoLFlKiRgyLuIznij2dzqYWlPCL86ZRp/LyxjNxH7B\n3PT5ZGPBNGdFwaYlL2DZ+xGXHz2az247NVxiiYNZo8r44CcnUV6QzR/e2z7g+fW8irmjy/jzFXNZ\ndNOCcPNHHAghuOb42gADeeeHC/lapPDE+tWBHAVdElNmH63F55BxSsqKg/2dDkZXRCbGtZWFofb/\nSHB0Qn45ViEUwcsvUzWu2sKvXyBHJ154b71KcGT+jQAIDNahAPGP/gJ3Oz34JaHX3t4OhZXK5IdU\nRCd/SMR16tjd1scXdV187ZjRkRkIgL0joIlICQyJzkR1NHY5sIgYpWQAXN04rYWBXvNUHKF6cfSF\n+3A+361ML8b2sAlJsl4HPqu6F6rAZZUy1bTHfvallIGy63pBx2jnACr8Wc+ap2JC3PGN+P2729jd\n1sdNp0zgnotm9lu/Gx7VetAXVintoLAK3r8T/jgvECIe5STgha/Bno8gr5TsqvE0dTtxFY2A6Rci\nN7/G62tVqG+RvxfyyrBomp2UqPvRvjOphMNPd7Sxu62PUeUFVBblBhjIYMJkIhHg80usH9zJtdY3\nmTK8OKSx0UBQW1nItQvG8cHWVlbt7aChyxF8AeLMf9vLqrT8vRfN5CuxfBhxcP6cEQwrUWVZJlZH\n8B+074S/fgn+8XX1Irm6oaAilNhWTVQ+hTjY32lnZP+QyYFAYyIWC0GCVzUZWreGHdpk06oFxNNE\nGteo/hOajd5iEUGzTqWWVRzj3HRiOqTQoF31tQV6WgRuZ9XkiONIKfnN21u4641NAKpYYST4/eDs\nwqs51qWUUDkh7vrW19uorSwM5mtEgtOGw1IUvKaVk6KOq5vHjLBaEuge6HHg10qg+/0oM03VpIj3\nzoj9nQ48PqnNHaOYqNaAqod8w7MxSZ1DAsS32+kJCHOzRkXQdprXQ7PWibv2BPV31HwVrdi+HTa8\nFH3wjl1KYwG48HEmDS/F55dsb+6F2gUIRwcfrVhBjtVCvq9bPePaJfVLqc7D3ROWUxILNoeH7zy7\nmiseVUm0/5WAdSRTMJlIBFgtgpLRM5liqWPuAM1Q/fGN42sZWpzLhQ8u47i732f8bW/ynb+vjlhe\nWsfjH+9mdV0XJ06s4oihCTiOYyAv28qr3z2eR6+eF/mAfeohZOf7sOIh9b2gPChlAwydpswGESqS\n6vD6lOMwajZ6JPS1hdYW0pmI0MKLAYZOgZZNYXkqTd0JmLM698K6F2H0sYE2qgID4a+cCMIKzRuj\nD6Hdp3KjJmpvC2oi+lhDp0DL5jCCtq25lz8v2cnizSqEc3S0PBmXDaQfn66JgNJwimvU+UeAlJJV\nezuZPzZOBJGjE7u1xLDWyepvhHGbbE5GlOXz0reO5aVvHcc3jq/lxhPHxx4fwN2LL0tjIoF7N1UR\n5hhEXi+jApEZmA5voxKqGmRlUBAbOkWZX2NpCRr2aP66M6ZVB1oGhEA3i13/oTL3AUw0hPfWrVDP\n/5rnwnNT
9Ot47fsw4dRA4vHGBhuyRkXIzbLsZMWs17HUr4T8skBXX79EXSeA5sj3WYffL7l/8Tbu\neH0TJ/5mCW9o7Rz+58zJGfd7xILJRKJg+OSjqRHtTC8bWJvM/ijIyeJ/vzIlZNsb6xv5yT/WBiVi\nA5weH394bztjKwv53SWzwvYng6EledFNYav/pv4OmwHv/kxbdAUWIYJ0e9gMQAYltQhYs68Lj08y\nfUQC4bYA6/8JvxkP902GTa+ql9PARAKEfvhMJQ32M+kYqwVERcNqleNy/A8Cm0KYY1au0iCa1kUd\nQm9CNcR4/fraoLAKoZv89HW6uqEz1EZvrLhcVpAdPVtfy0/w5WmOdf0CDJsBjZHXt77eRrfTy+xR\nZVHXj5Tg6MRhLSbwtJWMUAwqwnk3dTu1gqJDOHJMObefPS22D02HvRNPjsEUp6/d0Rkzgqpe6+cx\noixfSe5R4NjxCe2ymM1ydJBJ6cEWMe6fDj2o5OYzJkfWrHQmUmWoozXzUjj3LzDtfOVb++wR+NeN\n8MKVob/t1MLoNeYzakgB2VbB7jY7O+QoHDKH24tepXzTM+q4nuZAoIJEQrXmm2mO3dhuxe4O7l+8\nncc+3k2X3cPkYcV8ePNJfOuk8co0+cE9QbP0IOKgYyJCiBeEEGu0zx4hxBpte60QwmHY91BGF1Iz\nV/2tX5XyUOfMquFbJ43n7gtm8ML1x3DbWZNZvLmFB97bgdvr5+evbuTxj3cjpWRDvY1el5f/PWuK\nCmXMJFy9ULcM5n4dvnxvcHv+EBAGiXJE/GvxpyU7yLIIjh8fIQJr99LQKKi1z8NL/xX8/8Wr1Mvp\n6ISS4ViEgYjWaMl4DatDhqzvclATL+9El1CrJgY2CWHQRPTx61dHlZY7+/ppIl6XZvLTfSL6ONo1\n2h96jYwmGp8vhtmlSREQV6GKhgusccRcZbKJQBxeXl1PjtXCmdNimDvdveD3YreWBO+nEMHz7r8M\nmzN6SZxYcHTgCWhRiT83+zvtWIQqQLm+3hZWtkWHc/cyVvsnAkIlYgJUTwdLVkLv6Cc72pgyvIQj\ntOrXYejaqxz1xlbMWbkw50oYc7xKJl35uNq+/Z1Q7ae7AbILAsEPVotgRFk++zvt7OxwsUHWUu7S\njq+cBCfejKFosPpd+Vho+CLmOSzaoDSPmtI8/nnjsbz87eOCPpDPHoYPfgXLH4x7LdKNg46JSCkv\nlVLOllLOBl4CjGEkO/V9UsobM7qQEXPVA1q3LOWhhBD8z5mTuezo0cwfV8F1C8ZxwdwR/H7xNq74\n63Ke/HQPv3x9E7e9soFdmto9rn8mbbrRvhPev0N9n3QWjDSEnOqOdf3/4mEqLDTKtXB6fCzd1srV\nx9ZS7msLZRhSwlNnw9PnB7NyP3tE/Z1ydtA+r6N6OlaLMNi9p6gXdP/KkMP2tvfFdyJ27VPOXUNk\nkzCaoABGz1ehxW3bwn8PdNh1n4jGRPSEwsIKjSEZTDc5RWHXyNg0LDc7xuu252PILqS3+mjAQIhH\nHgVI2P9ZyOF9Li8vrtzHwolVlEbSxnYshle+FTAXOq3FwbUCjDxamfG0UiugGPe+TjvD4/mZIsHe\ngTdXmX4DDLB6hsqBqFse9Wfr622MHlLANcfVMq6qkP968nPa+xUQbem0McS5n6yaGYwoyw+aOnMK\nYNjMmOOD0hY3N3Yza2QUjapuOWxdBGURgk5AmUNBaVSFWtWGHYuD+3saVCi8QaAZWV7Avg47+zsd\nrPJrz/jQqfDdz2D8yaGOdYCR82DfZzFNf9uaezhyTDmf3noK82r7tXHQ34/t70T9faZw0DERHUKJ\nmJcAzx2QBeQUKuly94dpH1oIwT0XzuQrM4azUmtkdcPCcTz3WR3//U+lmkdLLEwbXrsp6AMZNl2V\n2NYrpBYMQSBCzW21C2H3R6qMRT/s67Djl5rD8unz1WfnErXTGP++5Q3l22jZAkffAJc+A99eDrd3\nwSVPq+s95vhgiC+ANUu9YHtVifttzT1c+9RKNtR3UxslEgxQ8+xaohIKDVABA8bz0pyou8Lvs8fn\nD/T7CNSj0rPnCypDGZI1SxGbfs9LY5eT0UMKOHZcBXeeFz2HhL2fQs1sLFZFGALjjj4GLNlh69va\n3IPd7eOSeVHCON+7A9Y+G2DYjqziULdS7QmADFxXgE93tuPxSWZEI7agmN3TF8DH98Nbt6qwWClV\nyZZAjotuLsxRTDrCtQXlM1iytZWTJ1dTVpDDXefNwOuXgZDfwJRb1mAVktETZykBw9/v/u37LKa/\nrr7LQafdEzkxVRdy7G3Ry6cMnaoEHoCzfqvMgTsWKwa8f6V6xotDtcEpw4vZ3NTDrrY+vrBq913P\nuYJAiG/gWo05TpVwiREFubfdzphIPrX374Kd76nvDasDlR8GCwctEwEWAM1SSmMM31jNlPWhEGJB\ntB8KIa4XQqwUQqxsbU2h7MKE05TKn4HOY9lWC3+8fA63nTWZ3186i1vPmsK9WthhTWkeuVnpqTcU\nFUY7cokWMXTZ32HGxVA0DIuln1B0xCkqQkZ3xBuwVisDc0SRJxjxs1FTIDe/Fjxwy+tKavP0BW3P\nFouS4KaeA9cvgdyi0KxygPEnK3+MrZ6XV9ezeHMzVovgtKkREut0vPszpV3oL78GS39NZMg49dn2\nVtgQxk6VAbOZHm1UWKU56Q2DTThNEQFDTk2DzUFtZSHPXX+MKoYYCb2t6vyOONXgcNWl7UIYc2zY\n+va26xprBPOM1xV09q56AlCaSAjzHDUfcoqVBK5h7X6llfSvtxaCzx9TBGvx7bD8L/DF0yq3wucO\naCIh8xxxGrRuDskX+aKuk/ve2cr/vrKBLIvgeyer5mG6qUl3guuoWvsgLplF6ZRTVDmekOfyVFXq\nfteSiMv1+vzc/A/1rBsrEQfQ1xboyhh4D/rDYlECz89tMO08dZ93LoFXblBhwftWhP127uhy3F4/\nz66oo7VyPpx0K5x5d3DIgE9Ew9gT1d8d74VNf+1Tn3Pry+tp7F+hoWk9/Pu78OkD6v9z/6xKzWx/\nRwUCLP1tRKEv3TggTEQIsVgIsSHC51zDYZcTqoU0AqM1M9ePgGeFEBG9uFLKR6SU86SU86qq4hcN\njIopZwMSNr6S/BgxYLEIrl84nvPnKGnyknmjWH7rKTx73TEZmS+A7kYV1TLjEvjWp0E1fMJpcOGj\nYLFozu1+BDIrL2Ko4yc72qgqzmVKtpadbM2B7YsVMX3rf1Tm7vE/gF0fBE0PFUeEjaMjxJwFMFlj\nBBtfprnbycjyfHbc9eVAJeMQuPuUjXzt8zB2oXp5DQgxQemYcrbSIHpDBY53Nirh4dlr5wc3rntB\nmceqJhHmn538FfV3/T8Dmxq6nNFL1detgG1vBxnz8JmBMUOWOOUcxRCbgo5X3eFfWRQhYGLVU4ow\nDp0W6H3uyioi5KyzcmDSmYrJa9ng9V0OKgpzKMmLEaygM1Frrvqsf
ko5nQF/tmICIT6nKV9Vfw3P\nzQ1Pr+KB93ewZl8X58yqCQR9VBblkJ9tZV+nIdS3bQdjmxbxmjyBIcNGh/rLQEnwBRUh19yIxZub\nWbarnXNm1TA+EsPVw2rHnwIn/DD6eRsx9TzlazIy9pJQTcQYjHDK9JFw0i3BkG0IFxYqxivTbT9a\n4/H5Wby5hec+U91LjzZG4i39rWLiXieceAvMukIlBy/+BTx+ujJXD6RsS5I4IExESnmqlHJ6hM+/\nAYQQWcAFwAuG37iklO3a91XATmBipPHThqFTYPhs5VBLsBR6qhhWmkdt/6Jw6caej9TfY78djAyJ\ngBBikFusiNm6F8OcvDtaepk8rBjLzsUqZPaU/1Max5+0sOKy0TD7CkXQdIe6nkgXAcIY4gtQeYTy\nDax8ghZbH9UlURzqHodK+vrryco8Meb4EDs1EOoM1zH7SrW21U+FbP5ou2Iqx+nlWqTmm5h2vjL5\n9We0pSNh3ElqHJ9HK83iiuyodnbDE2fCs5cEo32qZwTOK2Tc6Rcqgv35o4FNNrtbVcyIRPDXPqdM\nMBcGj3dklYUzz9lXKO1Sy15v73XFDuaQUkWfzb8R/nsXnPkrxdj+fgkAnWNUSGwIkS+vhTEnwKon\nwefF6/PTqvk8Lj5yZLBIJOq+jyzPZ58h7Fc37ywuOAuLRQQLg+qwZithaMsbYa1o7W4vd725mYrC\nHH5/6ezIz4zORE7+KRQkWGxx3Elw4WPwld/BwpvVtn7mLGOoe6SM/0B0lpGszLoM9i0PCTk3Npgr\nys1STcZAme82/Uu9bwUVSsizWGD25cEeOVf8Qwl+GcbBas46FdgipQyEQAghqoQQVu37OGACkHyt\ngERx3PegbStsiCzpHJJo3qi0hWEzox4Skdge+20VmbTi4ZDNjTanquvVtF6ZqY66TkV4Sb9yrJ75\na7XdaFqKZjqAYMZ6yNzfgY6dzOx4O3oZ+xeuCjr1S0crU0cEhBHTqknq2GV/Doba+iXbW3o5f45h\nne07lQanhZaGmd0Ajv2uIkyrngxk1Ueseda0Tl2f3FIlgZ52BxRXG8phGI4tGKII/ppnA6HOXQ4P\nJXnZ4eGqzRuVXXzmJVA9Vflp8sqw5QwjLM917ElKW1l6L3hdtPe6qYik2ejorlemq4ojILcIZl6m\ntvtcMPfreLVujGHzHPsdVRbki6dp73MjJdxx3nR+c3F4CPuoIQXsN2oiWniw0PwVlv4CBsD8G9S1\n/Oi3IZs/2NrKvg4Hv7pgRvSESZvGREoHUCJECJhxERx1rdJeTrkdpl8UcojFIpigmeemRChXFNA4\njW/Z3KtVcMaSXwU26S247zhvOm9+f0Ew8flNjXmd+Wu4eafyG4JaEyhtaeLpYUJUJnCwMpHLCHeo\nLwTWaSG//wRulFJm3oM07XwVDvnWLepFOBzQvkNJiDH6PITkQOiomaMiuT65H9p2wO6lyPtncJHz\nH6r8SOsWRZCz81TIcNkYuGFpMJ7/kqfh1F/AvP+KObdFEJ7ZP+VcGDGPbzqeYnS+J/xHHgfseFd9\nv2Ep/GBd8MUyjm2BcO4InPpzxSBeuwn8/kBo7tFjh8CH98LvpwfzaMar8hiBsvJGHHEq1C6A9++g\nvV5J0RE1Ed1vcuNH8J3lcPz3tXM35A8YceL/KG3klW+B10WX3RPMkVn2F0VU7B3B6LBp56u/lz8H\n170fmqlvvBin/1IxpsU/p8Pujl1aZ682tn5dc4vgnD8qzeTL9xpMcf3mmfRlpRW+eztd+5TPrNqY\nHCelIpwf/Y5RpdkhEW22pl24ZBYTxqkSNWGOdVBa7ZFfV9qOwfeyvt5GlkXwpUkxfDzd+1XgQkGc\n4qDRkFMIC34EheFFTR/7+lE8fs28iObBoGPdsLFgiBpry+sqbwpo0QSR2SPLVEkhr1uZFNc+q96v\nuV8PZRTFw+D7a5R/ZJBwUBZglFJeE2HbS6iQ38GFxQrnPwKPngpPfgUueiIicTpk0LJZ2XKnXRDz\nsDAHtI4v3wuPnKjMMKUjEF113JJVx849DvUCz9ZMMzMvVh8jhIATfhA2ZKS5w4izxYLjtLspe+IM\nvrH7x9DxjPJ/vPO/yrxQq8VZXPxUWMXfkCVEIvygEuNOvR3e/T946xZ2jfsRVXRy4u7fw2YtP0AP\n8dSKC4pImogQcPYf4JGTmLDocmaKG6gpWxg+X9s2xRT6ScA6EwmznpYMh7PvV+bAv1+M3/UtygtK\nFON7W/P77F6q8hwKKoPhqvnlWgLnhnANARTTm38jLP8L14ndbM35Ufgxbrsad+/HKmTaqMHOvTq4\ndosyQ4XNIwSc9xf468mMef0SjhI3MLTk+OD+htXw4T0A3Fj6Knb30Xiefpiswgo6tq7BSgVfO6YW\nQHOsRziRBT9RptbnLlfPQNVE9rT1MXpIQVB6b90Kf79IhZV/+R7lh+jYre5nAs3EBorRFQVRa8np\nRVLDGO5xNymn/avfBVc3Lf7TANUUDL8fHjkJWjRz18KblcDWHzFMxZnAQclEDjpUTYSr/wXPX6mi\nMWoXwITTlblAz/61GiQ4KQmIuyl/R/0f8Xsix/T7/tF9yv5/2i9innJY5JGOslFw2XPwzIXQsRPb\nqFN4cXcu19X/W+2PUQo9USipOXx7Y+EU7vZ8nz/bH1b+Fs1pzK4PtB9mK4k31tjG/Jf+OO77qnT5\n8r8wYetnPJPTSs3m/YpxXP1veO+XQWaF5qSP5CqrGA9X/Qv/U5fyau7P8L25GI68WmkHFqsWfvyh\nKj/STyMLc7gaMeMi5Sh9/Yfc7VvJ54Vfgnc0CfiEH6nqA/Y2Fc3Wz4wRMaBAxxm/QiK4fMWD2Lcs\ng49uUqGzhVWw9U14+7ZgbbBxX4qqRVpirb28Fr7+Gt4nL+Yfub/E9fa7MOcyVUTx0wcAASf/L8Pf\nv5PfZK+BneDDylh87Kw8idFa7opFiMj150pHqOjCF6+GvxwDE05jRv1oxhaPh9Ya5dP793eVNcHe\noY458hol9Wua5WAioiYCKlT8smeVsLDov/lq3kh6suZQYauGli7FQEYdAyfefEDWHQkmE0kUI+bC\nd1aozNC1LwRNG4ciqqeHxKxHQlhSnhGj58ONS2HFw2zJO467tmdx2tlXUNvxiSIyKSKiOQtlH37H\nfxRrz72IeQ3PwpbXYNblMGS8Uu8nnRW3ha8wllQJ3wln/AqGTqXwjf9juKUDjr8JTv4/9XJf8ULI\n4RYh8BE54MIzfA5fL/gjF7hf42vdyxRR+Pd3oHCoCnG2t8NX74+4BIjB6GZfASPmsfzBH3KC/T1Y\n7VChusd9T31WPg7jw+9BVM0SwGKl+8Q7+MbSah4Y8jYF7/UTMIqHq+xtgNroTDpiUIAR1dN4YtZz\n9H38IP/Tt0yZDnXMvBQW3sy+4tn85h8fsFmOplsW8NtJWzjurK8FDguL3DNi3Enw3ZWw7E/IDS/x\nbcdb4ACMlp0v3wtTz4V3
fqai2KQ/ZnBJpiCimS0B8krgihdh86u0vXE/12W9SdZjhlD58x9UYekH\nCUwmMhDklSgVcuHNKhy0bZvqR+HsAp8mFQckQBEqDUbcnuD3wO8T+U78YxLQFiyxJFdQD/GX72Hz\nJ7uBTRRNOx2Kzo5+/AAQjVDoPUTKhtXCzF+p6CAdsy5NaOyIvp7+B8y9iqs+GcPInD7+fOqZUZ2T\nYSVUDHhk6S5WN/u56tLbYNZwFUnT8IXKS/DYlWQ++8qw3wmimDmMqJrI9zzf46r5Ndx2dI6yy+uR\nRQt/En2tMSpId9jdrJYTWXH8xYwc61Fl7e1aDsXkr6oIoO76sCik0LWjrT360hvsFt7Nu5Bbvv+g\n8sV07lHrH66itGpmncrqd6w0dDm4+6KZLJh3VcjvrdE0ER1FQ+G0X7Jxyo/4rz+9xm9PLmDBML8K\nCBEWmHOVEggu/Cuc+ye1hgNAkEPKnkQ7YOq53LViOM7sFp5b0K4Yednog4qBgMlEkkdRVVyp91BG\nRHt/BDR1u8i2CoYMsN9K7LkjMxG9AVPU6KxExib+eUkp2d5qZ+5Ro2NGtwRaCEfAiyv3sWBCZSAH\niOkXqk8c6Kb5WGt0uH04PD7KigpgaPR8m7C1xtgfqFZcmA1DRka2q8eJYLLE00RQXSKrivPUda0Y\nH6yYq8FqEbz23ROwe3wRK0In+lyu2NNJM0OYdMwpEK2MS1auCuM/AEjkWgG09LioLquGI786GMtK\nCgdrdJaJA4yQarcxsL/TTnVJXsBRmJ65I0v421t6qSjMoShec6uYY8c/r067B7vbx6gEeqNEWqfb\n66euwz7gbpb6+qKNG1yfVs9rAIw7nmbZ3qvXCEueQYfVg4qA5m5XXCGgvDAnakuBRJnItqYeKgpz\nGJpMHbBBQMSk0gho7nYx9ACWeU8EJhMxERGxTDVGrNtvY0YipcIHAGsUTWT9fhszR5bG7/cdA1Gd\n4Qbo4b0xW84SXbpv7nYiJYwcSG8VfX3a31gEX2+UNZBOl9G0Ox16X4+o/U4SmkP9jTVPS48zJaKY\nqHCzo7U380VMU4Butox1rbw+P+19roOWEeowmYiJiBAxTDU6XF4f+zvtkTsmpjh3f7u3zy/Z3daX\n8lyJECHdbBavJHo0/4qe51CTDBPpX1MpAjr7VxZOaNzYUm9dex9FuVmUx+rPEgfxtCifX9La42Jo\ncVTvWJsAACAASURBVPJEMRHhpqPPzdp9XcyrTTAD/QAgrk8EaOtViZmmJmLikETMaB4Nde2qeu/Y\nNJdpsUYI8W3pceL2+RmVgqSsIx4R0jvsxWMC0a5Rg8ZEhpcNnFhGTdgzYJ1W8HIgWkO8+7lmv43J\nw4pT0vKC/pzIE7X3ufDLVH1a8YWbT3a04fVLzpwWo0DnAYZIyPSnnkOTiZg4JBE1T8SATY2qhlbE\nEtspIFKIr06YB9R+N+LYCTDHDjtWi4jbFCzaNep2qIz68iSCDeKFyUopeXN9I7NGlVE9ADNHLJ+I\n2+tnU4ONI2tTawUdTxNp7FJEsSpFTSSeMWtrUw9Wi2DK8AS7bB4AxMyp0ZCoMHOgYTIRExERMylP\nw952ZUdPtyYSKWNdr6eUap+VeCG+To+Pvy3bQ01ZXvR6S4GxIjMku0eV3w70IBkA4jlcd7f1sbGh\nm/Nmx87z6Y+omfrAG+sb8PgkU1MkuvEI46c7VUOvmO184yBmno+G3W19jCrPD2aqH4SImw9EUBMZ\niLBwIBA3zEUIcSzwNVR/j+Go9J0NwBvAM1JKW4yfmzhEEc8RC0pSqijMid43PElEylhv6EqPVBYv\n1PVfX9Tj9Pj5yemTYhylEC0L3O7yYRGQmwQRiyfNr65TPT9OOGJgtZ5iCQW/fVt1dTwyQrXZgSCe\nFrVsVzuTqouTa7+rwZJAjHZr7yHgjA6Ys2JrItlWQcUAfF8HAjGfciHEIuBa4G3gTBQTmQr8FMgD\n/i2EOCfTizQx+EgklLLJ5kiJIESDRRBWH6nR5qA0Pzul8F6IU/4DePDDncwaWco5s+JL+tEUFbvb\nR2FOVkr+hWhr3NbcQ06WJXIzqhjQtab+RMvu9lLf5eD6heMYmUBIcyzEC/Hd3NjNzFhdExOAMiHG\nPqajz33QE14dsc6lyeZIe/h8JhDvjbxKStnWb1svsFr7/E4IkWT5SxMHM5QDM/YxjTbVICrdiBTi\n22hzxg25TQSxyrk4PT72ttu56LSJCTGAaCYiu9tLfhKmLAgW5oumNtS12xlVnh/X1BY2roHAG09N\nt7unaspSc6i/ka6JzeGhtcfF+KEDY37hc8SPruvocw8ocu1AwBK4CdHPpak7Pc98phFP3/6FECJm\nRbsITMbEYQBLvPIgqIc8E5pIJKm5yZaeuWIFDAzUkRnWQliD3e2jMEmNKZ5fYV+nPalcjmjjtmj9\nKtIRARTLFKe3862tSM1/Fi/Px++XdNkPfiYSzKmJfkyTzXnQ+0MgPhPZBvxWCLFHCHGvEGLOYCzK\nxIGHJY4D0+720mX3MDxSw6U0zA2hL1i6NJFYAnzDAPM7YmoiSfqJgklo4fu8Pj+7WvsYWzlwaT4a\n0dLrkQ1NIew2fI7wxetBGLWVqYZox/ZpdTs9+CWUpbEMTyYQrJEWeb+UMm3PfKYRk4lIKf8gpTwW\nOBFoBx4XQmwRQtwuhMhsa1oTBxTxfAc6UUglwzkarNpTqc8faDNbkjrDihUwUD/AMOJo4aZ9Lh+F\nuUmaswLWrPCRtzb34PD4mDVq4H6FaFVjW3uUJpJK2G3YHBGu78aGbrKtImVNJJ6G3GnXw6uTT5oc\nDMS6zwDtfW5cXn/KIe2DgYTCR6SUe6WU90gp5wCXA+cBmzO6MhMHFCJOFNPuNmWeSHd4rz43BHNF\ndGl5WGk6TC7Rpb9OrZxIzBaxBkTzr/S4PJF7nyc0qPoTSRPZ0tgDEOyzPQBEc3o3dzvJzbJQkpd6\nLdZY4clr9nUytaY05Ui+eAEfgUKSB7smot/nKKa5gWrFBxIJMREhRJYQ4mwhxN+BRcBWIHZrPBOH\nNOLlUwRqLUXp3JYKdKexPn2PU5XZj9RmdKCIlS/R4/RitYiE8zuiJfD1OL1JR5EFHK4Rxt3T3odF\nJKf9RfWJ9LgYWpKbUiRZcI7oprgdLX1Mqk7Nqa7PEcux3qUxkbKDXBOJ2U8EqE9TXtRgIOaTLoQ4\nDaV5nAV8BjwPXC+l7BuEtZk4gIglsQPs67BTXpCdFsIeaW4Ihvna3YqJFKQY3guxJdkep4ei3MRD\nc6Nlv/c4vRQnKdnHdk7bGZFkEl20cVu6U6tlZUQ0n4jPL+noc6XFSRyvdlZnX/LVAgYT8Xqv6Mm1\nI8vSL6SlG/GexluBT4EpUspzpJTPpoOBCCEuFkJsFEL4hRDz+u27VQixQwixVQhxhmH7kUKI9dq+\nB0Q6RCcTURGxz7kBdR32tNSxijY3BImR3Z18Bnh/xGYiAyP+kSK93F4/HX1uqpKMd
opVxbfJ5kw6\nkCEagU+1qq4R0RLo9JpZyV4TI+IVBm3WTJ9DEjRJHijEy6nZ1dbHkMIcSg9yjQriO9ZPllI+KqXs\nFEKUCyFmCiHm6p8U5t2AMoctNW4UQkwFLgOmoZIb/yKE0CnHg8B1wATtc2YK85uIg1hJXVJKNjV0\np716r47AC6bZi3UmkmzEU/+xo5kQup3eAfkyIvlE9FIVyUbVxCIuTd1OhiUpzUcr+NfSk75+FdG0\nHd15n4554iWsb2roZkRZfkY05HQiXtn8HS09jMuAvzETSEjsEkLcAVwD7IJAU2kJnJzMpFLKzdq4\n/XedCzwvpXQBu4UQO4CjhRB7gBIp5XLtd39DOfcXJTO/ifiIFcXU7fTS3udmYhps3JEQ1ZyVJk0k\nGnPscXoGpolE8Bs1detBAElqDP0i03RIKVPKy4lUHdjp8dHj9KatREg0v0swAixNmkiM/Vuaeg7q\nwos6YtXO8vslmxt7OH/OiEFdU7JI9I25BBgvpXRncjHACGC54f/92jaP9r3/9ogQQlwPXA8wevTo\n9K/yPwBCEDWZNti0KTNOP92x3t+clWwCnxGxWtr2OL0D0iAidWDUExaT1USi2cq77B7cXn/SfoVI\nWoKeaJgO4g7G2lmh2wNMpCg9eT7R7p+Ukrp2O6dMGZryPJlGrHDo+i4HvS7vIcEMIfEqvhuAAZXe\nFEIsFkJsiPA5d+DLHBiklI9IKedJKedVVR2+fdAziViFChvTVAwxGgLESKNGDt2clQZNBGJoIq4B\naiKEm8aaEuyKGA0Bc1b/cbtT6y8fSUsIJBqmzZyl/vYnjK29iolUFqfup4hlZu20e3D7/Emb/AYT\nQd9X+L5tzSqUe9KwzGj66Uaib8yvgS+EEBsAl75RShm1+KKU8tQk1lMPjDL8P1LbVq9977/dRIYQ\nq/9Eg02PYc/MyxrURNT/fS7NsZ4un0gMTWRgPpFwjaHR5qQwx5p0nkg0v0LATJYsgYxQYbcl4KtI\nlzkrchXf1h4XRblZFOSkSZOMIt4cKqXTIXbtrJ2tvQAcUZUZn2O6kehdfQq4B1hP0CeSCbwKPCuE\nuA+oQTnQP5NS+oQQ3UKIY4AVwNXAHzO4jv94xIrO2t/pwGoRaSM+4XOrv/r8Do+PbKsgy5p6f4ho\nWeZen58uuyfhREM1VnhpmC67J6XIoGgO12ZbagTSEoFmtaSo3YTPEd2clS6TGTFqZx0qnQAhdu2s\n+k4HxXlZh0RkFiTOROxSygfSNakQ4nwUE6gC3hBCrJFSniGl3CiEeBHYBHiB70gpfdrPvg08CeSj\nHOqmUz2TiOGAXraznRkjSgdcSTbhqftlrDs9vrT1LImW29Fh17PVEydAapmhg/U4PRTnJv/yR8s1\n1H0tyda4iugT6XGRZRFpy6mIxgBbe1xUDeC6xkJQgg+HrlkdSppIxPbKNuchUe5ER6JM5CMhxK9R\nmoLRnLU6mUmllK8Ar0TZdxdwV4TtK4HpycxnYuCwRBHZvT4/GxtsfPP4sRmb29rvBXN508dEolXx\nbetRTKRyANVfIyVkdju8lOQnb7aJlmuxt72PEWX55GalVpPLeO77O1U/mHT1q4jGAFt7XUwZlh4n\ncawqzLpmlTatJ4OIlQ/U0OU4JAov6kj0ader9x5j2JZ0iK+Jgx/RfCL7Oh14fJIJGcoRAVViHYIh\nvk6Pn7zs9LQ6jVbvqr1PyUYD0kQilFDpdnpSSsKMZhLa025nTAolZiJ1HdzR0sv4ATa3igVLv4AI\nHa09LhZOSFcEWPQ8keZuF2UF2WnvtJkRxKgz1mhzMiuFFsKDjYSYiJTyS5leiImDC9FqTO1sUU6/\ncVWZS4Tq76B1uH3kJSmB90e06sTtvQMrvgiRW872OL0pJbpFk1DruxycPCn50NX+ocM+v2Rnay/H\nja9Iesz+CEaWBaHnoqRLO4jnWK/OkJ8u3YgWhadXPDgUIsx0xGuP+zUhRNRjhBDjhRAnpH9ZJg40\novXk3qFFjqRTgg2fO9Sk4/T60hbeG81w0/b/7Z17tGRVeeB/X917u2/TTdPQ3UDTDXRjI+Ghg9gC\njo/RYBRZCuoIkjA+Jiq6xCSOYyawjA9WHmKcxIlhxhE1ikpkMVFExyARlmhGF2qrLTQgAURDN21z\n6Qf3We9v/jjnVO06t+reunX2rlO77v6tVauqTlWd851H7X2+dxKGunopPhGZd9c9OVfJZM5qZxIq\nVmpMTJUyhVSnbfCPPTVNqVrntOPtaZTtTGbNHBF7mkgnX93+uJikD3TKB3p6Lqr9NegFJE0Wu9rX\nE4X2/gT4CTBB1Ft9O1GPkaeAq51KGMgFs7ugWVngF/smOW7tSo5a5e4iLzQc69H7YsWeJtLJsX5g\npszYiCxpAki7jep1Zbq8tDDh+euUeZnwv2l0XOz97rSQyoT/ya8PAXC2RbNJu2TDJEfEZkJjJ3PW\nk5NFTj3Wj27dhVSl6oTJYjSJuPx/2WbBf4yq/q2IXE/k+3gB8GxgjqiXyBtV9d/cixjIA/OO2AyI\nuf+JyZ76WSyFdFOquUrdSr8LWMicVWL96qWVRE/3oZ+t1FCFNT02pEpId5V8YonNsjqtE5r7/rN/\nO8zRR4yxPWPP8/nbaZ0ArWfF0z7Lu1qrMzFVshau7JpOZsvJWBMZ9NpfJov+M+MQ22/Hj8AyoZ19\nu1St8cjENBc9a5PTbadDfEuVGqus2tTnc2C6vCR/SLSu1gFttpTU+Mo24aUjkBpJgRbs5PWGOWuG\nZ2xcY6WPiEk6vyjRRGwWeWx3/n59cJZqXXtqHZwH0ub/BVFdOiCTSbTf2Al5CQwd7ezb+58uoQpb\nHDfKSYf42swToYMm8tRMeUmRWTDfPm+r2nB6oDyYdFxcQvhxu3VGRGt+/OCsk4ZiaS1qYqqECByT\nQXaTTprkw/vjLG/LmpUrOuXUJD4Rn8xZYRIJtKVdSOgTjgsvJqTt93OVmrUQ3075LwemS0vKEUnW\nZUbXFKt2anylB8pDs2UKkm1gMUOH63XlyamSk1yEtOwTUyXWr15hpdoAdC4F/3Bcb8pVZWnbNPS/\neXlG0SSSxa/Wb8IkEmhLuyihRvVeRzWzmtuOzVlGnoiNXiLQOVmtF3NWOqu/WIkiAVb20HnQJO38\nPzRb5qhVY5mSAk3N8qmZEtW6OsnsTss+MVVig6XILOjclOqJp4tsWLPCSn2uftApxHdoNREROU5E\nPisit8fvzxCRt7oVLZAn7coyPHE4W5nzbmmasxRVZaZUtdIaF9rXzpotV5mr1JZszkprNaVKpIlk\nNb2JtCbsTRWrrM04qDQrIzed3W4mkVbZJ6Yt1s2ic7LhxFSRjZ7kiIBhzkrVAZssVlgxWvAjYTKm\n21umzwN3EBVFBPhX4D0uBAoMBu18Ir95usjacTvVWBfedjPEd6pUpVpXjrFU36ldiG8j0XCJ5qy0\nVlOsRiNCVtNb2ieSpWd7gmmD/03GYo4L
kfaJPGWz+CKdHesHZ8qZfEb9pl3gCsR5Rh6ZsqD7SWSD\nqt5CXMFXVatAbeGfBHxGmO8TOTBTYkMf6hKZPpHDM3aTr9qZsxr9LnrQRNLZ2UDP9a0S0n6FqWKF\nNRk1MbNwYeay8gtgyq6qdiv40tkceXiu4k3VW5P5Ib7Zaq/lQbeTyIyIrCeeOOOS7E87kyqQO+3a\ndx6Y7s/dnpnTkFTXtRfds4AmskSfSHqwL9nURAwZZ0o11mSoDBytM3quq/LkZJGCwIYMJes7Yfos\nJueqlGt1a9nq0frbm7Mm5yqs88mP0KGK71QpW7JqHnQ75b2XqILvM0Tk+0Ql3F/vTKpA7jQucsNm\ne3Cm7LRmVnrb9TocisNbj7YYIpp2zB6YXnrxxWhdrYO9K01ktlxltYUERogCAX4zWWTDmpXWIqZa\nt9O88ZiYtl9Vt13tLFXl8GzFK2d0M3CldV+mixWOtOT/6xfdFmD8qYj8B+A0Io3yIVWtOJUskCvt\n4tgPzZY5ZvUxzrdtZqwnORK2el60qwl2oMc8jGb9o6g0jC3Hejphb7pUy57AaJzP/ZMlZz03TNmT\nJEmrpdnb1M6aKdeo1tWrelPtNH2A6VLVWbM3VyzlyjwX2Br/5pxYbf2CE6kCuZN2/NXryqHZSl/M\nWWaI76HEnGWrcVKb6sRPTZc4cuXokgd/0yQhYoT4ZjZntZo5ZsvVzKVUGhnS8TG1ZR5st51kkJ+Y\nsputDu3zfBJtdd0qDx3rqX2ZLlZZY6nET7/oSloR+SLwDGAXTYe6AmESGVLS0VmTxQq1ulozKy3E\nSGrAGylI5uikhHY29Z5yRGi9uy8glOJkw6zFIs2BuF5XZsvZNZFCw3wS5SJsXe/GLGmWgmlU8LV4\nZ93Ose5j5dtOtbOmStXMQRT9pltpdwBnaLssn8BQks5Yt1F6o1tMn0hi67bXfW9+iOiBmdKS/SFg\nDMzx+2KlTkFgbCSbrOZAPBubyGz6RJ6ec+c/KIg0ch8mpkusGC1YK56ZrD99/g7PJpOIP5pIu9pZ\nqsp0KXs4d7/pVu/eDRzvUpDAYJHOWD9o2cHdzbZrqlZyJNLrnu9Y7y3qLD3RJjW+shY1NE1uSVHH\n1RnvThORqvU6k04nkebxSHqr2yzy2K52VmLy9EoTaeNYny0nVaCHcxLZADwgIneIyNeTR68bFZFL\nReR+EamLyA5j+e+IyE9E5L74+beNz+4WkYdEZFf86L3NW2BR0jbbfmoiI4WmOWuqWLE6ibTri/7U\n9NKLL8L8ibZYrWUueQKtMs7ERR1XZzZnRcJOFavU1V1ZjbRPxHa/83Yh2ocTc5ZH0VntfCLT8Q3D\nUPpEgA9b3u5u4HXAp1LLnwJerapPiMhZRFnym43Pr1DVnZZlCbQh7RPppyZiZqxPl6ocmTFHwiTt\nWK/XlYMzpZ5yJpKEzGR1pUrdSrkKcyCeaZSXz+hnoXV9rgaqQqHVJ5Kl33w70hFxAIcTx7pP5qz4\nuTWpND43nmki3Yb4ftfmRlX1QWCemquqPzPe3g+sEpGVqlqyuf3A4qQz1g9ajpJaCDPEd6pYtToQ\npUN8J4sV6trbANT0icTmrKqdScQciBuDftaM9VjYuUYui5vaq2aI78RUiXNOPtr6+qG1Wdqh2Qqr\nV4ywwtE+uaBdgdPkXA+lT0REzheRH4vItIiURaQmIpOOZfuPwE9TE8iNsSnrA7KAoVVErhSRnSKy\nc2JiwrGYw8k8n8h0mVVjI9Z6nS+87eYEZt8n0toXPTEh9JLg1YzOip6LFTvmrBafSGzOylqAMpnw\n5sp2EiI7byfSoqq1Ogdny1Yr+Ebrj57NO/jDs2WvtBCY32kSDHOWRc27H3R7xV8P/C7wMLAKeBvw\nPxf6gYjcKSK72zwuWWxjInIm8FHgHcbiK1T1TOBF8eONnX6vqjeo6g5V3bFx48ZFdy4wn3k+EYe5\nBZ22XVdlsmi/IJ2piTQH6aUPqs1j1HSsr7ShiRja0kw5dqxn7lESayLlJCHSzV174vieLlVRB76X\n9MQNkU/k6NV+DbwJrYU2I9/OUJqzAFT1EREZidvlfk5EfgZcs8D3X9aLQCKyBbgVeJOqPmqsb2/8\nPCUi/0CU/BjyVByRzlg/PFvpW/TLSNonYjlElDbOzCzRT8mAVqrWGbfiWJ/vE8kanZVMeHOWSrN0\nImkalUXDW3D9bfpwHJotW6to0C8K6fhwmpUT+nWzZotuz/CsiKwAdonIXwH7cNDQSkTWAd8ErlbV\n7xvLR4F1qvqUiIwBrwLutL39QJN0xvp0H5OgkglsulhB1a6NeF5dqlLv0U+NyrgNx3rNilnFlHEm\ng3wt64yf5ypuNZHEJ9KQ2/okEj2bvoTDsxU2r3PbbdM27RzrzTpxfmlV3V5Jb4y/+25gBjiRyGfR\nEyLyWhHZAzwf+KaI3BF/9G5gO/DBVCjvSuAOEbmXKGt+L/DpXrcfWJy0JjJd7N8kkoT4HnbQKjTt\nWG+Yi3owZ6WPUbFStzI4m5VwZ2P5svqikgnPVpHIhbZjaiJZkyTbrR9aJ5HJuUrmpl39pl0/kQMz\nZdasHHV2blyx6KggIiPAX6rqFUARuDbrRlX1ViKTVXr5nwN/3uFnz8263UD3pO39M+X+1fRJtp2U\naD/aohlNUsUNG+aiDJpIsrZStWZlADDzRKZLNVaMFDJHHiUTXuIDylrfa6HtJD4RsB9p1Ck01reI\npnYFTg/O9M/vaJNFr6TYB3JybM4KLBPSDsyZUtW6aaITibn4wEwUmGcz8iZdO6uRzJcpOsuuJmKG\nydooA5+sEwzHuuPoLFu+nHbrB7PUTI1yre5dN8B2ZjlfJ5Fuz/Avge/HWeozyUJV/RsnUgVyJ202\nmOqjOStxOjY1EYuTCOmGT1nMWa3HqFitOUg2zF58MVpn9NxwrLvyiRSaNaAguy8nTXriThL0vNNE\naNX0IbreNx3lVxl46H4SeTR+FIAj3YkTGBTMePxqrU6pWu/fJJKYsxw4GqPgLNOxXkUEVvUw+JvZ\n02AzT6Q12dCmJlLsU7LhtKPs6/TEnYTFejeJtOkncnCmzJknrM1Fnix0m7F+LYCIrI3e6pRTqQID\nQDNXw1WkTSdGGj6R2JxlsU9EunbWdKnG6hWjPRUJNE0rqhqF+FrKWG9oImU7ZsRCShOxIWc7xLk5\nK3rWtCbiW4JeajLUuAHbMQ5aFrum24z1HSJyH3AvcJ+I/FxEgqN7iDH7T0yVkiSo/kSNSHxVHpqt\nsGblqNVyFunaWVl8DqZppVyro2pncC60RGfVrJiEkkkycayvcNAaF5pVfKfLVVaMZg8ISNN0rEfP\nk75qIvFzI/qxFPWj70dZIdt0e4b/HniXqm5V1a3AVcDnnEkVyB3zTinRRPpVjqFgaAW2ExzTIb7T\npWrPg7Q50Ta6GtowZ6WSDbMWX4SmrMVyjRWjBWv9WeZvJw7xdeRDKxRafQlNn4jfmsihmWgy9NGx\
n3u0VX1PVf0neqOr/A6puRAoMAuZdtquY/06MGJOI9ZLlqVLis+VaTyVPoLVIZanhsLYz4DeSDct2\nBuNG2RNLfptOJLLPOEpOTWsiiU9k7Sq/NBGM/xc0IxF76bCZNwseeRE5J375XRH5FPBlohu5NwB3\nuxUtkCemvX/aUiXZbjHdE7bNFKZNXUQyaSLSRhOxUfYkKR0CUUZ9r5OciekTcalRJnk406WaEx9a\nuuxJ0hrXVX8UV6QVwUarBQ/NWYud5b9Ovf+Q8Tq0yh1iTE3EdQ+KNCPGP8y2maKpPcCIRD6RY3vs\nAW5GChXj/uo2qhxHLWCbtnIbPpFmdFadDWtcayIwXao48aGl8ysm56qMFMS7ooWd20/brXrcDxY8\n8qr60n4JEhgszIz1JFzTdsz/YtsGBxnPLdE9wkypxuoNGX0iaCOJr5dQ4fnrjfqUJ6HVNu7oTe3O\npTkrysOpM1Ou99ToazHSvoSn5yqsHe8tui5PmuHh0XPS4te3ulnQZYhvXBjxTcBW8zeq+oduxArk\njZmx3m9zlqnq264Cmy6eOlOq9lxm3TxGRYuhs0npkNl4nXYc682D6iq8F+Lw5Fp0XE9eb7erIcyP\napos+lc3C+Zn3h+cqTA24p9GBd0nG/4TcA9wH1B3J05gUGiNznIT898JEYfmrJQZIUs5F1Nbs5l/\nIQL1ulFh2CNNJAlPnnLkWJ9XXbqPlRRsks68nyxWOGrVmHcaFXQ/iYyr6nudShIYKNLRWS5i/rvB\nnTkr6q8+W6llbvhUNx3rlmpn1ahb6XVirjPBZZXYJDx5rmynXMv8DURPSXdKV5OVa9K+neli1bsw\n5YRur/gvisjbRWSTiByTPJxKFsgVM3x1qlS1blbqFleOddUoUkm190G6OTBrw5xlzSeizTLwWSc5\naNVEXPUSgaQiQHQ8Vq2wv51C6k59xnLTsn6Rrp3Vz349tulW6jLwMeD9NDVJBU5xIVQgfwrN8TG+\nS8prEnHlE9FGL5Fe+5e79okkSZ427uj7pYkURChV61TramVCnb/+6NnM9O6XmdUmhZQmMlOqWons\ny4Nuj/5/Bbar6lMuhQkMDklmcF2jhK5+hfemsR03bw78zUz83v68pt9ozqImkq4/ZeMOtdWx7lYT\nmS27q8+VblHgr0+k+f+CqLWyjwEC0L056xFg1qUggcHCjILJU9W2XfbENCMkg3Svd/rmMWr6RCwl\nBqqpKVmYmIzXrn0iiRnOxSSSbpY2VepfszSbNBX9aD9K1brTgAeXdHv0Z4j6q38HKCULQ4jv8CJG\nFEyxUs+tHMPRlmsJmSW4s97pSxtNxMZA0PSJ2OmvnqwzwVUvkWg7TQ3PhTkroa5QrtYpV+us6VP+\nkk3SjvWSpV40edDt0f9a/AgsE0zbc7FSc9YJbzFsVzVtDPz15iDdax6GGcFWimtS2ShsaNafAkua\niDFvOM0TEWma9hzY+M1ghn5XUrCJpDSqUmXINRFVvVFEVgEnqepDWTcqIpcCHwZOB85V1Z3x8q3A\ng0CyjXtU9Z3xZ88FPg+sIspb+SM124IFrGKaDaI+Gflc4LYHItOxnjWJ0ry7n6vULMoqLT4b65qI\n4zyRBBeaSJ5JsLYxK0qXqm4LY7qk234irwZ2Ad+K358dt8rtld3A64DvtfnsUVU9O36801j+SeDt\nwKnx48IM2w8sQuPPWo8ij3xVtdOYVWBns0ZnNdZlV1tLwmRny1XGxwottcSyrDPBadmTllBinXZY\nAQAAGCRJREFUlz4R/yeRpFglJJqIn/+xbq+mDwPnAocBVHUXGcJ7VfXBpWg0IrIJWKuq98TaxxeA\n1/S6/cDiFFp8IsMziZj9KKaT6Kxe+4nE/56kiq8tba3Rk8NS8UVoBhSAW3OWOI4CSwd8gJ/mLGjt\nslmq1p36qlzS7dGvqOrTqZR8V+VPtonILuBp4E/jPiabgT3Gd/bEy9oiIlcCVwKcdNJJjsQcbkx7\nfzGHyJEPv/oMVji4M2vRROJBqFczlJmQaXOijdrjatTV0NJddv/KnjRfu/CJmMEMrvq49wuJzZb1\netQZ01dzVrdH/34R+T1gREROBf4Q+MFCPxCRO4Hj23z0flW9rcPP9hH5XQ7EPpCviciZXcrYQFVv\nAG4A2LFjR/Cb9EBjgKwr5WrdSrOlpfCWF2xzs2KjH8V0ucqKkd7LuZiRXkWLxygxc9jqagjp6Cy3\njvUE18mGU6Wkq6GfkwgSXYelqr3w8Dzo9uj/AVG2eomoMdUdwJ8t9ANVfdlShVHVUrwNVPUnIvIo\n8ExgL7DF+OqWeFnAEYmpJumTkZdj3TZmJv5sqZapW6MZYVOy2DEwaUplUxPJwyfixrFuhFWXE03S\nz0kkygeKnOrg9ry4pNvorFmiSeT9LoURkY3AQVWticgpRA70X6rqQRGZFJHzgR8SlaX/O5eyLHeS\nO8pG9rGnTr80ZlOq6E6/9wGopcd6tW6tu17UlCpy/NszZ/Wv7EljOw41Eds9XPIgMmc1NZGhdqyL\nyA4R+aqI/FRE7k0evW5URF4rInuA5wPfFJE74o9eDNwb+0T+EXinqh6MP3sX8Bmi7PlHgdt73X5g\ncZKhIPmj+ur0S9MyCFVqmcxF5oRUqtSstMaFZp7IXKVuz8/SxwKMCa5DfOfiKgG+TiKJY71USSYR\nP/9j3d7m3AT8MZb6iajqrcCtbZZ/BfhKh9/sBM7Kuu1AdyR3rnPDpomkiiZmGaTNfu1WHeuxT6RY\nqVkbIPutiYwUhLER+70xTBOizSoBeZDUSGuYszy9Uet2EplQ1Sx5IQHPSAZIm82WBgFzEMoclmtM\nSFYTMuOmVMVazYnW4FITSY7v+GjBSYMlM7rOZpWAPJDYsd6ou+bpjVq3k8iHROQzwF201s76qhOp\nArkzzyfi6V1SmoZfXaOggSzhoQUj0qtYqVm7w0/WO2dREzFxq4lEz67KmptlT+xWCeg/SQDFctFE\n/jPwW8AYTXOWAmESGVKS/6rNPhmDgBndU6zUWb86i0+ElnXZSzaMfSLlGuMOBkm3BRhjTcTR9dLi\nEym7mWT7hcSthH13rHc7iTxPVU9zKklgoEhrIr7andOYjvVSJZu5qNlzRSlarMJaEKFajwYXF4Ok\n2wKMbrdhlj0pVu0FHuRBUjvL9xDfbqX+gYic4VSSwEAhQ+sTiZ5tONYTTaRcraNq7xiJ4DR81W2e\nSHRUXGkIZtmTubLf5XiSpNJGdNaQm7POJ+on8hiRTyQy56k+25lkgVxJBoOmOcvPCzyNWZ04azkX\nMXwXYG9wNhs7ubD5uy4FDw4nETG0v0qNVR5fl0mIbyOhd8jNWaFi7jIjMUs0zVl+XuCdsKKJJNpa\nI5fGXk5H0jbVxYDfj4x1V3fVZsUB/wuDxiG+y0ETUdVfuxYkMFjM84l4eoGnadFEsvpEUpqIvWRD\ntxV3Rx2GxDais5xrItFxt1UlIA+iY+W/Y304RoaAdZJh
Zviis6Lncq1OXbOZENJZ/dZ8IsZrl/Wn\nXOA6OitdccBF9Fq/kDgfaLk41gPLjKHNWCft68mSsZ7SRBxkl/sWwurcsW4meFbcRK/1C0HiKEG/\ny574KXXAOU2fSJWC4KSERR6kfT1ZzFnzI9hcmLP8+ou6TjZMlz3x7fiYJL6vYrXGaEEYHfFzX/ys\noRxwjhl5ND424tQE0k9sOsPnrctie9wEm2ahD7zqjEYjJ1ckE6ArH1prgqe9lsR5IHEHy6g1rp8T\nCIRJJNCBZCCr1JQ1K/29wNOkw3Kz5YmkTH62NJGCqYnYGyTf+kJHjb4MqnFYWa8thxfDLDXjc0tZ\naNbOKuXQ9M0m/p6BgFPEcYRQXsxzhme4A0wad9n3iTRf+2auqdUj+74r00wjMKKq1OrquSbSrJ0V\nNJHA0OHKpJI3Np3h85z0lga0sUJzQPHt2Nfiynquwoib5y8yy/l2fEwKjdpZGiaRwPDR2n/C3ws8\njc1yLvMSMi1pDaMj/mqBiQIy4mgSafqhkr7k/l6bAo1+Ir6dZxN/z0DAKS09uT2+wNM0qhO7iM6y\npYkYpiBbCYz94vfOO5lnbzmKC8863sn6G36oiv+VFJI2yKWM5XfyJmgigbYUWnIV/L3A0yQa1qyF\nBMF0Lo0tTWSFMYn4Fva5bcNqvv7uFzpbf2LpS0yIPjvWiUv+R9FZHk+GeQsQGHx8TuhK03CsW9Ae\nzKx+EXtmv9EhyclxwfyIOH+vzbjqSeRY93gyzEVyEblURO4XkbqI7DCWXyEiu4xHXUTOjj+7W0Qe\nMj47Ng/Zlwuu6zflRUHSg1D22lmz5Si6xlYuzZhn2kc/mV8Y1N9jFZmzNJizemQ38DrgU+ZCVb0J\nuAlARJ4FfE1VdxlfuUJVd/ZNymWM6RMZKk0k5cewkmxosTUuNKsDuHJO+0zzmPsfnZXUzrLZWjkP\ncplEVPVBWLQQ3O8CN/dFoMA8WqKzPP6jprHZOz4xrZQqddYdYa+abKKJhElkPsl1OVOKzt8RHhdg\nbNFEgjnLCW8AvpxadmNsyvqALDADiciVIrJTRHZOTEy4lXJIGVpNJH6ei/0YKzKYjsyKwDYHgWQS\n8XmAdEVy/pKmXb4foyjENzjW2yIid4rI7jaPS7r47XnArKruNhZfoapnAi+KH2/s9HtVvUFVd6jq\njo0bN2bel+VIa8b6IN9rLJEkxDeuu5TFj2H+NMtklCYxZ612VDrEZwopTcRnc1ahUTsrZKy3RVVf\nluHnl5PSQlR1b/w8JSL/AJwLfCHDNgJdMkyaiGnOyjo5msEHK6z6RCK5Vq8cnuNuC2k41hNNxN+J\nNip7Erdp9vhGbeAkF5ECcBmGP0RERkVkQ/x6DHgVkXM+0AdclfXOA7N2Vta7WFMTsXkn2ZxE/B0g\nXdHQRGKfls83OBLniZSDOWvpiMhrRWQP8HzgmyJyh/Hxi4HHVfWXxrKVwB0ici+wC9gLfLpvAi9z\nhsqxXmgtcZ9pXS2aiL2/UjJ51JNG64EGDU2klERnDdx9cNcURBqtcX3ej7yis24Fbu3w2d3A+all\nM8Bz3UsWaIfPd3tpTMfssUeutLIusKuJnHb8kZy8/giueul2a+scFsTQRFZ53udGGI7yLUFfDiyK\nz3dJaZJBp1ipWzBnuSlSuWblKN/945daW98wYXbcXDtuL6w6D0SEouetcWEAfSKBwcPnng1pbPbq\naPWJDM8xGmSS3JxKTb2OzILo+mnUAAuTSGCYWbvK7zs+E9P4Mag+kUBnWvKXPA/4KIhYadOcN+HK\nDyyKzWzsvGmpCZZRezAnJJt5IoEFMA6674mGpk/Et5L/Jv5KHugbRw2TJmLRnGVOSD7H+fvEMBUG\nbTFnebwv4coPLMowTSItfVKy3sk6ylgPdMbU/nyPGhQjxDf4RAJDje93fJ3I6gxv7f4Y/kr9oLVZ\nmt/XpasQ8X6zLEN8K5UKe/bsoVgs5i3KkhkfH2fLli2MjbnXDv73fzqHn+952vl2+olNc4gZ4rti\nxO8BzResapI502IO9Ti6b1lOInv27OHII49k69atXiUrqSoHDhxgz549bNu2zfn2LjxrExeetcn5\ndvqJXZ9I83WIzuoTLefP34EX7F6LeeKv5BkoFousX7/eqwkEojvf9evXe6lBDQrmKc9qDhFCiG+/\nGaYWBa2BGf7uy7K98n2bQBJ8lXtQsGvOar72+U7SJ8zr3/sQX0cFPPuNv5IHAj3QmmwYMtZ9Y5iS\nDU3CJBJYMt/61rc47bTT2L59O9ddd13e4iwbxKIm0qrVhL9SPzBNiL77RIbFsR6u/Byo1WpcddVV\n3H777TzwwAN8+ctf5oEHHshbrGWBWHTMtoZo+jsI+IRNn1beJPtSkGY3Sx9ZltFZJtd+434eeGLS\n6jrPOGEtH3r1mR0//9GPfsT27ds55ZRTALj88su57bbbOOOMM6zKEZiPTZ9I0ET6T8skssLvY55c\nPysztmnOG7/Pgqfs3buXE088sfF+y5Yt7N27N0eJlg8tPpGMdmibWk2gO4Yx2dD3RNVlr4kspDEE\nhg+r5ixH/UQCnRmu2lmJJuL3teO39J6yefNmHn/88cb7PXv2sHnz5hwlWj64GoR8H9B8YbhqZ0XP\nvvvTwiSSA8973vN4+OGHeeyxxyiXy9x8881cfPHFeYu1LHDlmPX9btIXzPN3xAq/DSkNc5bn104u\n0ovIx0TkFyJyr4jcKiLrjM+uEZFHROQhEXmFsfy5InJf/NknxGNP1OjoKNdffz2veMUrOP3007ns\nsss488xgVusHZrVdm87wYcpZGGTMv/2wONZ912Lzmsq/DVyjqlUR+ShwDfAnInIGcDlwJnACcKeI\nPFNVa8AngbcDPwT+CbgQuD0X6S1w0UUXcdFFF+UtxrLDLE9is9SE7/2+fcT3wbdpzvJ8Msxjo6r6\nz6pajd/eA2yJX18C3KyqJVV9DHgEOFdENgFrVfUeVVXgC8Br+i54wHvGHGkivg9oPrLac3NWoon4\nXndtEKT/fZoaxWbgceOzPfGyzfHr9PK2iMiVIrJTRHZOTExYFjfgM+YfNjSS8psjx/2eRBKniO83\nIM7+RSJyp4jsbvO4xPjO+4EqcJPNbavqDaq6Q1V3bNy40eaqA54zahRfsuFWO3frMWw6ajzzegJL\nZ9Tzm4CmT8Tv/XA2lavqyxb6XETeArwKuCA2UQHsBU40vrYlXraXpsnLXB4ILAnb8Rg3vf086o3L\nNxDonuRKHA8hvktHRC4E/htwsarOGh99HbhcRFaKyDbgVOBHqroPmBSR8+OorDcBt/Vd8EAgxdhI\nwfs4f984+8R1nHjMqrzFyEzDsR40kZ64HlgJfDu+M7xHVd+pqveLyC3AA0RmrqviyCyAdwGfB1YR\n+VC8jcwKBAK9c8s7no/iv/Zn1s7ymVwmEVXdvsBnfwH8RZvlO4GzXMrVTz7+8Y/zmc98BhHhWc96\nFp/73OeYnZ3
lDW94A7/61a/YunUrt9xyC0cffXTeogYCA4Xv0UwJw1I7y2/pPWXv3r184hOfYOfO\nnezevZtarcbNN9/MddddxwUXXMDDDz/MBRdcEPqMOOKWdzyfO97z4rzFCCxzRuIgD999Ip7HyFng\n9qvhN/fZXefxz4JXLjwBVKtV5ubmGBsbY3Z2lhNOOIGPfOQj3H333QC8+c1v5iUveQkf/ehH7coW\n4Nxtx+QtQiAwNBnrQRPJgc2bN/O+972Pk046iU2bNnHUUUfx8pe/nP3797Np0yYAjj/+ePbv35+z\npIFAwDW+Z6wHTWQRjcEFhw4d4rbbbuOxxx5j3bp1XHrppXzpS19q+Y6IeN2oJhAILEy1HgUHHOF5\n3TW/p0BPufPOO9m2bRsbN25kbGyM173udfzgBz/guOOOY9++fQDs27ePY489NmdJA4GAK8q1OhDM\nWYEeOOmkk7jnnnuYnZ1FVbnrrrs4/fTTufjii7nxxhsBuPHGG7nkkksWWVMgEPCVcjXKXgjmrMCS\nOe+883j961/POeecw+joKM95znO48sormZ6e5rLLLuOzn/0sJ598MrfcckveogYCAUecsC5KmDzq\nCL8rQIdJJCeuvfZarr322pZlK1eu5K677spJokAg0E/e9qJTWDFa4JyT/M4FC5NIIBAI5MDmdau4\n5pWn5y1GZvw2xgUCgUAgV5btJKKeVl71Ve5AIDCcLMtJZHx8nAMHDng3IKsqBw4cYHw89K8IBAKD\nwbL0iWzZsoU9e/bgY9fD8fFxtmzZsvgXA4FAoA8sy0lkbGyMbdu25S1GIBAIeM+yNGcFAoFAwA5h\nEgkEAoFAz4RJJBAIBAI9I75FKC0VEZkAft3jzzcAT1kUJ0+GZV+GZT8g7MugMiz7knU/TlbVjYt9\naegnkSyIyE5V3ZG3HDYYln0Zlv2AsC+DyrDsS7/2I5izAoFAINAzYRIJBAKBQM+ESWRhbshbAIsM\ny74My35A2JdBZVj2pS/7EXwigUAgEOiZoIkEAoFAoGfCJBIIBAKBngmTSBtE5EIReUhEHhGRq/OW\npxtE5Fcicp+I7BKRnfGyY0Tk2yLycPx8tPH9a+L9e0hEXpGf5CAify8iT4rIbmPZkmUXkefGx+AR\nEfmEiMiA7MuHRWRvfG52ichFg74vInKiiHxHRB4QkftF5I/i5d6dlwX2xavzIiLjIvIjEfl5vB/X\nxsvzPSeqGh7GAxgBHgVOAVYAPwfOyFuuLuT+FbAhteyvgKvj11cDH41fnxHv10pgW7y/IznK/mLg\nHGB3FtmBHwHnAwLcDrxyQPblw8D72nx3YPcF2AScE78+EvjXWF7vzssC++LVeYm3uSZ+PQb8MJYl\n13MSNJH5nAs8oqq/VNUycDNwSc4y9colwI3x6xuB1xjLb1bVkqo+BjxCtN+5oKrfAw6mFi9JdhHZ\nBKxV1Xs0+pd8wfhN3+iwL50Y2H1R1X2q+tP49RTwILAZD8/LAvvSiYHcF42Yjt+OxQ8l53MSJpH5\nbAYeN97vYeELblBQ4E4R+YmIXBkvO05V98WvfwMcF7/2YR+XKvvm+HV6+aDwByJyb2zuSswNXuyL\niGwFnkN05+v1eUntC3h2XkRkRER2AU8C31bV3M9JmESGhxeq6tnAK4GrROTF5ofxHYeX8dw+yx7z\nSSLz6NnAPuCv8xWne0RkDfAV4D2qOml+5tt5abMv3p0XVa3F//MtRFrFWanP+35OwiQyn73Aicb7\nLfGygUZV98bPTwK3Epmn9seqK/Hzk/HXfdjHpcq+N36dXp47qro//vPXgU/TNB0O9L6IyBjRoHuT\nqn41XuzleWm3L76eFwBVPQx8B7iQnM9JmETm82PgVBHZJiIrgMuBr+cs04KIyGoROTJ5Dbwc2E0k\n95vjr70ZuC1+/XXgchFZKSLbgFOJHG2DxJJkj9X5SRE5P440eZPxm1xJ/uAxryU6NzDA+xJv97PA\ng6r6N8ZH3p2XTvvi23kRkY0isi5+vQr4HeAX5H1O+hVZ4NMDuIgoguNR4P15y9OFvKcQRWH8HLg/\nkRlYD9wFPAzcCRxj/Ob98f49RA5RTCn5v0xkTqgQ2Wff2ovswA6igeBR4HriigwDsC9fBO4D7o3/\n2JsGfV+AFxKZRe4FdsWPi3w8Lwvsi1fnBXg28LNY3t3AB+PluZ6TUPYkEAgEAj0TzFmBQCAQ6Jkw\niQQCgUCgZ8IkEggEAoGeCZNIIBAIBHomTCKBQCAQ6JkwiQQCHRCRdSLyLuP9CSLyj4629RoR+aCF\n9fx3EfltGzIFAt0QQnwDgQ7EdZb+r6qetchXbWzrB8DFqvpUxvWcDHxaVV9uR7JAYGGCJhIIdOY6\n4Blxr4mPichWifuEiMhbRORrcf+GX4nIu0XkvSLyMxG5R0SOib/3DBH5VlwY819E5LfSGxGRZwKl\nZAIRkc+LyCfj9fxSRF4SFwh8UEQ+H39nJP7e7rgvxH8BUNVfA+tF5Pj+HKLAcmc0bwECgQHmauAs\njQreJZqJyVlEFWHHicps/4mqPkdEPk5USuJ/ADcA71TVh0XkPOB/AWlz0wuAn6aWHQ08H7iYKJv6\nBcDbgB+LyNlEfW82J1pSUg4j5qfx97/S224HAt0TJpFAoHe+o1F/iikReRr4Rrz8PuDZcdXYfw/8\nH6Nx3Mo269kETKSWfUNVVUTuA/ar6n0AInI/sBX4LnCKiPwd8E3gn43fPgmckHXnAoFuCJNIINA7\nJeN13XhfJ/pvFYDDiSazAHPAUR3Wba63sW5VPSQi/w54BfBO4DLg9+PvjMfrDAScE3wigUBnpoja\nqfaERj0rHhORSyGqJhsP/GkeBLYvZd0isgEoqOpXgD8lasmb8EyaFWkDAaeESSQQ6ICqHgC+Hzuv\nP9bjaq4A3ioiSYXldq2Wvwc8RwybVxdsBu6Ou9x9CbgGGn0ztgM7e5Q3EFgSIcQ3EBgARORvifwg\nd2Zcz2uBc1T1A3YkCwQWJmgigcBg8JfAERbWM4oHbV4Dw0PQRAKBQCDQM0ETCQQCgUDPhEkkEAgE\nAj0TJpFAIBAI9EyYRAKBQCDQM2ESCQQCgUDP/H/PN0hwXWcfMQAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from bmtk.analyzer import plot_potential\n",
+ "\n",
+ "plot_potential(cell_vars_h5='output/membrane_potential.h5', gids=[0, 80])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 5. Current clamping the cells (Optional)\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.13"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/bmtk-vb/docs/tutorial/06_population_modeling.ipynb b/bmtk-vb/docs/tutorial/06_population_modeling.ipynb
new file mode 100644
index 0000000..bc03167
--- /dev/null
+++ b/bmtk-vb/docs/tutorial/06_population_modeling.ipynb
@@ -0,0 +1,558 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Chapter 6: Population Level Modeling (with PopNet)\n",
+ "\n",
+ "In this tutorial we will focus on modeling of populations and population firing rates. This is done with the PopNet simulator application of bmtk which uses [DiPDE](https://github.com/AllenInstitute/dipde) engine as a backend. We will first build our networks using the bmtk NetworkBuilder and save them into the SONATA data format. Then we will show how to simulate the firing rates over a given time-source.\n",
+ "\n",
+ "Requirements:\n",
+ "* BMTK\n",
+ "* DiPDE"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 1. Building the network\n",
+ "\n",
+ "\n",
+ "#### Converting existing networks\n",
+ "Like BioNet for biophysically detailed modeling, and PointNet with point-based networks, PopNet stores networks in the SONATA data format. PopNet supports simulating networks of individual cells at the population level. First thing you have to do is modify the node-types and edge-types of an existing network to use Population level models (rather than models of individual cells. \n",
+ "\n",
+ "---\n",
+ "**WARNING** - Converting a network of individual nodes into population of nodes is good for a quick and naive simulation, but for faster and more reliable results it's best to build a network from scratch (next section).\n",
+ "\n",
+ "---\n",
+ "\n",
+ "Here is the node-types csv file of a network set to work with BioNet "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
"
+ ],
+ "text/plain": [
+ " node_type_id ei morphology_file model_processing pop_name location \\\n",
+ "0 100 e NaN NaN Scnn1a L4 \n",
+ "1 101 i NaN NaN PV L4 \n",
+ "2 102 e NaN NaN LIF_exc VisL4 \n",
+ "3 103 i NaN NaN LIF_inh VisL4 \n",
+ "\n",
+ " model_template model_type dynamics_params \n",
+ "0 dipde:Internal population 472363762_pop.json \n",
+ "1 dipde:Internal population 472912177_pop.json \n",
+ "2 dipde:Internal population IntFire1_exc_pop.json \n",
+ "3 dipde:Internal population IntFire1_inh_pop.json "
+ ]
+ },
+ "execution_count": 2,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "pd.read_csv('sources/chapter06/converted_network/V1_node_types_popnet.csv', sep=' ')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Some things to note:\n",
+ "* **model_type** is now a population for all nodes, rather than individual biophysical/point types\n",
+ "* We have set **model_template** to dipde:Internal which will tell the simulator to use special DiPDE model types\n",
+ "* We are using new **dynamic_params** files with parameters that have been adjusted to appropiate range for DiPDE models.\n",
+ "* **morophology_file** and **model_processing**, which were used to set and processes individual cell morphologies, is no longer applicable.\n",
+ "\n",
+ "We must make similar adjustments to our edge_types.csv files. And finally when we run the simulation we must tell PopNet to cluster nodes together using the **group_by** property\n",
+ "\n",
+ "```python\n",
+ "network = popnet.PopNetwork.from_config(configure, group_by='node_type_id')\n",
+ "```\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Building a network\n",
+ "\n",
+ "We will create a network of two populations, one population of excitatory cells and another of inhibitory cells. Then we will save the network into SONATA formated data files.\n",
+ "\n",
+ "The first step is to use the NetworkBuilder to instantiate a new network with two populations:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "from bmtk.builder import NetworkBuilder\n",
+ "\n",
+ "\n",
+ "net = NetworkBuilder('V1')\n",
+ "net.add_nodes(pop_name='excitatory', # name of specific population optional\n",
+ " ei='e', # Optional\n",
+ " location='VisL4', # Optional\n",
+ " model_type='population', # Required, indicates what types of cells are being model\n",
+ " model_template='dipde:Internal', # Required, instructs what DiPDE objects will be created\n",
+ " dynamics_params='exc_model.json' # Required, contains parameters used by DiPDE during initialization of object\n",
+ " )\n",
+ "\n",
+ "net.add_nodes(pop_name='inhibitory',\n",
+ " ei='i',\n",
+ " model_type='population',\n",
+ " model_template='dipde:Internal',\n",
+ " dynamics_params='inh_model.json')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Next we will create connections between the two populations:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "net.add_edges(source={'ei': 'e'}, target={'ei': 'i'},\n",
+ " syn_weight=0.005,\n",
+ " nsyns=20,\n",
+ " delay=0.002,\n",
+ " dynamics_params='ExcToInh.json')\n",
+ "\n",
+ "net.add_edges(source={'ei': 'i'}, target={'ei': 'e'},\n",
+ " syn_weight=-0.002,\n",
+ " nsyns=10,\n",
+ " delay=0.002,\n",
+ " dynamics_params='InhToExc.json')"
+ ]
+ },
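+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Note that the inhibitory-to-excitatory connection uses a negative syn_weight; in this setup a negative weight is what makes the connection inhibitory."
+   ]
+  },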
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "and finally we must build and save the network"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "net.build()\n",
+ "net.save_nodes(output_dir='network')\n",
+ "net.save_edges(output_dir='network')"
+ ]
+ },
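+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a quick sanity check we can list the output directory, which should now contain the generated SONATA files (a minimal sketch; the exact file names, e.g. V1_nodes.h5 and V1_node_types.csv, follow from the network names used above):\n",
+    "\n",
+    "```python\n",
+    "import os\n",
+    "\n",
+    "print(sorted(os.listdir('network')))\n",
+    "```"
+   ]
+  },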
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "##### External Nodes\n",
+ "\n",
+ "The *dipde:Internal* nodes we created don't carry intrinsic firing rates, and instead we will use External Populations to drive the network activity. To do this we will create a separate network of 'virtual' populations, or alternativly use model_type=dipde:External, that connect to our excitatory population. \n",
+ "\n",
+ "Note: we could add 'virtual' populations directly to our V1 network. However creating them as a separate network provides a great advantage if/when we want to replace our external connections with a different model (Or if we want to remove the reccurrent connections and simulation with only feed-foward activity)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": [
+ "input_net = NetworkBuilder('LGN')\n",
+ "input_net.add_nodes(pop_name='tON',\n",
+ " ei='e',\n",
+ " model_type='virtual')\n",
+ "\n",
+ "input_net.add_edges(target=net.nodes(ei='e'),\n",
+ " syn_weight=0.0025,\n",
+ " nsyns=10,\n",
+ " delay=0.002,\n",
+ " dynamics_params='input_ExcToExc.json')\n",
+ "\n",
+ "input_net.build()\n",
+ "input_net.save_nodes(output_dir='network')\n",
+ "input_net.save_edges(output_dir='network')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 2. Setting up the PopNet environment\n",
+ "\n",
+ "Before running the simulation we need to set up our simulation environment, inlcuding setting up run-scripts, configuration parameters, and placing our parameter files in their appropiate location. The easiest way to do this is through the command-line:\n",
+ "\n",
+ "```bash\n",
+ "$ python -m bmtk.utils.sim_setup -n network --run-time 1500.0 popnet\n",
+ "```\n",
+ "\n",
+ "Which creates initial files to run a 1500 ms simulation using the network files found in our ./network directory.\n",
+ "\n",
+ "#### Inputs\n",
+ "\n",
+ "We next need to set the firing rates of the External Population. There are multiple ways to set this value which will be discussed later. The best way is to set the firing rates using a input-rates file for each External Population, we can fetch an existing one using the command:\n",
+ "\n",
+ "```bash\n",
+ " $ wget https://github.com/AllenInstitute/bmtk/raw/develop/docs/examples/pop_2pops/lgn_rates.csv\n",
+ "\n",
+ "```\n",
+ "\n",
+ "Then we must open the simulation_config.json file with a text editor and add the lgn_rates.csv file as a part of our inputs:\n",
+ "\n",
+ "```json\n",
+ " \"inputs\": {\n",
+ " \"LGN_pop_rates\": {\n",
+ " \"input_type\": \"csv\",\n",
+ " \"module\": \"pop_rates\",\n",
+ " \"rates\": \"${BASE_DIR}/lgn_rates.csv\",\n",
+ " \"node_set\": \"LGN\"\n",
+ " }\n",
+ " }\n",
+ "```"
+ ]
+ },
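+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Before running the simulation it can be worth double-checking the downloaded rates file by loading it with pandas (a minimal sketch; we assume here the file is space-separated like the other csv files in this tutorial):\n",
+    "\n",
+    "```python\n",
+    "import pandas as pd\n",
+    "\n",
+    "print(pd.read_csv('lgn_rates.csv', sep=' '))\n",
+    "```"
+   ]
+  },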
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 3. Running the simulation\n",
+ "\n",
+ "The call to sim_setup created a file run_pointnet.py which we can run directly in a command line:\n",
+ "```bash\n",
+ "$ python run_popnet.py config.json\n",
+ "```\n",
+ "\n",
+ "Or we can run it directly using the following python code:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "2018-09-20 09:43:43,783 [INFO] Created log file\n",
+ "2018-09-20 09:43:43,841 [INFO] Building cells.\n",
+ "2018-09-20 09:43:43,845 [INFO] Building recurrent connections\n",
+ "2018-09-20 09:43:43,852 [INFO] Build virtual cell stimulations for LGN_pop_rates\n",
+ "2018-09-20 09:43:43,867 [INFO] Network created.\n",
+ "running simulation...\n",
+ "done simulation.\n"
+ ]
+ }
+ ],
+ "source": [
+ "from bmtk.simulator import popnet\n",
+ "\n",
+ "configure = popnet.config.from_json('simulation_config.json')\n",
+ "configure.build_env()\n",
+ "network = popnet.PopNetwork.from_config(configure)\n",
+ "sim = popnet.PopSimulator.from_config(configure, network)\n",
+ "sim.run()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 4. Analyzing results\n",
+ "\n",
+ "As specified in the \"output\" section of simulation_config.json, the results will be written to ouput/spike_rates.csv. The BMTK analyzer includes code for ploting and analyzing the firing rates of our network:"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXwAAAEKCAYAAAARnO4WAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XmUXGW57/Hv09VzupMmAxmAkEFQwhDAgAfkqIGrogJH\nlPFyuSiyAI8MDgRyOKCiHCeW4lFEb5ayuAoX4SiHJVyVqx7mQU00kSHKIGMCJCFjTzU+949d1V1d\nPaTSqV1Vvffvs1avqtq1q/ZTTerph2e/+33N3RERkehrqHUAIiJSHUr4IiIxoYQvIhITSvgiIjGh\nhC8iEhNK+CIiMaGELyISE0r4IiIxoYQvIhITjbUOoNj06dN93rx5tQ5DRGTCWLVq1SZ3n1HOvnWV\n8OfNm8fKlStrHYaIyIRhZi+Vu69aOiIiMaGELyISE3XV0hERGY833niDzZs31zqMqpk6dSozZ87c\n5dcp4YvIhLd582b2339/EolErUMJXTab5ZlnnhlXwldLR0QiIQ7JHnbvcyrhi4hUwPr167n++ut5\n8cUXueyyy0bcZ6znioW1MJVaOiISCdfc/RRPr98+rtcumjOZL5x44LDtL730Et/85jdxdxYuXMia\nNWv46le/yuc+9zluueUWLr/8cjKZDHPnzuXkk09m3bp1PPzwwzz66KPccMMNLF26lFtvvZU33niD\n8847j+eff37gueOPP54vfvGLtLa2cuKJJ7J48WLOOussTjzxRMyMpUuXcuSRR3Laaadx++23Y2a7\n+ytSwueVP0IuA/seVetIRKTO3HjjjbS1tdHW1sYTTzzBcccdxwknnMDdd9/N2rVraW5u5rrrrgOC\n6h3gmGOOYfXq1Vx00UU8++yz9Pf3M3PmTH7yk59w+eWXDzx32WWX8eUvf5n58+dz6qmnsnjxYhYt\nWsTy5cvZtm0by5cvp6+vj3e/+90VSfYQ94TvDj/6b8H9q9+ERLx/HSIT2UgV+u7K5XKcffbZHHLI\nIQB85StfYfr06fT09ODuNDQM74oXb/vOd77DsmXLcHe+8IUvDHnO3QcSeeF2ypQpA7cdHR1cf/31\n3HLLLRX7PPHOcFtfHry/6W8ws/L/YERk4rrooou48sormT17Np2dnSSTSe644w7OO+88br31Vvr6\n+li2bBn77LMPJ510EgCzZs3iueee41vf+hZLly7l61//+sCImuLnLrjgAq6++mra29s588wzhx37\nox/9KLfddhsdHR0V+zwW1smB8ViyZIlXdWqFlx+Hm94f3P/wD+DQ4b90Eal/a9eu5YADDqh1GBXz\n9NNPc8011/CNb3yDfffdd9jzxZ/XzFa5+5Jy3jfeFX73hsH7216pXRwiIkUWLVrE7bffXvH3DTXh\nm9mLwA4gC2TK/StUNT3FCf/V2sUhIlIF1ajwl7r7piocZ9d1bwxuZx2shC8ikRfvC6/6tkDrFNhj\nHmxfV+toRKQO/eIXv+Chhx4atv2UU04Zcb+bb76Ze+65Z8hzy5YtG/E1IwnzvGrYFb4DvzWzLPC/\n3H1FyMfbNaluaO6EyXvD8/fVOhoRqUObN2/m5Zdf5qabbmLBggU0NDTwr//6r2zatImrrrqK1atX\ns2LFCjZv3jww7PK2227jvvvuY8899+SKK67ghRde4OGHH+bpp5/m2muv5ZOf/CSf/vSnmTx5Mocc\ncggXXHABBx54IGeffTZmxvz58znttNM455xz+P73v097e3tFPkvYCf8Yd19nZnsCvzGzv7r7g8U7\nmNn5wPkAc+fODTmcEskd0NIBk+cEyT+5A1o6qxuDiFTGr5bD60+M77WzDoYPfG3MXY4//nhOP/30\ngSGUTU1NXHvttdx111088MADQ/Z93/vexznnnMMpp5xCLpcDgguyFi1axFVXXcUNN9zAueeey9Kl\nSzn11FO54IILmDNnDsuXLyebzfKxj32MI444glmzZlUs2UPILR13X5e/3QD8J3DkCPuscPcl7r5k\nxoyyVumqnFQ3NE+CztnB4+2vVff4IjJhTJo0CRhsuRQukmptbSWZTI74mtIrZAuPiy+6Kii8XyKR\n4PDDD+eiiy7iwgsvrNwHIMQK38wmAQ3uviN//33Al8I63rikeqC5AzpnBY93vAYz9q9tTCIyPjup\n0Kvp3nvvZc2aNSxZsmTI1bVNTU1cffXVXHrppXzmM5/hzjvvZOnSpcNef8YZZ3D//fczf/78isYV\n2oVXZraAoKqH4A/L/3H3fxvrNVW/8OrGo2HqfHjvl+C7h8PJK2Dx6dU7vohURJQuvHrttddYtmwZ\nl1xyCUceOawpAtThhVfu/ndgcVjvXxGpHUFLpyO/kMCO9bWNR0Rib/bs2RWdP6dYvIdlJruDlk5L\nB7RMhh2v1zoiERmnepomJky78znjnfDTvdCcPwPeOSvo4YvIhNPU1ER/f3+tw6iK/v5+mpqaxvXa\n+M6l4w6ZfmhsCx53ztYoHZEJavr06QPz0cfB7Nmzx/W6+Cb8bCq4bWwJbjtnw0uP1i4eERm3rq4u\nurq6ah1G3YtvSyeT/9+/xtbgttDSyV8kISISNTFO+PkLJYor/Fwa+jbXLiYRkRDFOOGXVPiT8z0x\nnbgVkYiKccIvVPiFlo6mVxCRaItxwi9U+IWWTtH0CiIiERTjhF9S4XcUEr4uvhKRaIpxwi+p8Bub\noX26plcQkciKbcLPpUpO2kLQx1eFLyIRFcuE//NVr3LxTx4LHhQqfAhG6mipQxGJqFgm/DtWvoJl\nS3r4EKxtu/mFYNoFEZGIiV3Cz+WcNa9upYV0sKG4wp+6MFgFq3tDbYITEQlR7BL+pu4k/ekce7QE\nUyikrHnwyWlvCW43P1+DyEREwhW7hL9uax8Ab50WTC/6Wk9R+2baguD2TSV8EYme2CX89VuD0TkL\n9wgmCn1pW3bwySlzoaFRFb6IRFIME35Q4e/blQDgpW2ZwScTjUFbZ8PaWoQmIhKq+CX8bX1Mak4w\ntdlJeYIXNyeH7jDnMFj/Z43UEZHIiV3Cf7M7xYzOFiybJGNNAxX/gDmHQfcbsF1X3IpItMQu4W/p\nTTGlvRmyKbINzSMk/MOD2/V/qn5wIiIhil3C39qbZo/2Jsim8IYm1m8rWfh41sHBxVgvPFSbAEVE\nQhK7hL+lN8Ue7c2QzUCiiY07kiQzRSN1mlph/rvhmV+rjy8ikRK7hL+1N01XexPk0jQkgrH4r5dW\n+W/7EGx9CV75Qw0iFBEJR6wSfiqTozuZoautGbJpGpqCaRXWlfbxD/ootEyGR75dgyhFRMLRWOsA\nqmlbXzB/zh6TmmBjmsbGYFqFwsVYA1o64JjPwO+ugce+B+/4JDQ0QN8W2PRs8PPmc7DlRejfCv3b\nINUDuQzksuBZyOXAc1X+hCIyIU2aBhc+HPphYpXwt/amAOhqb4ZcmsbmQsLvG77z0RfDq3+Ee6+E\n+78eXJTV++bg8w2N0DUX2qZCaxdM3
ivY1pAAS+RvDbAqfDIRmdBaJlflMLFK+IUKf3JrY9DSSTQz\no7Nl5ISfaILTb4Wn74IXHw6q9anzYfpbYfp+0LVv8EdARGSCCD1jmVkCWAmsc/cTwj7eWLqTwTQK\nna1NkE1Dook5U1qH9/ALGhrgoI8EPyIiE1w1TtpeCtTF5DSFhN/R0gi5NDQ0MqerbeQKX0QkYkJN\n+Ga2N/Ah4IdhHqdcPYWEn2/pkGjKJ/x+XGPuRSTiwq7wvw1cDtTFcJUd/UUVfjYNiWbmdLXRl84O\n9PdFRKIqtIRvZicAG9x91U72O9/MVprZyo0bN4YVDgA9yeCK2knNiYGWzl5dwZq2o/bxRUQiIswK\n/53ASWb2IvBT4Fgzu6V0J3df4e5L3H3JjBkzQgwHupNp2poSNCYahrR0YISx+CIiERNawnf3f3H3\nvd19HnAG8F/u/j/COl45upMZJrXkByYVtXRglLH4IiIREqupFbqTWTpb8wk/39KZNqmZ5sYGJXwR\nibyqXDnk7vcD91fjWGPp7k8HJ2xhoKVjZuzV1aYevohEXqwq/J5klkktwVq2hZYOwJyuVlX4IhJ5\nsUr4O5IZOlqCKZELLR2AOVPadNJWRCIvVgm/J5kZ7OHnWzoA+0xt540d/QMXZomIRFGsEn4wSicR\nrGSVG2zpHDB7Mu7w19e31zhCEZHwxCvh9+dbOrl8Jd8QVPgHzgmmJn1qvRK+iERXbBJ+MpMllc3R\n0ZII2jkwML3x7Cmt7NHexFPrlPBFJLpik/AL0yoMzJQJAy0dM+PAOVNY8+rWWoUnIhK6GCX8wkyZ\nTYMVfr6lA3DUwmn89fUdbNih0ToiEk2xSfiDM2UOb+kAvGu/YB6f+/8W7gRuIiK1EpuE35MKEv6k\nEVo6AAftNZl509r52cpXaxGeiEjoYpPw+1JBD7+9OTFiS8fMOOsd+/KHFzfz8LObahGiiEiodjqX\njpktAf4RmAP0AU8Cv3H3LSHHVlG9+YTf1tQ4YksH4Oyj9uXW37/EJT/9M185+WCOWjiNlsYGkpkc\nyUyWZDo3eD+TI5nO0T+wPctYi2aZjS9uG+OFY73leI8nItXX0pjgvYtmhn6cURO+mX0cuBh4AVgF\n/A1oBY4BrjCzJ4Gr3f3l0KOsgL500NJpa05AZniFD9DalOCmjx3BeT9eyYW3jLlui4hIxUzvaKlt\nwgfagXe6+4izipnZocB+wIRI+L3FLZ1kKthY1MMvWDCjg19f+i4eenYjL2zqIZXN0dKYoKWxIfhp\nKrrfmKClqWHgcaJh5A7ZWOvljrWS7tjL7I7xnlqeV2RCaWiozv+Sj5rw3f17AGa2j7u/Uvycmc1y\n99VhB1dJhR5+W3MCsvkrbRMjf/zmxgaOOyD8v7YiItVUzknbF8zsNjNrL9r2y7ACCstAwm9KDI7S\nKWnpiIhEWTkJ/wngIeBhM1uY3zbhTgn2prM0JYymRANkR2/piIhEVTkrXrm732hma4C7zewKxm49\n16W+VDao7qGopaMKX0Tio5yEbwDu/oiZHQfcAbwt1KhC0JfK0t5ctJ4tDCyAIiISB+VkvA8W7rj7\na2a2FDg6vJDC0ZvOBidsQS0dEYmlscbhf7bo/ki7PBhGQGHpS2XU0hGRWBvrpG1n0c9lJY87ww+t\nsnpT2WAMPqilIyKxNNY4/GsK983sw8WPJ6K+dDaYCx/U0hGRWCp38rQJNyqn1NBROoW5dNTSEZH4\niM1smUNbOoU1bdXSEZH4GOuk7RMMVvZvMbO/FJ4iGJt/SNjBVVJvKktbYVhmdvh8+CIiUTdWiXtC\n1aKogv50UYU/0MNXS0dE4mOshP+yjzXNI2BmtrN96oG701s8LHOgpaOELyLxMVYP/z4zu9jM5hZv\nNLNmMzvWzP43cE644VVGMpMj5xRdeJUGS8Ao0xmLiETRWBX+8cC5wG1mNh/YSrAASgL4f8C33f3P\no73YzFoJLs5qyR/nZ+7+hUoFviuGLG8IQUtH7RwRiZmxxuH3AzcCN5pZEzAd6HP3rWW+dxI41t27\n869/2Mx+5e6P73bUu6g3XZLwcxm1c0Qkdsoal+juaeC1XXnjfG+/O/+wKf9Tk35/ocJvLR6Hrwpf\nRGIm1Ca2mSXMbDWwgWDh89+HebzRDLZ0iq60VcIXkZgJNeG7e9bdDwX2Bo40s4NK9zGz881spZmt\n3LhxYyhx9KaCUTlq6YhInO004ZvZJDNryN/f38xOyvfky5bv+99HcCK49LkV7r7E3ZfMmDFjV962\nbIUe/pBROqOsZysiElXlVPgPAq1mthfB6JyzgZt39iIzm2FmXfn7bcB7gb+OP9Tx6y9ezxbyLR1d\nZSsi8VJOwjd37wU+Atzo7qcCB5bxutkEY/n/AvyRoId/z/hDHb/e0mGZaumISAyVtcShmR0FnAV8\nIr8tsbMXuftfgMN2I7aKUUtHRKS8Cv/TwL8A/+nuT5nZAoJ+/ITRN3DStniUjlo6IhIvOy1z3f0B\n4AEza88//jtwSdiBVVJfKgcwdC4dtXREJGbKGaVzlJk9Tf6Eq5ktNrMbQ4+sgnrTGZobG0g05Nfm\nVUtHRGKonJbOt4H3A28CuPsa4F1hBlVpfcWLn4BaOiISS2VdeOXur5RsyoYQS2h6U1nam4oSfi6t\nlo6IxE45fY1XzOxowPMXXF0KrA03rMrqS2dpHVLhZ9TSEZHYKafCvxD4FLAXsA44FPjnMIOqtGEt\nnVxaLR0RiZ1yyty3uvtZxRvM7J3AI+GEVHm9qQztTUUfNZtSS0dEYqecCv+7ZW6rW30ptXREREbN\nevmra48GZpjZZ4uemkwZV9rWk750ltmlJ23V0hGRmBmrzG0GOvL7dBZt3w6cEmZQldY70rBMtXRE\nJGbGWuKwcIXtze7+UhVjqri+VHZwHh3It3SU8EUkXsppZPea2XUEM2S2Fja6+7GhRVVhvans4LQK\nkG/pKOGLSLyUc9L2VoJpFeYD1wAvEkx3PCG4O33popaOu1o6IhJL5ST8ae7+IyDt7g+4+7nAhKnu\n+9P5idMKM2Xm8hcJq8IXkZgpp6WTzt++ZmYfAtYDU8MLqbKGr2eb/zgNGpYpIvFSTta71symAJ8j\nGH8/GfhMqFFVUO9IyxuChmWKSOyMmfDNLAHsl1+acBuwtCpRVVDfsNWugopfLR0RiZsxe/jungXO\nrFIsoegbtp6tWjoiEk/lZL1HzOwG4Hagp7DR3f8UWlQVpJaOiEignIR/aP72S0XbnAkyUqcvHbRw\nhixgDmrpiEjslLOm7YTr2xcrrGc7sIB5Lt/DV0tHRGKmrBWvJrJhwzIHWjqq8EUkXiKf8AujdFqb\nSls66uGLSLxEP+GXjtJRD19EYmqnjWwz+8gIm7cBT7j7hsqHVFnDRunkVOGLSDyVc+byE8BRwH35\nx+8BVgHzzexL7v6TkGKriL50lpbGBhoaLNigYZkiElPlJPxG4AB3fwPAzGYCPwbeATwI1HfCH7
b4\niVo6IhJP5fTw9ykk+7wN+W2bGZxYrW4Fq12VLGAOmh5ZRGKnnAr/fjO7B/iP/OOP5rdNAraO9iIz\n24fg/wRmElyotcLd/303491lfekMrU1Ff9fU0hGRmCon4X+KIMm/M//4x8DP3d0ZezK1DPA5d/+T\nmXUCq8zsN+7+9G5FvIv6hlX4mjxNROKpnCttHfhZ/qds7v4a8Fr+/g4zWwvsBVQ14fcOW89WFb6I\nxNNOe/hm9hEze9bMtpnZdjPbYWbbd+UgZjYPOAz4/fjCHL/+dMl6trrSVkRiqpyTtt8ATnL3Ke4+\n2d073X1yuQcwsw7g58Cn3X3YHwozO9/MVprZyo0bN5YfeZl6Rx2lowpfROKlnIT/hruvHc+bm1kT\nQbK/1d3vHGkfd1/h7kvcfcmMGTPGc5gxDWvp5DQsU0TiqZyTtivN7HbgLiBZ2DhaAi8wMwN+BKx1\n92/tVpS7YfSWjip8EYmXchL+ZKAXeF/RNgfGTPgEo3rOBp4ws9X5bVe6+y93OcrdMGpLR+PwRSRm\nyhml8/HxvLG7PwzYeF5bKbmc05fO0lZ64ZUloCHy88aJiAwxasI3s8vd/Rtm9l2Cin4Id78k1Mgq\nIJkpLH5SUuGrnSMiMTRWhV84UbuyGoGEobD4ydAevhK+iMTTqAnf3e82swRwsLtfVsWYKmZgauTS\nC680QkdEYmjMRra7ZxmcUmHC6U+XLH4CSvgiElvljNJZbWa/IJg8raewcWfDMuvBsMVPIN/SUcIX\nkfgpJ+G3Am8CxxZtK2dYZs2N2NLJqYcvIvEU2rDMejDY0ikZlqmELyIxFOlhmaO2dBrK+R8bEZFo\nGSvzFaYxnvDDMoeftFWFLyLxM1bCPx24B+iqxUpVlVBo6bTpwisRkTGHZb7dzOYA55rZHmY2tfin\nWgHuDo3SEREZNFaF/wPgd8ACYBVD58Xx/Pa6NnLCT0FLZ40iEhGpnVErfHf/jrsfANzk7gvcfX7R\nT90newhaOq1NDTQ0FP2tUoUvIjG10ykj3f2T1QgkDL2pkrnwQVfaikhsRXqO4GAu/JKulS68EpGY\ninTC70+XLG8IGqUjIrEV6YTfm8qM3NLRhVciEkORTvh9I1b4uvBKROIp2gm/dD1bgGxGCV9EYinS\nCV+jdEREBkU/4RdX+O5K+CISWxFP+Bk6WopO0OYygEOipWYxiYjUSqQTfk+yZBx+pj+4bVTCF5H4\niWzCT2VypLI5OlqKWjqZVHDb2FqboEREaiiyCb8wF/6klpEqfI3SEZH4iWzC706OlfBV4YtI/EQ2\n4fckg6mRJw3p4SeDW/XwRSSGIpvwByv84rnwCwlfFb6IxE9kE36hhz9kWGahwteVtiISQ6ElfDO7\nycw2mNmTYR1jLD3JwgLm6uGLiEC4Ff7NwPEhvv+YuvM9/KEVvoZlikh8hZbw3f1BYHNY778zg8My\ni8fh68IrEYmvyPbwRx6WqVE6IhJfNU/4Zna+ma00s5UbN26s2Pv2JDMkGoyWxqKPqApfRGKs5gnf\n3Ve4+xJ3XzJjxoyKvW9PMsuk5gRmNrhRwzJFJMZqnvDD0pPMDG3ngFo6IhJrYQ7LvA14DHirmb1q\nZp8I61gj6UmNlPDzLR1NjywiMRTaat7ufmZY712O7mRWFb6ISJHItnR6kxkmla5nm0kG1X1xX19E\nJCYim/C7R+vhq7oXkZiKbMLvKV3eEIIevhK+iMRUZBN+d3+GztaREn5bbQISEamxSCZ8d2d7f4bJ\nrU1Dn0j1QHN7bYISEamxSCb8nlSWbM6Z3FZS4ad6oEkJX0TiKZIJf3tfGmB4hZ/uheZJNYhIRKT2\nIpnwt+UT/pS2kVo6SvgiEk+RTPgDFX5pwk/3qqUjIrEVzYTfH0yNPPJJW1X4IhJP0Uz4o7Z01MMX\nkfiKZMLfNtDSKRmlk9YoHRGJr0gm/O39QcIftp5tLqNx+CISW9FM+H3BtAqNiaKPl+oObps7ahOU\niEiNRTLhb+tLD+/fp3uDW7V0RCSmIpvwh82jk8onfJ20FZGYimTC39KbYuqk5qEb0z3BrSp8EYmp\nSCb8N7uTTOsomQa5f3tw2zq5+gGJiNSBiCb8FNNKK/z+rcFta1f1AxIRqQORS/j96Sw7khmmd5Qk\n/L58wm9TwheReIpcwt/ckwIY3tLp2xLcqsIXkZiKXMJ/szuf8Edq6TQ0apSOiMRW5BL+pu4kMFKF\nvzWo7s1qEJWISO1FLuGv39YHwJyu1qFP9G6C9mk1iEhEpD5ELuG/uqWPxgZjz86ShL/jDeicWZug\nRETqQCQT/pyuNhINJa2b7tehc3ZtghIRqQORS/jrtvSy9x5tQze6w47XoUMVvojEV+QS/sub+4Yn\n/O4NkE3BlL1rE5SISB2IVMLf1J1kU3eS/Wd2Dn3izWeD2+n7VT8oEZE6EWrCN7PjzexvZvacmS0P\n81gAT60P5ss5YHbJfDkb/xrcTlPCF5H4Ci3hm1kC+B7wAWARcKaZLQrreACPPr+JpoSxeJ+Sq2lf\negw6ZqmlIyKxFmaFfyTwnLv/3d1TwE+BfwrrYP3pLHevXs8/LJg2dGnD7o3wzL3wluN00ZWIxFrj\nzncZt72AV4oevwq8o9IHcXeev/btJLJ93JLLMXtLM1wPeBY8F1xh6zk46lOVPrSIyIQSZsIvi5md\nD5wPMHfu3PG8nq2TFtBIhlldk2ib0g4NCbBEUNE3tcPi02HmgZUOXURkQgkz4a8D9il6vHd+2xDu\nvgJYAbBkyRIfz4GWfPZn43mZiEishNnD/yOwn5nNN7Nm4AzgFyEeT0RExhBahe/uGTO7CLgXSAA3\nuftTYR1PRETGFmoP391/CfwyzGOIiEh5InWlrYiIjE4JX0QkJpTwRURiQglfRCQmlPBFRGLC3Md1\nrVMozGwj8NI4Xz4d2FTBcCqt3uMDxVgJ9R4f1H+M9R4f1FeM+7r7jHJ2rKuEvzvMbKW7L6l1HKOp\n9/hAMVZCvccH9R9jvccHEyPGkailIyISE0r4IiIxEaWEv6LWAexEvccHirES6j0+qP8Y6z0+mBgx\nDhOZHr6IiIwtShW+iIiMYUIl/J0tim6B7+Sf/4uZHV6HMZ6Vj+0JM3vUzBbXW4xF+x1hZhkzO6Xe\n4jOz95jZajN7ysweqGZ85cRoZlPM7G4zW5OP8eNVju8mM9tgZk+O8nxNvytlxFcP35MxYyzarybf\nk3Fx9wnxQzDF8vPAAqAZWAMsKtnng8CvAAP+Afh9HcZ4NLBH/v4H6jHGov3+i2C201PqKT6gC3ga\nmJt/vGe9/Q6BK4Gv5+/PADYDzVWM8V3A4cCTozxf6+/KzuKr6feknBiL/i1U/Xsy3p+JVOGXsyj6\nPwE/9sDjQJeZza6nGN39UXffkn/4OMFKYNVU7uLyFwM/BzZUMzjKi++/A3e6+8sA7l6PMTrQaWYG\ndBAk/Ey1AnT3B/PHHE1Nvys7i68Ovifl/A6hdt+Tc
ZlICX+kRdH3Gsc+YdrV43+CoMqqpp3GaGZ7\nAScD369iXAXl/A73B/Yws/vNbJWZ/c+qRRcoJ8YbgAOA9cATwKXunqtOeGWp9XdlV9Tie7JTNf6e\njEvNFzGPKzNbSvAP+ZhaxzKCbwNXuHsuKFDrTiPwduA4oA14zMwed/dnahvWEO8HVgPHAguB35jZ\nQ+6+vbZhTSz6nlTWREr45SyKXtbC6SEq6/hmdgjwQ+AD7v5mlWIrKCfGJcBP8/+IpwMfNLOMu99V\nJ/G9Crzp7j1Aj5k9CCwGqpXwy4nx48DXPGj0PmdmLwBvA/5QnRB3qtbflZ2q8fekHLX8noxPrU8i\nlPtD8Mfp78B8Bk+UHViyz4cYeiLqD3UY41zgOeDoev09lux/M9U9aVvO7/AA4Hf5fduBJ4GD6izG\n7wNfzN+fSZBMp1f5v/U8Rj8pWtPvShnx1fR7Uk6MJftV9Xsy3p8JU+H7KIuim9mF+ed/QHCm/IME\n/1B6Caqseovx88A04MZ8ZZDxKk7CVGaMNVNOfO6+1sx+DfwFyAE/dPcxh85VO0bgy8DNZvYEQVK9\nwt2rNruimd0GvAeYbmavAl8Amoriq+l3pYz4avo9KTPGCUdX2oqIxMREGqUjIiK7QQlfRCQmlPBF\nRGJCCV8Hpf/MAAACGElEQVREJCaU8EVEYkIJX0QkJpTwJbLMrMvM/rno8Rwz+1lIx/qwmX1+jOcP\nNrObwzi2SLk0Dl8iy8zmAfe4+0FVONajwEljXVxlZr8FzvX8LJ8i1aYKX6Lsa8DC/EIp15nZvMJi\nFmb2MTO7y8x+Y2YvmtlFZvZZM/uzmT1uZlPz+y00s1/nZ+V8yMzeVnoQM9sfSBaSvZmdamZP5hc/\nebBo17uBM8L/2CIjU8KXKFsOPO/uh7r7shGePwj4CHAE8G9Ar7sfBjwGFKZcXgFc7O5vBy4Dbhzh\nfd4J/Kno8eeB97v7YuCkou0rgX/cjc8jslsmzFw6IiG4z913ADvMbBtBBQ7B/PWHmFkHwcpL/1E0\n/W3LCO8zG9hY9PgRgnl07gDuLNq+AZhTwfhFdokSvsRZsuh+ruhxjuC70QBsdfdDd/I+fcCUwgN3\nv9DM3kEwI+UqM3u7B9P7tub3FakJtXQkynYAneN9sQeLlbxgZqfCwMLfIy2mvRZ4S+GBmS1099+7\n++cJKv/CvPP7E0zlLFITSvgSWfmq+pH8CdTrxvk2ZwGfMLM1wFOMvP7vg8BhNtj3uc7MnsifIH6U\nYL58gKXA/x1nHCK7TcMyRSrAzP4duNvdfzvK8y3AA8Ax7l61xcxFiqnCF6mMrxCsvjWaucByJXup\nJVX4IiIxoQpfRCQmlPBFRGJCCV9EJCaU8EVEYkIJX0QkJv4/7tM1SnkwrJkAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "from bmtk.analyzer.visualization.spikes import plot_rates_popnet\n",
+ "\n",
+ "plot_rates_popnet('network/V1_node_types.csv', 'output/firing_rates.csv', model_keys='pop_name')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "collapsed": true
+ },
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.13"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/bmtk-vb/docs/tutorial/07_filter_models.ipynb b/bmtk-vb/docs/tutorial/07_filter_models.ipynb
new file mode 100644
index 0000000..1469e84
--- /dev/null
+++ b/bmtk-vb/docs/tutorial/07_filter_models.ipynb
@@ -0,0 +1,44 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Chapter 7: Modeling the visual field (with FilterNet)\n",
+ "\n",
+ "FilterNet is a part of the BMTK that simulates the effects of visual input onto cells in the LGN. It uses LGNModel as a backend, which uses neural-filters to simulate firing rates and spike-trains one may expect given a stimulus on (especially mouse) visual field. FilterNet supports a number of visual stimuli including static-graitings, moving-graiting, full-field flashes, static images and even movies.\n",
+ "\n",
+ "FilterNet is often useful for producing inputs for external networks to feed into models of the V1 and higher cortical areas."
+ ]
+ },
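+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As a preview, a FilterNet run is typically driven from a SONATA config file, much like the other simulator front-ends. The snippet below is a minimal sketch that assumes the usual bmtk entry points (`Config`, `FilterNetwork`, `FilterSimulator`) and a hypothetical `config.json`; the rest of this chapter builds up the pieces such a config refers to.\n",
+ "```python\n",
+ "from bmtk.simulator import filternet\n",
+ "\n",
+ "config = filternet.Config.from_json('config.json')  # hypothetical config path\n",
+ "config.build_env()\n",
+ "\n",
+ "net = filternet.FilterNetwork.from_config(config)\n",
+ "sim = filternet.FilterSimulator.from_config(config, net)\n",
+ "sim.run()\n",
+ "```"
+ ]
+ },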
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 1. Building the cells\n",
+ "\n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 2",
+ "language": "python",
+ "name": "python2"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 2
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython2",
+ "version": "2.7.13"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/bmtk-vb/docs/tutorial/NetworkBuilder_Intro.ipynb b/bmtk-vb/docs/tutorial/NetworkBuilder_Intro.ipynb
new file mode 100644
index 0000000..ff22dea
--- /dev/null
+++ b/bmtk-vb/docs/tutorial/NetworkBuilder_Intro.ipynb
@@ -0,0 +1,837 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# BMTK Builder (A Quick Introduction)\n",
+ "\n",
+ "The Brain Modeling Toolkit (bmtk) was designed to handle large-scale network simulations with pre-set connectivity matrices. Whereas other simulation tools will build and simulate a network in a single script, the bmtk splits up these two processes by saving networks to a file. The advantages of doing it this way includes:\n",
+ "* Significantly faster when running multiple simulations on the same network.\n",
+ "* Easy to update and adjust parameters with little-to-no programming required.\n",
+ "* Improves reproducability of simulations.\n",
+ "\n",
+ "Before running a simulation, users should either obtain existing network model files, or as described in this tutorial use the BMTK Builder to create their own from scratch. By default, the bmtk uses the recently developed SONATA dataformat to represent networks and network parameters - for a further information please see the [SONATA documentation](https://github.com/AllenInstitute/sonata/blob/master/docs/SONATA_DEVELOPER_GUIDE.md)."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Nodes\n",
+ "\n",
+ "Brain networks are represented with directed graph so every network needs nodes. (The simplest network consisting of one node and no edges, usually for single-cell simulations). The bmtk is designed to work across different levels of abstraction, so a node can represent a single biophysically detailed cell, a point-cell model, a population of cells, or even an entire brain region. \n",
+ "\n",
+ "To create our node(s) we use the NetworkBuilder class in bmtk.builder, then simply build and save the network."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from bmtk.builder import NetworkBuilder\n",
+ "\n",
+ "# Initialize our network\n",
+ "net = NetworkBuilder(\"mcortex\")\n",
+ "\n",
+ "# Add a population of 10 nodes (all of which share model_type, dynamics_params, etc.)\n",
+ "net.add_nodes(N=10, pop_name='Scnn1a',\n",
+ " mem_potential='e',\n",
+ " model_type='biophysical',\n",
+ " model_template='ctdb:Biophys1.hoc',\n",
+ " model_processing='aibs_perisomatic',\n",
+ " dynamics_params='472363762_fit.json',\n",
+ " morphology='Scnn1a_473845048_m.swc')\n",
+ "\n",
+ "# If needed we can add more populations\n",
+ "# net.add_nodes(N, ...)\n",
+ "\n",
+ "# Builds the network files\n",
+ "net.build()\n",
+ "\n",
+ "# Save the network into the specificed directory\n",
+ "net.save_nodes(output_dir='network')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "When the NetworkBuilder is instantiated we pass in a name, in this case calling it \"**mcortex**\" because we will be using mouse-cortex models - But you can use any name you want. Just be careful, as often a complete simulation will contain multiple networks (the bmtk/SONATA was designed largely to allow different parts of the network to be built indepenently), so having descriptive naming convention is important."
+ ]
+ },
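+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "For instance, a simulation that combines a recurrent cortical model with an external input source might keep them as two separately-built networks (the names here are purely illustrative):\n",
+ "```python\n",
+ "v1 = NetworkBuilder('v1')    # recurrent cortical network\n",
+ "lgn = NetworkBuilder('lgn')  # external input network, built independently\n",
+ "```"
+ ]
+ },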
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The add_nodes method is then used to add nodes to the network. The first parameter, **N=10**, indicates that we are adding 10 individual nodes each sharing the same pop_name, mem_potential, model_type etc.\n",
+ "\n",
+ "All of the other parameters are completely dependent on the type of network we are looking to build. In this case we want to build a network that runs in BioNet so the parameters are carefully choosen with:\n",
+ "* *__pop_name__*, *__mem_potential__* - optional parameters, not directly used by BioNet but will be helpfull in the descripion\n",
+ "* *__model_type__*, *__model_template__*, *__model_processing__* - [Attributes used by BioNet](https://github.com/AllenInstitute/sonata/blob/master/docs/SONATA_DEVELOPER_GUIDE.md#nodes---required-attributes) as instructions on how to build our NEURON-based cell models. All our cells are biophysically-detailed models, using customized templates and functions to build each cell.\n",
+ "* *__dynamics_params__*, *__morphology__* - Indicates to BioNet the electrophysiological and morphology files used to build each cell. These files can be downloaded from the [Allen Cell-Types Database](http://celltypes.brain-map.org/data).\n",
+ "\n",
+ "However the NetworkBuilder is simulator agnositc allowing modelers to choose whatever parameters they need depending on simulator and/or required to describe the models. For example the following could be used by another simulator to build a network describing 100 Izhikevich point neurons. Notice we no longer need parameters like morphology or model_processing, but have new parameter a, b, c, and d which would be required by any Izhikevich neuron model.\n",
+ "```python\n",
+ "net.add_nodes(N=100, \n",
+ " model_type='point_process',\n",
+ " model_template='nrn:Izhikevich.hoc',\n",
+ " param_a=0.05, param_b=0.25, param_c=-55.0, parm_d=10)\n",
+ "```\n",
+ " "
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Finally the network is built and saved into the _network/_ folder. When we look in the network folder there are two different files - mcortex_nodes.h5 and mcortex_node_types.csv. The individual cells are stored in the \\*nodes.h5 file. But properties that are shared by a group of nodes are stored in the \\*node_types.csv. **Node types** not only makes the format more compact and faster to read (probably not important for 10 cells, but very important when trying to run a simulation of 100K+ cells), and it also makes easier to change properties (like updating ephys params or using a different morphology) between simulations.\n",
+ "\n",
+ "Looking at the files"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "mcortex_nodes.h5\n"
+ ]
+ },
+ {
+ "data": {
+ "text/html": [
+ "
"
+ ],
+ "text/plain": [
+ " node_type_id dynamics_params model_type model_processing pop_name \\\n",
+ "0 100 472363762_fit.json biophysical aibs_perisomatic Scnn1a \n",
+ "\n",
+ " model_template morphology mem_potential \n",
+ "0 ctdb:Biophys1.hoc Scnn1a_473845048_m.swc e "
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "from bmtk.analyzer import node_types_table\n",
+ "print('mcortex_node_types.h5')\n",
+ "node_types_table(node_types_file='network/mcortex_node_types.csv', population='mcortex')"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "The nodes and node-types are linked together by the node_type_id foreign key. In this case all information (expect each of the 10 unique cell ids, which were autogenerated during the build processes) is stored in the node_types file because all the properties are shared among every node."
+ ]
+ },
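+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "One way to see the foreign-key join explicitly is to merge the two files by hand. This is a sketch that assumes the standard SONATA HDF5 layout (`/nodes/<population>/node_id` and `node_type_id` datasets) and the space-delimited node-types CSV shown above:\n",
+ "```python\n",
+ "import h5py\n",
+ "import pandas as pd\n",
+ "\n",
+ "with h5py.File('network/mcortex_nodes.h5', 'r') as h5:\n",
+ "    nodes = pd.DataFrame({\n",
+ "        'node_id': h5['/nodes/mcortex/node_id'][()],\n",
+ "        'node_type_id': h5['/nodes/mcortex/node_type_id'][()]\n",
+ "    })\n",
+ "\n",
+ "node_types = pd.read_csv('network/mcortex_node_types.csv', sep=' ')\n",
+ "\n",
+ "# Each node inherits the shared properties of its node type\n",
+ "full_table = nodes.merge(node_types, on='node_type_id')\n",
+ "```"
+ ]
+ },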
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Unique node properties\n",
+ "\n",
+ "Suppose we have some properties that are unqiue to each individual nodes within a node-type. Instead of calling add_nodes N times, we can just pass in a list of size N. \n",
+ "\n",
+ "In the following example we have two types of nodes, 10 biophysical pyramidal type cells and 5 point izhikevich type cells. For the pyramidal cells we have a new parameter 'tuning_angle' which is uniquly assigned a different value to each cell. Similarly for the Izhikevich cells the param_a and param_b parameters are now unqily assigned."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import numpy as np\n",
+ "\n",
+ "net = NetworkBuilder(\"mcortex2\")\n",
+ "net.add_nodes(N=10, pop_name='pyr',\n",
+ " model_type='biophysical',\n",
+ " model_template='ctdb:Biophys1.hoc',\n",
+ " dynamics_params='pyr_ephys.json',\n",
+ " morphology='pyr_morph.swc',\n",
+ " tuning_angle=np.linspace(0.0, 360.0, num=10, endpoint=False))\n",
+ "\n",
+ "net.add_nodes(N=5, pop_name='izh',\n",
+ " model_type='point_process',\n",
+ " model_template='nrn:Izhikevich.hoc',\n",
+ " param_a=[0.01, 0.02, 0.03, 0.04, 0.05], \n",
+ " param_b=np.random.rand(5),\n",
+ " param_c=-55.0, \n",
+ " d=10)\n",
+ "\n",
+ "net.build()\n",
+ "net.save_nodes(output_dir='network')"
+ ]
+ },
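+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "As a quick sanity check, the per-node values can also be inspected straight from the builder. This sketch assumes `net.nodes()` accepts property filters and yields dict-like node records, which may vary between bmtk versions:\n",
+ "```python\n",
+ "for node in net.nodes(pop_name='pyr'):\n",
+ "    print(node['node_id'], node['tuning_angle'])\n",
+ "```"
+ ]
+ },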
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Now when we look at the nodes.h5 file"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "mcortex_nodes.h5\n"
+ ]
+ },
+ {
+ "data": {
+ "text/html": [
+ "