-
Notifications
You must be signed in to change notification settings - Fork 3
/
AlternatingDifferentiation.m
258 lines (234 loc) · 10.1 KB
/
AlternatingDifferentiation.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
% Alternating seed neuron differentiation, from subsong through
% protosyllable stage through splitting, to generate figure 5 a-f
% Emily Mackevicius 1/14/2015, heavily copied from Hannah Payne's code
% which builds off Ila Fiete's model, with help from Michale Fee and Tatsuo
% Okubo.
% Calls HVCIter to step through one iteration of the model

%% Fixed parameters
seed = 9038;
p.seed = seed;           % seed random number generator
p.n = 100;               % n neurons
p.trainint = 10;         % Time interval between inputs
p.nsteps = 100;          % time-steps to simulate -- each time-step is 1
                         % burst duration.
nstepsSubsong = 1000;    % time-steps to simulate for subsong stage
p.pin = .01;             % probability of external stimulation of at least
                         % one neuron at any time
k = 10;                  % number of training neurons
p.trainingInd = 1:k;     % index of training neurons
p.beta = .115;           % strength of feedforward inhibition
p.alpha = 30;            % strength of neural adaptation
p.eta = .025;            % learning rate parameter
p.epsilon = .2;          % relative strength of heterosynaptic LTD
p.tau = 4;               % time constant of adaptation
gammaStart = .01;        % strength of recurrent inhibition
gammaSplit = .18;        % increased strength of recurrent inhibition to
                         % induce splitting
wmaxStart = 1;           % single synapse hard bound
wmaxSplit = 2;           % single synapse hard bound to induce splitting
                         % (increased to encourage fewer stronger synapses)
mStart = 10;             % desired number of synapses per neuron
                         % (wmax = Wmax/m)
Wmax = mStart*wmaxStart; % soft bound for weights of each neuron
mSplit = Wmax/wmaxSplit; % keep Wmax constant, change m & wmax to induce
                         % fewer stronger synapses
HowClamped = 10;         % give training neurons higher threshold
HowOn = 10;              % higher inputs to training neurons

% How many iterations to run before plotting
nIterProto = 500;        % end of protosyllable stage
nIterPlotSplit1 = 492;   % number of splitting iterations before plotting
                         % intermediate splitting phase
nIterPlotSplit2 = 2000;  % total number of splitting iterations

%% Parameters that change over development
% Preallocate the per-iteration parameter schedules: logical-indexed
% assignment into undefined variables would grow them implicitly, and
% re-running the script in a live workspace with different sizes could
% leave stale tail entries behind.
nIterTotal = nIterProto + nIterPlotSplit2;
protosyllableStage = [true(1,nIterProto) false(1,nIterPlotSplit2)];
splittingStage = [false(1,nIterProto) true(1,nIterPlotSplit2)];
gammas = zeros(1,nIterTotal);
wmaxs = zeros(1,nIterTotal);
ms = zeros(1,nIterTotal);
gammas(protosyllableStage) = gammaStart;
% Ramp recurrent inhibition up along a logistic sigmoid (slope 1/200,
% midpoint at splitting iteration 500).  Written out explicitly -- this is
% exactly sigmf(x,[1/200 500]) -- so the script does not require the
% Fuzzy Logic Toolbox.
gammas(splittingStage) = gammaSplit ./ ...
    (1 + exp(-(1/200)*((1:nIterPlotSplit2) - 500)));
wmaxs(protosyllableStage) = wmaxStart;
wmaxs(splittingStage) = wmaxSplit;
ms(protosyllableStage) = mStart;
ms(splittingStage) = mSplit;

%% Subsong inputs
rng(seed)
isOnset = rand(1,nstepsSubsong)>.9;         % random onset times (~10%)
Input = -HowClamped*ones(k, nstepsSubsong); % clamp training neurons
                                            % (effectively giving them
                                            % higher threshold)
Input(:,isOnset) = HowOn;                   % drive all training neurons
                                            % at onsets
bdyn = double(rand(p.n,nstepsSubsong)>=(1-p.pin)); % Random activation
bdyn(1:k,:) = Input;
subsongInput = bdyn;

%% Protosyllable inputs
PsylInput = -HowClamped*ones(k, p.nsteps); % clamp training neurons
                                           % (effectively giving them
                                           % higher threshold)
PsylInput(:,mod(1:p.nsteps,p.trainint)==1) = HowOn; % rhythmic activation
                                                    % of training neurons

%% Alternating inputs
AltInput = -HowClamped*ones(k, p.nsteps);
AltInput(1:k/2,mod(1:p.nsteps,2*p.trainint)==1) = HowOn;
AltInput((k/2+1):k,mod(1:p.nsteps,2*p.trainint)==p.trainint+1) = HowOn;
% alternating rhythmic activation of the two halves of the training neurons
%% Alternating differentiation: run simulation
% Draw the initial weight matrix from a uniform distribution scaled so
% each neuron's expected total weight is Wmax.
rng(seed);
w0 = 2*rand(p.n)*Wmax/p.n;

% --- Subsong stage: plasticity off (eta = epsilon = 0), random onsets ---
pSubsong = p;
pSubsong.gamma = gammas(1);
pSubsong.wmax = wmaxs(1);
pSubsong.m = ms(1);
pSubsong.eta = 0;
pSubsong.epsilon = 0;
pSubsong.nsteps = nstepsSubsong;
pSubsong.w = w0;
pSubsong.input = subsongInput;
[wSubsong, xdynSubsong] = HVCIter(pSubsong);
w = wSubsong;

% --- Learning stages: protosyllable iterations, then splitting ---
for iter = 1:(nIterProto+nIterPlotSplit2)
    p.w = w;
    % Parameters that change over development.
    p.gamma = gammas(iter);
    p.wmax = wmaxs(iter);
    p.m = ms(iter);

    % Construct this iteration's input: sparse random background drive,
    % with the seed neurons overwritten by the stage-appropriate pattern.
    bdyn = double(rand(p.n,p.nsteps)>=(1-p.pin)); % Random activation
    bdyn(1:k,:) = protosyllableStage(iter)*PsylInput + ...
        splittingStage(iter)*AltInput; % drive to seed neurons
    p.input = bdyn;

    % Step the model by one iteration.
    [w, xdyn] = HVCIter(p);

    % Snapshot the network at the three iterations plotted below.
    if iter == nIterProto
        wProto = w;
        xdynProto = xdyn;
    elseif iter == nIterProto + nIterPlotSplit1
        wSplit1 = w;
        xdynSplit1 = xdyn;
    elseif iter == nIterProto + nIterPlotSplit2
        wSplit2 = w;
        xdynSplit2 = xdyn;
    end
end
%% Alternating differentiation: plotting parameters
figure(1)
isEPS = 0;  % 1 = EPS-export sizing (smaller markers, Arial fonts)
clf
set(gcf, 'color', ones(1,3));
if isEPS
    PlottingParams.msize = 8;
    set(0,'defaultAxesFontName', 'Arial')
    set(0,'defaultTextFontName', 'Arial')
    set(gcf, 'units','centimeters', 'position', [5 5 13.5 6])
else
    PlottingParams.msize = 10;
end
% Common to both branches.
PlottingParams.linewidth = .25;
PlottingParams.labelFontSize = 7;

% Colors shared by all panels.
PlottingParams.SeedColor = [.95 .5 1];
PlottingParams.Syl1Color = [1 0 0];
PlottingParams.Syl2Color = [0 0 1];
PlottingParams.ProtoSylColor = [0 0 0];
PlottingParams.ProtoSylBarColor = [.5 .5 .5];
PlottingParams.SubsongSylColor = [0 0 0];
PlottingParams.SubsongBarColor = [1 1 1];
PlottingParams.numFontSize = 5;

% Weight-plotting thresholds.
PlottingParams.wplotmin = 0;
PlottingParams.wplotmax = 2;   % this should be wmaxSplit
PlottingParams.wprctile = 0;   % plot all weights above this percentile.
                               % If nonzero, ignores wplotmin, wplotmax
PlottingParams.wperneuron = 6;   % max outgoing weights plotted
PlottingParams.wperneuronIn = 9; % min incoming weights plotted
PlottingParams.totalPanels = 4;

% Subplot layout geometry.
nplots = 4;
bottom = .1;
height = .45;
scale = .005;
spacing = .75/(2*nplots);
%% Alternating differentiation: plotting subsong
% Describe the training-neuron events for the subsong panel, then plot.
subsongTrain.nIDs = 1:k;               % neurons receiving training input
subsongTrain.tind = find(isOnset);     % time indices of the random onsets
subsongTrain.candLat = 1:2*p.trainint; % candidate latencies to test
subsongTrain.thres = 12;  % criteria for participation during subsong
                          % (thres from testLatSig -- must fire at a
                          % consistent latency more than 12 times in the
                          % bout of ~100 syllables to count as
                          % participating)
trainingNeuronsSubsong = {subsongTrain};
PlottingParams.thisPanel = 1;
PlottingParams.Hor = 1;
plotSubsong(wSubsong, xdynSubsong, trainingNeuronsSubsong, PlottingParams)
%% Alternating differentiation: plotting protosyllable
% During the protosyllable stage both syllable "types" share the same seed
% neurons and timing, so fill both cells with identical descriptors.
for ii = 1:2
    trainingNeuronsPsyl{ii}.nIDs = 1:k;
    trainingNeuronsPsyl{ii}.tind = find(mod(1:p.nsteps, p.trainint)==1);
    trainingNeuronsPsyl{ii}.candLat = 1:p.trainint;
    trainingNeuronsPsyl{ii}.thres = 4; % criteria for participation during
                                       % protosyllable stage (thres from
                                       % testLatSig -- must fire at a
                                       % consistent latency more than 4
                                       % times in the bout of 10 syllables
                                       % to count as participating)
end
PlottingParams.thisPanel = 2;
subplot('position', ...
    [PlottingParams.thisPanel/nplots-.9/nplots, .6, .9/nplots, .35])
plotHVCnet(wProto,xdynProto,p.trainint,trainingNeuronsPsyl,PlottingParams)
set(gca, 'color', 'none');
PlottingParams.axesPosition = ...
    [PlottingParams.thisPanel/nplots-2*spacing, bottom, 40*scale, height];
plotAlternating(wProto, xdynProto, p.trainint, ...
    trainingNeuronsPsyl, PlottingParams)
set(gca, 'color', 'none')
%% Alternating differentiation: plotting splitting stages
% Seed neurons split into two groups, driven on alternate cycles.
trainingNeuronsAlt{1}.nIDs = 1:k/2;
trainingNeuronsAlt{2}.nIDs = (k/2+1):k;
trainingNeuronsAlt{1}.tind = find(mod(1:p.nsteps, 2*p.trainint)==1);
trainingNeuronsAlt{2}.tind = find(mod(1:p.nsteps, ...
    2*p.trainint)==p.trainint+1);
trainingNeuronsAlt{1}.candLat = 1:p.trainint;
trainingNeuronsAlt{2}.candLat = 1:p.trainint;
trainingNeuronsAlt{1}.thres = 2; % criteria for participation during
                                 % splitting stage (thres from testLatSig
                                 % -- must fire at consistent latency
                                 % more than 2 times in the bout of 5
                                 % syllables (of each type) to count
                                 % as participating)
trainingNeuronsAlt{2}.thres = 2;

% Panels 3 and 4 show the intermediate and final splitting networks.  The
% plotting calls are identical apart from the snapshot plotted, so loop
% over the two snapshots instead of duplicating the code.
splitW = {wSplit1, wSplit2};
splitXdyn = {xdynSplit1, xdynSplit2};
splitPanels = [3 4];
for ii = 1:2
    PlottingParams.thisPanel = splitPanels(ii);
    subplot('position', ...
        [PlottingParams.thisPanel/nplots-.9/nplots, .6, .9/nplots, .35])
    plotHVCnet(splitW{ii}, splitXdyn{ii}, p.trainint, ...
        trainingNeuronsAlt, PlottingParams)
    set(gca, 'color', 'none');
    PlottingParams.axesPosition = ...
        [PlottingParams.thisPanel/nplots-2*spacing, bottom, ...
        40*scale, height];
    plotAlternating(splitW{ii}, splitXdyn{ii}, p.trainint, ...
        trainingNeuronsAlt, PlottingParams)
    set(gca, 'color', 'none')
end
%% Alternating differentiation: exporting
% EPS branch: write Figure5a.eps via export_fig (not a MATLAB builtin --
% presumably the third-party export_fig package; confirm it is on the
% path).  NOTE(review): the cd below is a machine-specific absolute
% network path -- update it for your own setup before using isEPS = 1.
if isEPS
cd('Z:\Fee_lab\Papers\HVC_differentiation\Figures\EPS_files');
export_fig(1,'Figure5a.eps','-transparent','-eps','-painters');
else
%figure parameters, exporting
% Set paper size/position (units presumably the figure's default
% PaperUnits -- confirm) for a later print/saveas; the commented print
% line below is left as a manual export hint.
figw = 6;
figh = 2;
set(gcf, 'color', [1 1 1],...
'papersize', [figw figh], 'paperposition', [0 0 figw*.9 figh])
% print -dmeta -r150
end