%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% dictionary_learn_script.m
% This is an example script for basic sparse dictionary learning
% based on the Olshausen and Field, 1997 paper 'Sparse Coding with
% an Overcomplete Basis Set: A Strategy Employed by V1?'
%
% Deviations from the model presented there are:
% - Use of L1-regularized BPDN solvers, as well as options to use
%   hard-sparse MP-type solvers, for the inference step. This allows
%   faster and more accurate solutions for the sparse coefficients.
% - Normalization of the dictionary elements to unit norm at each
%   step rather than normalizing the variance of the coefficients. This
%   is the default. Also included is a method that normalizes the
%   dictionary elements by a Frobenius norm.
%
% Other optimizations:
%
% - Parallel for loops are used in the inference step to speed up
%   run-time. If the script is to be run on a cluster, use the
%   createMatlabPoolJob() function to allow for this speedup.
%
%
% Last Updated 6/3/2010 - Adam Charles
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
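% A minimal sketch of the learning loop described above (illustration only;
% the names X, D, a, lambda, step_size and n_iters are hypothetical stand-ins
% for a data matrix with one vectorized block per column, the dictionary, the
% sparse coefficients, and the opts.lambda / opts.step_size / opts.iters
% values set below):
%
%   for iter = 1:n_iters
%       % Inference: a few ISTA-style steps approximating the L1 (BPDN) problem
%       a = zeros(size(D, 2), size(X, 2));
%       L = norm(D)^2;                                 % Lipschitz constant of the fit term
%       for k = 1:20
%           g = a - (D'*(D*a - X))/L;                  % gradient step on the residual
%           a = sign(g).*max(abs(g) - lambda/L, 0);    % soft-threshold (L1 penalty)
%       end
%       % Dictionary update: one gradient-descent step, then renormalize columns
%       D = D + (step_size/size(X, 2))*(X - D*a)*a';
%       D = D./sqrt(sum(D.^2, 1));                     % unit-norm elements (the default here)
%   end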
%% Set Options
% Restart the parallel pool (default size) so that the parfor loops in the
% inference step and multithreaded matrix operations can use it.
poolobj = gcp('nocreate');
if ~isempty(poolobj)
    delete(poolobj);
end
parpool();
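% The default pool size is used above; to dedicate all but one core to the
% workers instead, the pool size can be set explicitly (a sketch; relies on
% the undocumented but widely used feature('numcores') call):
%
%   parpool(max(1, feature('numcores') - 1));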
% Seed the global random number stream from the clock
RandStream.setGlobalStream(RandStream('mt19937ar', 'seed', sum(100*clock)));
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Options
% Save Options
opts.save_name = 'dict_learn_test.mat'; % Basis Save Name
opts.save_every = 500; % Save the dictionary every save_every iterations
% Algorithm options
opts.data_type = 'square'; % Type of data (vector, square or cube)
opts.sparse_type = 'l1ls'; % Sparse inference solver for the coefficients
% Options: l1ls, l1ls_nneg, sparsenet, MP
opts.grad_type = 'norm'; % Choose whether to include the Frobenius norm in E(a,D)
opts.nneg_dict = 0; % Make the basis values all positive
% Dictionary options
opts.n_elem = 64; % Number of dictionary elements
opts.bl_size = 8; % Block Size (nxn)
opts.dep_size = 1; % Depth size for 'cube' data
% Iteration numbers
opts.iters = 1000; % Number of learning iterations
opts.in_iter = 75; % Number of internal iterations
opts.GD_iters = 1; % Basis Gradient Descent iterations
% Specify Parameters
opts.step_size = 5; % Initial Step Size for Gradient Descent
opts.decay = 0.9995; % Step size decay factor
opts.lambda = 0.6; % Lambda Value for Sparsity
opts.lambda2 = 0.02; % Frobenius norm lambda
opts.tol = 0.001; % Sparsification Tolerance
opts.h_sparse = 6; % Number of basis elements for hard-sparse
% Plotting Options
opts.bshow = 1; % Number of iterations to show basis
opts.disp_size = [8, 8]; % Basis display dimensions
% Data normalization options
opts.ssim_flag = 0; % Reduce variance for high variance blocks
opts.std_min = 0.10; % Minimum standard deviation
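% The l1-type solvers ('l1ls', 'l1ls_nneg', 'sparsenet') use opts.lambda to
% weight the L1 penalty, while the hard-sparse 'MP'-type option limits each
% block to at most opts.h_sparse active coefficients. A hypothetical
% illustration of that hard-sparsity constraint on a coefficient vector a:
%
%   [~, idx] = sort(abs(a), 'descend');
%   a(idx(opts.h_sparse+1:end)) = 0;    % zero out all but the h_sparse largest entries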
%% Load Training Data
% Load the IMAGES.mat dataset (Bruno Olshausen's prewhitened natural image
% set) used in the Olshausen & Field 1997 paper on dictionary learning.
fprintf('Loading Images...\n')
load('IMAGES.mat')
data_obj = IMAGES;
clear IMAGES;
fprintf('Images Loaded\n')
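% learn_dictionary presumably draws training blocks from data_obj internally;
% a hypothetical sketch of sampling one opts.bl_size x opts.bl_size block
% from a random image in the set:
%
%   im = data_obj(:, :, randi(size(data_obj, 3)));
%   r  = randi(size(im, 1) - opts.bl_size + 1);
%   c  = randi(size(im, 2) - opts.bl_size + 1);
%   x  = reshape(im(r:r+opts.bl_size-1, c:c+opts.bl_size-1), [], 1);  % vectorized block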
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Initialize Dictionary Elements
if strcmp(opts.data_type, 'vector')
    v_size = size(data_obj, 1);
elseif strcmp(opts.data_type, 'square')
    v_size = opts.bl_size^2;                  % pixels per bl_size x bl_size block
elseif strcmp(opts.data_type, 'cube')
    v_size = opts.dep_size*opts.bl_size^2;    % voxels per bl_size x bl_size x dep_size block
else
    error('Unknown Data Type!! Choose ''vector'', ''square'' or ''cube''...')
end
dictionary_initial = initialize_dictionary(opts.n_elem, v_size, opts.nneg_dict);
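% initialize_dictionary presumably returns a random v_size x n_elem dictionary
% with unit-norm columns (non-negative if opts.nneg_dict is set); a minimal
% sketch of that idea (the name D0 and the orientation are assumptions):
%
%   D0 = randn(v_size, opts.n_elem);
%   if opts.nneg_dict
%       D0 = abs(D0);
%   end
%   D0 = D0./sqrt(sum(D0.^2, 1));       % normalize each element to unit norm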
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Run Learning Algorithm
% Actually run the learning algorithm
[dictionary_out] = learn_dictionary(data_obj, dictionary_initial, opts); % use the initial dictionary computed above
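% A quick way to view the learned elements after the run, assuming
% dictionary_out is v_size x n_elem (learn_dictionary presumably also plots
% the basis every opts.bshow iterations during learning):
%
%   figure;
%   for kk = 1:size(dictionary_out, 2)
%       subplot(opts.disp_size(1), opts.disp_size(2), kk);
%       imagesc(reshape(dictionary_out(:, kk), opts.bl_size, opts.bl_size));
%       axis image off;
%   end
%   colormap gray;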
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Closing time
delete(gcp('nocreate'));
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%