comparison toolboxes/RBM/gen_training_rbm.m @ 0:e9a9cd732c1e tip

first hg version after svn
author wolffd
date Tue, 10 Feb 2015 15:05:51 +0000
function [W, visB, hidB] = gen_training_rbm(conf,W,mW,train_file,train_label)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Training a Knowledge-Based RBM for generative classification       %
% conf:        training settings (hidNum, sNum, bNum, eNum, gNum,    %
%              params = [lr_1 lr_2 momentum weight_cost])            %
% W:           weights of connections, carried over as prior         %
%              knowledge and padded to fit the new layer sizes       %
% mW:          mask of connections                                   %
% train_file:  .mat file holding the training data matrix            %
% train_label: .mat file holding the label vector                    %
% returns the trained weights and the visible/hidden biases          %
% -*-sontran2012-*-                                                  %
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
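% The visible layer is the concatenation of the input features and a
% softmax group of label units, trained jointly with contrastive
% divergence (CD). The weight update implemented below is
%   DW = lr*( (<v'h>_data - <v'h>_recon)/sNum - weight_cost*W )
%        + momentum*DW_prev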
%% load data
% Each .mat file is assumed to hold a single variable; load the first
% variable found in the file.
vars = whos('-file', train_file);
A = load(train_file,vars(1).name);
data = A.(vars(1).name);
vars = whos('-file', train_label);
A = load(train_label,vars(1).name);
label = A.(vars(1).name);
assert(~isempty(data),'[KRBM-GEN] Data is empty');
assert(size(data,1) == size(label,1),'[KRBM-GEN] Number of data and label mismatch');
Classes = unique(label)';
lNum = size(Classes,2);
% Append a softmax encoding of the labels to the visible layer
data = [data discrete2softmax(label,Classes)];
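% Illustrative example (not from the toolbox docs): with label = [1;3]
% and Classes = [1 2 3], discrete2softmax is expected to return
% [1 0 0; 0 0 1], i.e. one one-hot row per training case.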
%% initialization
visNum = size(data,2);
hidNum = conf.hidNum;
sNum = conf.sNum;
lr = conf.params(1);
N = 10; % Number of epochs trained with lr_1 before switching to lr_2
% Pad W to the current visible/hidden layer sizes
W = [W;0.1*randn(visNum - size(W,1),size(W,2))];
W = [W 0.1*randn(size(W,1),hidNum-size(W,2))];
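% Only the newly added rows/columns are drawn at random, so any weights
% inherited through the W argument survive the resize; with W = [] the
% whole matrix is initialized randomly.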

DW = zeros(size(W));        % weight-update buffer (momentum)
visB = zeros(1,visNum);     % visible biases
DVB = zeros(1,visNum);      % visible-bias update buffer
hidB = zeros(1,hidNum);     % hidden biases
DHB = zeros(1,hidNum);      % hidden-bias update buffer
visP = zeros(sNum,visNum);  % positive-phase visible states (data)
visN = zeros(sNum,visNum);  % negative-phase visible probabilities
visNs = zeros(sNum,visNum); % negative-phase visible samples
hidP = zeros(sNum,hidNum);  % positive-phase hidden probabilities
hidPs = zeros(sNum,hidNum); % positive-phase hidden samples
hidN = zeros(sNum,hidNum);  % negative-phase hidden probabilities
hidNs = zeros(sNum,hidNum); % negative-phase hidden samples
%% Reconstruction error & evaluation error & early stopping
mse = 0;
omse = 0;
inc_count = 0;
MAX_INC = 3; % stop once the error has increased MAX_INC times in a row
%% Average best settings
n_best = 1;
aW = zeros(size(W));
aVB = zeros(size(visB));
aHB = zeros(size(hidB));
%% ==================== Start training =========================== %%
for i=1:conf.eNum
    if i == N+1
        lr = conf.params(2); % switch to the second learning rate
    end
    omse = mse;
    mse = 0;
    for j=1:conf.bNum
        % Positive phase: clamp the next mini-batch on the visible layer
        visP = data((j-1)*conf.sNum+1:j*conf.sNum,:);
        % up
        hidP = logistic(visP*W + repmat(hidB,sNum,1));
        hidPs = 1*(hidP > rand(sNum,hidNum));
        hidNs = hidPs;
        % Negative phase: gNum steps of alternating Gibbs sampling
        for k=1:conf.gNum
            % down: sigmoid units for the features, softmax for the labels
            visN = hidNs*W' + repmat(visB,sNum,1);
            visN(:,1:visNum-lNum) = logistic(visN(:,1:visNum-lNum));
            visN(:,visNum-lNum+1:visNum) = softmax_activation(visN(:,visNum-lNum+1:visNum));
            visNs = [1*(visN(:,1:visNum-lNum)>rand(sNum,visNum-lNum)) visN(:,visNum-lNum+1:visNum)];
            % visualize the reconstructions of batch 5 (assumes 28x28 inputs)
            if j==5 && k==1, observe_reconstruction(visN(:,1:visNum-lNum),sNum,i,28,28); end
            % up
            hidN = logistic(visNs*W + repmat(hidB,sNum,1));
            hidNs = 1*(hidN>rand(sNum,hidNum));
        end
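        % This completes the negative phase of CD-k (k = conf.gNum):
        % the statistics below compare the data-driven pass (visP,hidP)
        % with the k-step reconstruction (visNs,hidN) rather than with
        % samples from the model's equilibrium distribution.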
        % Compute MSE for reconstruction
        rdiff = (visP - visN);
        mse = mse + sum(sum(rdiff.*rdiff))/(sNum*visNum);
        % Update W, visB, hidB with momentum and weight decay
        grad = (visP'*hidP - visNs'*hidN)/sNum;
        DW = lr*(grad - conf.params(4)*W) + conf.params(3)*DW;
        W = W + DW;
        % W = W.*mW;  % uncomment to enforce the connection mask mW
        DVB = lr*sum(visP - visN,1)/sNum + conf.params(3)*DVB;
        visB = visB + DVB;
        DHB = lr*sum(hidP - hidN,1)/sNum + conf.params(3)*DHB;
        hidB = hidB + DHB;
    end
    fprintf('Epoch %d : MSE = %f\n',i,mse);
    % Early stopping: count consecutive epochs with growing error
    if i > 1 && mse > omse
        inc_count = inc_count + 1;
    else
        inc_count = 0;
    end
    if inc_count > MAX_INC, break; end
end
end
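
% ---------------------------------------------------------------------
% Usage sketch (illustration only: the conf values and file names below
% are hypothetical, not taken from the toolbox):
%
%   conf.hidNum = 500;                    % hidden units
%   conf.sNum   = 100;                    % mini-batch size
%   conf.bNum   = 600;                    % batches per epoch (needs bNum*sNum data rows)
%   conf.eNum   = 50;                     % training epochs
%   conf.gNum   = 1;                      % Gibbs steps per update (CD-1)
%   conf.params = [0.1 0.05 0.5 0.0002];  % [lr_1 lr_2 momentum weight_cost]
%   [W,visB,hidB] = gen_training_rbm(conf,[],[],'train_data.mat','train_label.mat');
% ---------------------------------------------------------------------

% The helpers called above live elsewhere in the toolbox; the local
% functions below are minimal sketches of the behaviour this file
% assumes, for reference only (observe_reconstruction is a further
% toolbox dependency not sketched here).
function y = logistic(x)
% Element-wise sigmoid
y = 1./(1 + exp(-x));
end

function p = softmax_activation(x)
% Row-wise softmax; the row max is subtracted for numerical stability
e = exp(bsxfun(@minus, x, max(x,[],2)));
p = bsxfun(@rdivide, e, sum(e,2));
end

function s = discrete2softmax(label, Classes)
% One one-hot row per training case over the class set Classes
s = zeros(size(label,1), numel(Classes));
for c = 1:numel(Classes)
    s(label == Classes(c), c) = 1;
end
end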