Mercurial > hg > camir-aes2014
comparison core/magnatagatune/tests_evals/rbm_subspace/write_mat_results_ISMIR13RBM.m @ 0:e9a9cd732c1e tip
first hg version after svn
author | wolffd |
---|---|
date | Tue, 10 Feb 2015 15:05:51 +0000 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:e9a9cd732c1e |
---|---|
function [out, stats] = write_mat_results_ISMIR13RBM(dirin,fileout)
% combine the test results from the directories supplied,
% and recombine the bins to a coherent dataset
%
% allows for distinguishing different hidden state numbers
%
% dirin   - cell array of result directories, each containing one or more
%           '*_finalresults.mat' files (defaults to {'./'} when omitted)
% fileout - NOTE(review): declared but never used in this function
% out     - struct array, one entry per distinct parameter configuration,
%           holding per-configuration mean/var of test and train success
% stats   - NOTE(review): declared but never assigned; requesting this
%           second output will raise an error

% folders = {'130505_svm_disttest_subsconv_rbm_largegrid_21_r2385','130505_svm_disttest_subsconv_rbm_largegrid_22_r2385','130505_svm_disttest_subsconv_rbm_largegrid_23_r2385','130505_svm_disttest_subsconv_rbm_largegrid_24_r2385','130506_svm_disttest_subsconv_rbm_largegrid_25_r2385'};



features = []; % NOTE(review): unused in this function
show = 1;      % enables the summary plot and fprintf output below

if nargin == 0
    dirin{1} = './';
end

% globals shared with the surrounding test framework — not read or
% written here, only declared
global comparison;
global comparison_ids;

newout = [];
thisdir = pwd;
% loop through all the result directories and collect their results
for diri = 1:numel(dirin)

    % ---
    % go to directory and locate file
    % ---
    cd(dirin{diri});

    u = dir();
    u = {u.name};
    % indices (into u) of all final-results files in this directory
    [idx, strpos] = substrcellfind(u, '_finalresults.mat', 1);

    if numel(idx) < 1
        error 'This directory contains no valid test data';
    end

    % just one or more tests in this folder?
    % NOTE(review): 'file' is never numeric here — it is only ever
    % assigned a filename string in the loop below — so this branch
    % appears dead. It also discards the sappend() return value and
    % reads 'out' before it is initialised. Confirm before relying on it.
    if exist('file','var') && isnumeric(file)
        cprint(1, 'loading one result file');
        file = u{idx(file)};
        data = load(file);
        sappend(out,data.out);
    else
        for filei = 1:numel(idx)
            cprint(1, 'loading result file %i of %i',filei, numel(idx));
            file = u{idx(filei)};
            data = load(file);
            newout = sappend(newout,data.out);
        end
    end
    % reset act directory
    cd(thisdir);
end


% ---
% filter C values!
% ---
% keep only results trained with SVM cost parameter C == 1
allcs = zeros(numel(newout),1);
for i=1:numel(newout)
    allcs(i) = newout(i).trainparams.C;
end
valididx = find(allcs == 1); % select C!
newout = newout(valididx);


% bundle all datasets: group results that share every parameter except
% the 'dataset' trainparam, so cross-validation bins are recombined
fout = sameparamsubset(newout, 'dataset','');
out = [];
for ci=1:numel(fout)
    filteredout = fout{ci};


    ok_test = zeros(2, numel(filteredout));
    ok_train = zeros(2, numel(filteredout));
    ok_config = []; % NOTE(review): assigned but never used

    % use the first entry of the group as the template for the summary
    tmpout = filteredout(1);
    % cycle over all test sets and get new means
    for i=1:numel(filteredout)
        ok_test(:,i) = filteredout(i).mean_ok_test;
        ok_train(:,i) = filteredout(i).mean_ok_train;
    end

    % save the stuff: mean/variance across the group's bins
    tmpout.mean_ok_test = mean(ok_test,2);
    tmpout.var_ok_test = var(ok_test,0,2);
    tmpout.mean_ok_train = mean(ok_train,2);
    tmpout.var_ok_train = var(ok_train,0,2);

    % also keep the per-bin values
    tmpout.ok_test = ok_test;
    tmpout.ok_train = ok_train;

    % put it in output structure
    out = sappend(out,tmpout);
end

% ---
% show results
% ---
if numel([out.mean_ok_test]) > 1 && show

    % plot means % plot std = sqrt(var) % plot training results
    % (calls the local boxplot() defined at the bottom of this file,
    % not the Statistics Toolbox one)
    figure;
    boxplot([out.mean_ok_test], sqrt([out.var_ok_test]), [out.mean_ok_train]);
    title (sprintf('Performance for all configs'));
end



% ---
% write max. test success
% ---
mean_ok_test = [out.mean_ok_test];
[val, idx] = max(mean_ok_test(1,:));
if show
    fprintf(' --- Maximal test set success: nr. %d, %3.2f percent. train result %3.2f percent. --- \n', idx, val * 100,out(idx).mean_ok_train(1,:)*100)
end


% ---
% write max. test/train success
% ---
mean_ok_train = [out.mean_ok_train];
[val, idx] = max(mean_ok_train(1,:));
if show
    fprintf(' --- Maximal train set success: nr. %d, %3.2f percent, test result %3.2f percent. --- \n', idx, val * 100,out(idx).mean_ok_test(1,:)*100)
end

% save this summary under a hash of the input directory names
save([hash(strcat(dirin{:}),'md5') '_summary'], 'out');



% ---
% build index of these feature configurations without num_hid
% ---
% NOTE(review): 'idx' here is the best-TRAIN index from the block above
% (it was overwritten after the best-test fprintf) — confirm that is the
% intended configuration to look up
[fout2,param_hash,idxhash] = sameparamsubset(out, '','rbm_hidNum');
for ci = 1:numel(fout2)

    % search for the training param index
    found = find(idxhash{ci}== idx);
    if ~isempty(found)
        % NOTE(review): assigned but never returned or used
        out_withsameconfig = fout2{ci};
        break;
    end
end


end
154 | |
function [out, param_hash,idx] = sameparamsubset(in, ignoret,ignoref)
% Partition the struct array IN into groups of entries that share the
% same parameter configuration, comparing MD5 hashes of an XML dump of
% each entry's trainparams/fparams (with one optional field ignored in
% each before hashing).
%
% in         - struct array with fields 'trainparams' and 'fparams'
% ignoret    - trainparams field name to drop before comparing ('' = none)
% ignoref    - fparams field name to drop before comparing ('' = none)
% out        - cell array; out{g} is the subset of IN in group g
% param_hash - cell array of the per-entry configuration hashes
% idx        - cell array; idx{g} holds the indices of group g within IN

% ---
% hash every entry's (reduced) parameter configuration
% ---
nentries = numel(in);
param_hash = cell(nentries,1);
for entryi = 1:nentries
    cfg = struct('trainparams',in(entryi).trainparams, ...
        'fparams',in(entryi).fparams);

    % strip the fields that should not distinguish configurations
    if ~isempty(ignoret)
        cfg.trainparams = rmfield(cfg.trainparams,ignoret);
    end
    if ~isempty(ignoref)
        cfg.fparams = rmfield(cfg.fparams,ignoref);
    end

    param_hash{entryi} = hash(xml_format(cfg),'md5');
end

% ---
% one group per distinct hash value
% ---
uhashes = unique(param_hash);

out = {};
for groupi = 1:numel(uhashes)
    members = strcellfind(param_hash,uhashes(groupi),1);
    idx{groupi} = members;
    out{groupi} = in(members);
end

end
188 | |
function boxplot(means, stds, train)
% Grouped bar chart of train vs. test performance with error bars on the
% test means. Despite its name this is NOT a box plot; it also shadows
% the Statistics Toolbox boxplot(), but is kept under this name because
% this file calls it.
%
% means - matrix of test means; row 1 gets the error bars
% stds  - matrix of test standard deviations, same size as means
% train - matrix of train means, same size as means
%
% Fix: the original parameters were named 'mean' and 'std', shadowing
% the MATLAB builtins inside this function; renamed (callers pass
% positionally, so the interface is unchanged). Also dropped the stray
% semicolon after the function declaration.

% train bars first, then test bars, for each configuration
bar([train; means]', 1.5);
hold on;
% error bars on the first row of the test means
errorbar(1:size(means,2), means(1,:), stds(1,:),'.');
% plot(train,'rO');
colormap(spring);
% pad the axes 0.1 around the data; never drop the y-floor below 0
axis([0 size(means,2)+1 max(0, min(min([train means] - 0.1))) max(max([train means] + 0.1))]);
end