comparison reproduce_AES53rd/rerun_figure4.m @ 0:e9a9cd732c1e tip

first hg version after svn
author wolffd
date Tue, 10 Feb 2015 15:05:51 +0000
parents
children
comparison
equal deleted inserted replaced
-1:000000000000 0:e9a9cd732c1e
1 % ---
2 % This script trains similarity measures and evaluates the
3 % impact of the number of hidden states as displayed in figure 4 of
4 %
5 % Feature Preprocessing with RBMs for Music Similarity Learning
6 % Son N. Tran, Daniel Wolff, Tillman Weyde, Artur Garcez, AES53rd
7 % conference
8 %
9 % please note that the RBM training is a probabilistic process, and
10 % thus the paper's results can only be reproduced approximately with
11 % some iterations of this script, and selection of RBMs according to
12 % their training set performance.
13 % This takes considerable time, and for the sake of usability of this script
14 % is left out. It is exemplarily done for hidNum = 1000 in the script "table 1" in the
15 % corresponding folder.
16 %
17 % The train and test performances are plotted in a figure.
18 %
19 % For convenience, the precomputed RBM features are stored in the files
20 % accompanying this script.
21 % In order to compute new RBM features, delete these files.
22 % ---
23
24 global globalvars;
25 globalvars.debug = 3; % verbosity level for the experiment framework
26
27 % ---
28 % vary feature parameters of mixed features
29 % ---
30
31 ftype = 'MTTMixedFeatureSonRBM'; %'MTTMixedFeatureStober11Genre';
32
33 fparams_all = struct(...
34 ... % ---
35 ... % these are SONfeatRaw parameters
36 ... % ---
37 'son_filename',{{'rel_music_raw_features+simdata_ISMIR12.mat'}}, ...
38 'son_conf', 2, ...
39 ... % ---
40 ... % Following: RBM params
41 ... % ---
42 'norm_pre_rbm', 0, ... % normalise features before RBM?
43 'norm_post_rbm',0, ... % normalise features after RBM?
44 'rbm_hidNum',[30 50 100 500 1000], ... % number of hidden units (swept for figure 4) % 500
45 'rbm_eNum', 100, ... % number of training epochs
46 'rbm_bNum', 1, ...
47 'rbm_gNum', 1, ...
48 'rbm_lrate1' , [0.05], ... % initial learning rate % 0.01
49 'rbm_lrate2', [0.17], ... % learning rate, %0.05
50 'rbm_momentum', [0.1], ... % 0.5
51 'rbm_cost', [0.00002], ... % cost function
52 'rbm_N', 50, ...
53 'rbm_MAX_INC', 10 ...
54 );
55
56 % ---
57 % vary parameters for svmlight
58 % ---
59
60 trainparams_all = struct(...
61 'C', [1], ...% SVM regularisation constant
62 'weighted', [0], ...
63 'dataset', {{'comp_partBinData_ISMIR12_01.mat'}}, ...
64 'inctrain', 0 ...
65 ... % these are optional
66 ... %'deltafun', {{'conv_subspace_delta'}}, ...
67 ... %'deltafun_params', {{{[1],[0]},{[5],[1]},{[10],[1]},{[20],[1]},{[30],[1]},{[50],[1]},{[70],[1]}}} ... % normalisation improves results
68 );
69
70 % set training function (SVMlight wrapper)
71 trainfun = @svmlight_wrapper;
72
73
74 % create test directory
75 akt_dir = migrate_to_test_dir();
76
77
78 % run cross-validated training/evaluation over all parameter combinations
79 out = test_generic_features_parameters_crossval...
80 (fparams_all, trainparams_all, trainfun, ftype);
81
82 % get number of hidden units for each run, then sort runs by it
83 fparams = [out(:).fparams];
84 hidNum = [fparams(:).rbm_hidNum];
85 [hidNum,idx] =sort(hidNum);
86
87 % get sorted test data results
88 svm_test_results = [out(idx).mean_ok_test];
89 svm_test_results = svm_test_results(1,:); % keep first row only — presumably mean over folds; TODO confirm
90
91 % get sorted train data results
92 svm_train_results = [out(idx).mean_ok_train];
93 svm_train_results = svm_train_results(1,:); % keep first row only, matching the test results above
94
95 % ---
96 % plot train (red) and test (blue) accuracy vs. number of hidden units
97 % ---
98 figure;
99 plot(hidNum,svm_train_results,'r-');
100 hold on
101 plot(hidNum,svm_test_results,'b');
102 legend('Training','Test');
103 title ('Figure 4: SVM results for different hidNum');