% ---
% This script trains similarity measures and evaluates the
% impact of the number of hidden units as displayed in figure 4 of
%
% Feature Preprocessing with RBMs for Music Similarity Learning
% Son N. Tran, Daniel Wolff, Tillman Weyde, Artur Garcez, AES53rd
% conference
%
% Please note that RBM training is a probabilistic process, and thus the
% paper's results can only be reproduced approximately by running this
% script several times and selecting RBMs according to their training set
% performance.
% This selection takes considerable time and, for the sake of usability of
% this script, is left out here. It is done exemplarily for hidNum = 1000
% in the script "table 1" in the corresponding folder.
%
% The train and test performances are plotted in a figure.
%
% For convenience, precomputed RBM features are stored in the files
% accompanying this script.
% In order to compute new RBM features, delete these files.
% ---

global globalvars;
globalvars.debug = 3;

% ---
% vary feature parameters of mixed features
% ---

ftype = 'MTTMixedFeatureSonRBM'; %'MTTMixedFeatureStober11Genre';

fparams_all = struct(...
    ... % ---
    ... % these are SONfeatRaw parameters
    ... % ---
    'son_filename', {{'rel_music_raw_features+simdata_ISMIR12.mat'}}, ...
    'son_conf', 2, ...
    ... % ---
    ... % following: RBM parameters
    ... % ---
    'norm_pre_rbm', 0, ...                  % normalise before the RBM?
    'norm_post_rbm', 0, ...                 % normalise after the RBM?
    'rbm_hidNum', [30 50 100 500 1000], ... % number of hidden units % 500
    'rbm_eNum', 100, ...
    'rbm_bNum', 1, ...
    'rbm_gNum', 1, ...
    'rbm_lrate1', [0.05], ...               % initial learning rate % 0.01
    'rbm_lrate2', [0.17], ...               % learning rate % 0.05
    'rbm_momentum', [0.1], ...              % 0.5
    'rbm_cost', [0.00002], ...              % cost function
    'rbm_N', 50, ...
    'rbm_MAX_INC', 10 ...
    );
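% ---
% Note: fields given as arrays (here 'rbm_hidNum') describe a range of
% settings rather than a single value; the evaluation routine called below is
% expected to expand them into one configuration per value. A minimal, purely
% illustrative sketch of such an expansion (variable names are assumptions,
% not part of the pipeline):
%
%   for hn = fparams_all.rbm_hidNum
%       fparams_single = fparams_all;     % copy the shared settings
%       fparams_single.rbm_hidNum = hn;   % one RBM size per configuration
%       % ... compute RBM features and train/evaluate with fparams_single ...
%   end
% ---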

% ---
% vary parameters for svmlight
% ---

trainparams_all = struct(...
    'C', [1], ...
    'weighted', [0], ...
    'dataset', {{'comp_partBinData_ISMIR12_01.mat'}}, ...
    'inctrain', 0 ...
    ... % the following settings are optional
    ... %'deltafun', {{'conv_subspace_delta'}}, ...
    ... %'deltafun_params', {{{[1],[0]},{[5],[1]},{[10],[1]},{[20],[1]},{[30],[1]},{[50],[1]},{[70],[1]}}} ... % normalisation improves results
    );
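% ---
% To activate the optional 'deltafun' settings above, remove the leading
% '... %' from those lines and add a comma after the 'inctrain' value so the
% struct(...) call stays a single statement, e.g. (sketch, values as above):
%
%   'inctrain', 0, ...
%   'deltafun', {{'conv_subspace_delta'}}, ...
%   'deltafun_params', {{{[1],[0]},{[5],[1]}}} ...
% ---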

% set training function
trainfun = @svmlight_wrapper;


% create test directory
akt_dir = migrate_to_test_dir();


% call eval
out = test_generic_features_parameters_crossval...
    (fparams_all, trainparams_all, trainfun, ftype);
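% ---
% As noted in the header, RBM training is probabilistic; to approximate the
% paper's results, the evaluation can be repeated and the run with the best
% training set performance kept. A minimal sketch of that selection
% (illustrative only; 'n_runs', 'out_run' and 'best_train' are assumed names,
% and the systematic selection is done in the "table 1" script):
%
%   n_runs = 5;
%   best_train = -inf;
%   for run = 1:n_runs
%       out_run = test_generic_features_parameters_crossval...
%           (fparams_all, trainparams_all, trainfun, ftype);
%       train_ok = [out_run(:).mean_ok_train];
%       if max(train_ok(1,:)) > best_train
%           best_train = max(train_ok(1,:));
%           out = out_run;                % keep the best run for the plots below
%       end
%   end
% ---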

% get number of hidden units
fparams = [out(:).fparams];
hidNum = [fparams(:).rbm_hidNum];
[hidNum, idx] = sort(hidNum);

% get sorted test data results
svm_test_results = [out(idx).mean_ok_test];
svm_test_results = svm_test_results(1,:);

% get sorted train data results
svm_train_results = [out(idx).mean_ok_train];
svm_train_results = svm_train_results(1,:);

% ---
% plot results
% ---
figure;
plot(hidNum, svm_train_results, 'r-');
hold on;
plot(hidNum, svm_test_results, 'b');
legend('Training', 'Test');
title('Figure 4: SVM results for different hidNum');
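% optional: label the axes and keep a copy of the figure on disk
% (labels and file name are suggestions, not taken from the paper)
xlabel('number of hidden units (hidNum)');
ylabel('mean SVM success rate (mean\_ok)');
% saveas(gcf, 'figure4_hidNum_results.png');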
|