diff reproduce_AES53rd/rerun_svm_table3/svm_table3.m @ 0:e9a9cd732c1e tip

first hg version after svn
author wolffd
date Tue, 10 Feb 2015 15:05:51 +0000
parents
children
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/reproduce_AES53rd/rerun_svm_table3/svm_table3.m	Tue Feb 10 15:05:51 2015 +0000
@@ -0,0 +1,76 @@
+% ---
+% This script trains similarity measures using RBM features and an SVM,
+% as in Table 3.
+% Please note that RBM training is a probabilistic process: training is
+% run on 20 random initialisations of the RBM features, and the test
+% results corresponding to the RBM with the best training result are then
+% returned.
+% ---
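+
+% ---
+% In outline (a summary of the code further below, not new functionality):
+% the evaluation returns one result per parameter combination; the
+% combination with the highest training accuracy is selected and its test
+% accuracy is printed, roughly:
+%   [~, idx] = max([out(:).mean_ok_train]);
+%   out(idx).mean_ok_test
+% ---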
+
+% ---
+% vary feature parameters of mixed features
+% ---
+
+global globalvars;
+globalvars.debug = 3;
+        
+ftype = 'MTTMixedFeatureSonRBM'; %'MTTMixedFeatureStober11Genre';
+
+fparams_all = struct(...
+            ... % ---
+            ... % these are SONfeatRaw parameters
+            ... % ---
+            'son_filename',{{'rel_music_raw_features+simdata_ISMIR12.mat'}}, ...
+            'son_conf', 1:5, ...
+             ... % --- 
+            ... % Following: RBM params
+            ... % ---
+            'norm_pre_rbm', 0, ...  % normalise features before the RBM?
+            'norm_post_rbm', 0, ... % normalise features after the RBM?
+            'rbm_hidNum', [1000], ...  % number of hidden units % 500
+            'rbm_eNum', 100, ...
+            'rbm_bNum', 1, ...
+            'rbm_gNum', 1, ...
+            'rbm_lrate1', [0.05], ...  % initial learning rate % 0.01
+            'rbm_lrate2', [0.10], ...  % learning rate % 0.05
+            'rbm_momentum', [0.1], ... % momentum % 0.5
+            'rbm_cost', [0.00002], ... % cost function
+            'rbm_N', 50, ...
+            'rbm_MAX_INC', 10 ...
+        );
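+
+% ---
+% Illustration only (an assumption about the evaluation harness, not part of
+% the original experiment): fields holding several values, such as 'son_conf'
+% above, are presumably expanded into a grid of parameter combinations.
+% The variables in this sketch are hypothetical and not used elsewhere.
+% ---
+example_son_confs = 1:5;           % the five feature configurations above
+example_lrates    = [0.05, 0.10];  % two hypothetical learning rates
+[cA, cB] = ndgrid(example_son_confs, example_lrates);
+example_grid = [cA(:), cB(:)];     % each row is one parameter combination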
+
+% ---
+% vary parameters for svmlight
+% ---    
+
+trainparams_all = struct(...
+            'C', [1], ... % SVM trade-off constant C (regularisation)
+            'weighted', [0], ...
+            'dataset', {{'comp_partBinData_ISMIR12_01.mat'}}, ...
+            'inctrain', 0 ...
+            ... % the following parameters are optional
+            ... %'deltafun', {{'conv_subspace_delta'}}, ...
+            ... %'deltafun_params', {{{[1],[0]},{[5],[1]},{[10],[1]},{[20],[1]},{[30],[1]},{[50],[1]},{[70],[1]}}} ... % normalisation improves results
+            );
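+
+% ---
+% Illustration (assumption): by analogy with the vector-valued fields in
+% fparams_all above, several SVM trade-off constants could presumably be
+% swept in one run by listing them, e.g.:
+%   'C', [0.1, 1, 10], ...
+% ---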
+
+% set training function
+trainfun = @svmlight_wrapper;
+
+
+% create test directory
+akt_dir = migrate_to_test_dir();
+
+
+% call eval
+out = test_generic_features_parameters_crossval...
+    (fparams_all, trainparams_all, trainfun, ftype);
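+
+% ---
+% 'out' is assumed to be a struct array with one element per parameter
+% combination, carrying at least the fields used below, e.g.:
+%   out(1).mean_ok_train   % training accuracies for the first combination
+%   out(1).mean_ok_test    % corresponding test accuracies
+% ---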
+
+% ---
+% check training results and select best RBM according to training data
+% ---
+svm_train_performances = [out(:).mean_ok_train];
+[bestTrain, idx] = max(svm_train_performances(1,:));
+result = out(idx);
+
+% get corresponding test performance
+svm_test_performance = result.mean_ok_test(1,:);
+fprintf('SVM RBM Test/Train Result=%f / %f\n',svm_test_performance*100,bestTrain*100);
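+
+% Optional illustration (not in the original script): also report which of
+% the parameter combinations was selected.
+fprintf('Selected parameter combination: %d of %d\n', idx, numel(out));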