Mercurial repository: camir-aes2014
File: reproduce_AES53rd/rerun_svm_table3/svm_table3.m @ revision 0:e9a9cd732c1e (tip)
Commit message: first hg version after svn
Author: wolffd
Date: Tue, 10 Feb 2015 15:05:51 +0000
Parents: (none)
Children: (none)
Comparison (deleted/inserted/replaced): -1:000000000000 -> 0:e9a9cd732c1e
% ---
% Trains similarity measures using RBM features and an SVM, as in Table 3.
% Note that RBM training is a probabilistic process: training is run on
% 20 random initialisations of the RBM features, and the test results
% corresponding to the RBM with the best training result are returned.
% ---

% ---
% vary feature parameters of mixed features
% ---

global globalvars;
globalvars.debug = 3;

% feature type under test
ftype = 'MTTMixedFeatureSonRBM'; %'MTTMixedFeatureStober11Genre';

fparams_all = struct( ...
    ... % ---
    ... % SONfeatRaw parameters
    ... % ---
    'son_filename', {{'rel_music_raw_features+simdata_ISMIR12.mat'}}, ...
    'son_conf', 1:5, ...
    ... % ---
    ... % RBM parameters
    ... % ---
    'norm_pre_rbm', 0, ...      % normalise before RBM?
    'norm_post_rbm', 0, ...     % normalise after RBM?
    'rbm_hidNum', [1000], ...   % number of hidden units % 500
    'rbm_eNum', 100, ...
    'rbm_bNum', 1, ...
    'rbm_gNum', 1, ...
    'rbm_lrate1', [0.05], ...   % initial learning rate % 0.01
    'rbm_lrate2', [0.10], ...   % learning rate %0.05
    'rbm_momentum', [0.1], ...  % 0.5
    'rbm_cost', [0.00002], ...  % cost function
    'rbm_N', 50, ...
    'rbm_MAX_INC', 10 ...
    );

% ---
% vary parameters for svmlight
% ---

trainparams_all = struct( ...
    'C', [1], ...%
    'weighted', [0], ...
    'dataset', {{'comp_partBinData_ISMIR12_01.mat'}}, ...
    'inctrain', 0 ...
    ... % this optional
    ... %'deltafun', {{'conv_subspace_delta'}}, ...
    ... %'deltafun_params', {{{[1],[0]},{[5],[1]},{[10],[1]},{[20],[1]},{[30],[1]},{[50],[1]},{[70],[1]}}} ... % normalisation improves results
    );

% set training function
trainfun = @svmlight_wrapper;


% create test directory
akt_dir = migrate_to_test_dir();


% run the cross-validated parameter evaluation
out = test_generic_features_parameters_crossval ...
    (fparams_all, trainparams_all, trainfun, ftype);

% ---
% check training results and select best RBM according to training data
% ---
svm_train_performances = [out(:).mean_ok_train];
[bestTrain, idx] = max(svm_train_performances(1,:));
result = out(idx);

% report the test performance of the selected configuration
svm_test_performance = result.mean_ok_test(1,:);
fprintf('SVM RBM Test/Train Result=%f / %f\n',svm_test_performance*100,bestTrain*100);