% ---
% This script trains similarity measures and evaluates the
% impact of the number of hidden units, as displayed in Figure 4 of
%
% Feature Preprocessing with RBMs for Music Similarity Learning
% Son N. Tran, Daniel Wolff, Tillman Weyde, Artur Garcez,
% AES 53rd Conference
%
% Please note that RBM training is a probabilistic process, and thus the
% paper's results can only be reproduced approximately, using several
% iterations of this script and selecting RBMs according to their
% training set performance.
% This takes considerable time and, for the sake of usability, is left
% out of this script. It is done exemplarily for hidNum = 1000 in the
% script "table 1" in the corresponding folder.
%
% The train and test performances are plotted in a figure.
%
% For convenience, the precomputed RBM features are stored in the files
% accompanying this script.
% In order to compute new RBM features, delete these files.
% ---

global globalvars;
globalvars.debug = 3;

% ---
% vary feature parameters of mixed features
% ---

ftype = 'MTTMixedFeatureSonRBM'; %'MTTMixedFeatureStober11Genre';

fparams_all = struct(...
    ... % ---
    ... % these are SONfeatRaw parameters
    ... % ---
    'son_filename', {{'rel_music_raw_features+simdata_ISMIR12.mat'}}, ...
    'son_conf', 2, ...
    ... % ---
    ... % Following: RBM params
    ... % ---
    'norm_pre_rbm', 0, ...               % normalise before RBM?
    'norm_post_rbm', 0, ...              % normalise after RBM?
    'rbm_hidNum', [30 50 100 500 1000], ... % number of hidden units % 500
    'rbm_eNum', 100, ...
    'rbm_bNum', 1, ...
    'rbm_gNum', 1, ...
    'rbm_lrate1', [0.05], ...            % initial learning rate % 0.01
    'rbm_lrate2', [0.17], ...            % learning rate % 0.05
    'rbm_momentum', [0.1], ...           % 0.5
    'rbm_cost', [0.00002], ...           % cost function
    'rbm_N', 50, ...
    'rbm_MAX_INC', 10 ...
    );

% ---
% vary parameters for svmlight
% ---

trainparams_all = struct(...
    'C', [1], ...
    'weighted', [0], ...
    'dataset', {{'comp_partBinData_ISMIR12_01.mat'}}, ...
    'inctrain', 0 ...
    ... % the following is optional
    ... %'deltafun', {{'conv_subspace_delta'}}, ...
    ... %'deltafun_params', {{{[1],[0]},{[5],[1]},{[10],[1]},{[20],[1]},{[30],[1]},{[50],[1]},{[70],[1]}}} ... % normalisation improves results
    );

% set training function
trainfun = @svmlight_wrapper;


% create test directory
akt_dir = migrate_to_test_dir();


% call eval
out = test_generic_features_parameters_crossval...
    (fparams_all, trainparams_all, trainfun, ftype);

% get number of hidden units
fparams = [out(:).fparams];
hidNum = [fparams(:).rbm_hidNum];
[hidNum, idx] = sort(hidNum);

% get sorted test data results
svm_test_results = [out(idx).mean_ok_test];
svm_test_results = svm_test_results(1,:);

% get sorted train data results
svm_train_results = [out(idx).mean_ok_train];
svm_train_results = svm_train_results(1,:);

% ---
% plot results
% ---
figure;
plot(hidNum, svm_train_results, 'r-');
hold on
plot(hidNum, svm_test_results, 'b');
legend('Training', 'Test');
title('Figure 4: SVM results for different hidNum');
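% ---
% Optional sketch: repeated training and RBM selection.
% The selection of RBMs by training set performance mentioned in the
% header is left out above for usability. The block below is a minimal
% sketch of that procedure, assuming repeated calls to
% test_generic_features_parameters_crossval return comparable
% mean_ok_train values. The names do_rbm_selection, n_repeats, runs,
% train_perf and best_run are illustrative and not part of the original
% code. Set do_rbm_selection = true to run it (this multiplies the
% runtime by n_repeats).
% ---
do_rbm_selection = false;
if do_rbm_selection
    n_repeats = 5;                       % number of probabilistic RBM trainings
    runs = cell(1, n_repeats);
    train_perf = zeros(1, n_repeats);
    for r = 1:n_repeats
        runs{r} = test_generic_features_parameters_crossval...
            (fparams_all, trainparams_all, trainfun, ftype);
        tmp = [runs{r}(:).mean_ok_train];
        train_perf(r) = mean(tmp(1,:));  % average training accuracy over parameter settings
    end
    [~, best_run] = max(train_perf);     % pick the run with the best training performance
    out = runs{best_run};                % re-run the analysis above with this selection
end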