reproduce_AES53rd/rerun_figure4.m
% ---
% This script trains similarity measures and evaluates the
% impact of the number of hidden units (hidNum) as displayed in figure 4 of
%
% Feature Preprocessing with RBMs for Music Similarity Learning
% Son N. Tran, Daniel Wolff, Tillman Weyde, Artur Garcez, AES53rd
% conference
%
% Please note that RBM training is a probabilistic process, and thus the
% paper's results can only be reproduced approximately by running this
% script several times and selecting RBMs according to their training set
% performance (see the commented sketch after the evaluation call below).
% This takes considerable time and is therefore left out here for the sake
% of usability of this script. It is done exemplarily for hidNum = 1000 in
% the script "table 1" in the corresponding folder.
%
% The train and test performances are plotted in a figure.
%
% For convenience, the precomputed RBM features are stored in the files
% accompanying this script.
% In order to compute new RBM features, delete these files.
% ---

global globalvars;
globalvars.debug = 3;

% ---
% vary feature parameters of mixed features
% ---
        
ftype = 'MTTMixedFeatureSonRBM'; %'MTTMixedFeatureStober11Genre';

fparams_all = struct(...
            ... % ---
            ... % these are SONfeatRaw parameters
            ... % ---
            'son_filename',{{'rel_music_raw_features+simdata_ISMIR12.mat'}}, ...
            'son_conf', 2, ...
             ... % --- 
            ... % Following: RBM params
            ... % ---
            'norm_pre_rbm', 0, ... % norm before RBM?
            'norm_post_rbm', 0, ... % norm after RBM?
            'rbm_hidNum',[30 50 100 500 1000], ...   % number of hidden units % 500
            'rbm_eNum', 100, ...
            'rbm_bNum', 1, ...
            'rbm_gNum', 1, ...
            'rbm_lrate1' , [0.05], ... % initial learning rate % 0.01
            'rbm_lrate2', [0.17], ... %  learning rate, %0.05  
            'rbm_momentum', [0.1], ... % 0.5
            'rbm_cost', [0.00002], ... % cost function
            'rbm_N', 50, ...
            'rbm_MAX_INC', 10 ...
        );
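
% Note: only rbm_hidNum above contains several values; the evaluation
% routine called below is expected to expand them into one feature
% configuration per hidden-unit setting, which the plot at the end of this
% script iterates over.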

% ---
% vary parameters for svmlight
% ---    

trainparams_all = struct(...
            'C', [1], ...
            'weighted', [0], ...
            'dataset', {{'comp_partBinData_ISMIR12_01.mat'}}, ...
            'inctrain', 0 ...
            ... % the following parameters are optional:
            ... %'deltafun', {{'conv_subspace_delta'}}, ...
            ... %'deltafun_params', {{{[1],[0]},{[5],[1]},{[10],[1]},{[20],[1]},{[30],[1]},{[50],[1]},{[70],[1]}}} ... % normalisation improves results
            );

% set training function
trainfun = @svmlight_wrapper;


% create test directory
akt_dir = migrate_to_test_dir();


% call eval
out = test_generic_features_parameters_crossval...
    (fparams_all, trainparams_all, trainfun, ftype);
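
% ---
% Hedged sketch (not part of the original experiment): as noted in the
% header comment, RBM training is stochastic, so the paper's figure can
% only be approximated by repeating the evaluation and keeping the run
% with the best training-set performance. The commented loop below is one
% illustrative way to do this; n_reruns and best_train are names chosen
% here only.
% ---
% n_reruns = 5;
% best_train = -inf;
% for it = 1:n_reruns
%     out_it = test_generic_features_parameters_crossval...
%         (fparams_all, trainparams_all, trainfun, ftype);
%     tr = [out_it(:).mean_ok_train];
%     if mean(tr(1,:)) > best_train
%         best_train = mean(tr(1,:));
%         out = out_it;
%     end
% end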

% get number of Hidden Units
fparams = [out(:).fparams];
hidNum = [fparams(:).rbm_hidNum];
[hidNum, idx] = sort(hidNum);

% get sorted test data results
svm_test_results = [out(idx).mean_ok_test];
svm_test_results = svm_test_results(1,:);

% get sorted train data results
svm_train_results = [out(idx).mean_ok_train];
svm_train_results = svm_train_results(1,:);

% ---
% plot results
% ---
figure;
plot(hidNum, svm_train_results, 'r-');
hold on;
plot(hidNum, svm_test_results, 'b-');
legend('Training', 'Test');
xlabel('Number of hidden units (rbm\_hidNum)');
ylabel('mean\_ok');
title('Figure 4: SVM results for different hidNum');
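
% ---
% Not part of the original script: optionally save the reproduced figure
% for visual comparison with figure 4 of the paper. The filename below is
% an arbitrary choice; uncomment to use.
% ---
% saveas(gcf, 'rerun_figure4_hidNum.png');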