toolboxes/FullBNT-1.0.7/netlab3.3/demmlp2.m
%DEMMLP2 Demonstrate simple classification using a multi-layer perceptron
%
% Description
% The problem consists of input data in two dimensions drawn from a
% mixture of three Gaussians, two of which are assigned to a single
% class. An MLP with logistic outputs trained with a quasi-Newton
% optimisation algorithm is compared with the optimal Bayesian decision
% rule.
%
% See also
% MLP, MLPFWD, NETERR, QUASINEW
%

% Copyright (c) Ian T Nabney (1996-2001)


% Set up some figure parameters
AxisShift = 0.05;
ClassSymbol1 = 'r.';
ClassSymbol2 = 'y.';
PointSize = 12;
titleSize = 10;

% Fix the seeds
rand('state', 423);
randn('state', 423);
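% rand('state', ...) and randn('state', ...) are the legacy seeding
% interface from the MATLAB versions this toolbox targets; in current
% releases rng(423) is the rough equivalent, but the old form is kept
% for compatibility.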

clc
disp('This demonstration shows how an MLP with logistic outputs and')
disp('cross entropy error function can be trained to model the')
disp('posterior class probabilities in a classification problem.')
disp('The results are compared with the optimal Bayes rule classifier,')
disp('which can be computed exactly as we know the form of the generating')
disp('distribution.')
disp(' ')
disp('Press any key to continue.')
pause

fh1 = figure;
set(fh1, 'Name', 'True Data Distribution');
whitebg(fh1, 'k');

%
% Generate the data
%
n = 200;

% Set up mixture model: 2d data with three centres
% Class 1 is first centre, class 2 from the other two
mix = gmm(2, 3, 'full');
mix.priors = [0.5 0.25 0.25];
mix.centres = [0 -0.1; 1 1; 1 -1];
mix.covars(:,:,1) = [0.625 -0.2165; -0.2165 0.875];
mix.covars(:,:,2) = [0.2241 -0.1368; -0.1368 0.9759];
mix.covars(:,:,3) = [0.2375 0.1516; 0.1516 0.4125];
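% With these priors the two classes are equally likely overall:
% P(red) = 0.5 (component 1) and P(yellow) = 0.25 + 0.25 = 0.5
% (components 2 and 3 together).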

[data, label] = gmmsamp(mix, n);

%
% Calculate some useful axis limits
%
x0 = min(data(:,1));
x1 = max(data(:,1));
y0 = min(data(:,2));
y1 = max(data(:,2));
dx = x1-x0;
dy = y1-y0;
expand = 5/100;   % Add on 5 percent each way
x0 = x0 - dx*expand;
x1 = x1 + dx*expand;
y0 = y0 - dy*expand;
y1 = y1 + dy*expand;
resolution = 100;
step = dx/resolution;
xrange = [x0:step:x1];
yrange = [y0:step:y1];
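% Note that the same step, derived from the x-range, is used for both
% axes, so the grid spacing is equal in x and y even though the number
% of grid points along each axis may differ.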
%
% Generate the grid
%
[X, Y] = meshgrid(xrange, yrange);
%
% Calculate the class conditional densities, the unconditional densities and
% the posterior probabilities
%
px_j = gmmactiv(mix, [X(:) Y(:)]);
px = reshape(px_j*(mix.priors)', size(X));
post = gmmpost(mix, [X(:) Y(:)]);
p1_x = reshape(post(:, 1), size(X));
p2_x = reshape(post(:, 2) + post(:, 3), size(X));
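% These quantities combine via Bayes' theorem. Writing the mixture
% components as j = 1..3 with priors P(j) and densities p(x|j):
%   p(x)   = sum_j P(j) p(x|j)    (the px line above; gmmactiv gives p(x|j))
%   P(j|x) = P(j) p(x|j) / p(x)   (what gmmpost returns)
% Class "red" is component 1 and class "yellow" is components 2 and 3,
% so p(red|x) = P(1|x) and p(yellow|x) = P(2|x) + P(3|x).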

%
% Generate some pretty pictures !!
%
colormap(hot)
colorbar
subplot(1,2,1)
hold on
plot(data((label==1),1),data(label==1,2),ClassSymbol1, 'MarkerSize', PointSize)
plot(data((label>1),1),data(label>1,2),ClassSymbol2, 'MarkerSize', PointSize)
contour(xrange,yrange,p1_x,[0.5 0.5],'w-');
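% The 0.5 level set of p(red|x) is the Bayes-optimal decision boundary
% for equal misclassification costs.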
axis([x0 x1 y0 y1])
set(gca,'Box','On')
title('The Sampled Data');
rect = get(gca,'Position');
rect(1) = rect(1)-AxisShift;
rect(3) = rect(3)+AxisShift;
set(gca,'Position',rect)
hold off

subplot(1,2,2)
imagesc(X(:),Y(:),px);
hold on
[cB, hB] = contour(xrange,yrange,p1_x,[0.5 0.5],'w:');
set(hB,'LineWidth', 2);
axis([x0 x1 y0 y1])
set(gca,'YDir','normal')
title('Probability Density p(x)')
hold off

drawnow;
clc;
disp('The first figure shows the data sampled from a mixture of three')
disp('Gaussians, the first of which (whose centre is near the origin) is')
disp('labelled red and the other two are labelled yellow. The second plot')
disp('shows the unconditional density of the data with the optimal Bayesian')
disp('decision boundary superimposed.')
disp(' ')
disp('Press any key to continue.')
pause
fh2 = figure;
set(fh2, 'Name', 'Class-conditional Densities and Posterior Probabilities');
whitebg(fh2, 'w');

subplot(2,2,1)
p1 = reshape(px_j(:,1),size(X));
imagesc(X(:),Y(:),p1);
colormap hot
colorbar
axis(axis)
set(gca,'YDir','normal')
hold on
plot(mix.centres(:,1),mix.centres(:,2),'b+','MarkerSize',8,'LineWidth',2)
title('Density p(x|red)')
hold off

subplot(2,2,2)
p2 = reshape((px_j(:,2)+px_j(:,3)),size(X));
imagesc(X(:),Y(:),p2);
colorbar
set(gca,'YDir','normal')
hold on
plot(mix.centres(:,1),mix.centres(:,2),'b+','MarkerSize',8,'LineWidth',2)
title('Density p(x|yellow)')
hold off

subplot(2,2,3)
imagesc(X(:),Y(:),p1_x);
set(gca,'YDir','normal')
colorbar
title('Posterior Probability p(red|x)')
hold on
plot(mix.centres(:,1),mix.centres(:,2),'b+','MarkerSize',8,'LineWidth',2)
hold off

subplot(2,2,4)
imagesc(X(:),Y(:),p2_x);
set(gca,'YDir','normal')
colorbar
title('Posterior Probability p(yellow|x)')
hold on
plot(mix.centres(:,1),mix.centres(:,2),'b+','MarkerSize',8,'LineWidth',2)
hold off

% Now set up and train the MLP
nhidden = 6;
nout = 1;
alpha = 0.2;     % Weight decay
ncycles = 60;    % Number of training cycles.
% Set up MLP network
net = mlp(2, nhidden, nout, 'logistic', alpha);
options = zeros(1,18);
options(1) = 1;  % Print out error values
options(14) = ncycles;
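% The options vector follows the old foptions convention used
% throughout Netlab: options(1) controls display of the error value
% during training and options(14) sets the maximum number of cycles.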

mlpstring = ['We now set up an MLP with ', num2str(nhidden), ...
  ' hidden units, logistic output and cross'];
trainstring = ['entropy error function, and train it for ', ...
  num2str(ncycles), ' cycles using the'];
wdstring = ['quasi-Newton optimisation algorithm with weight decay of ', ...
  num2str(alpha), '.'];

% Force out the figure before training the MLP
drawnow;
disp(' ')
disp('The second figure shows the class conditional densities and posterior')
disp('probabilities for each class. The blue crosses mark the centres of')
disp('the three Gaussians.')
disp(' ')
disp(mlpstring)
disp(trainstring)
disp(wdstring)
disp(' ')
disp('Press any key to continue.')
pause

% Convert targets to 0-1 encoding
target = [label==1];

% Train using quasi-Newton.
[net] = netopt(net, options, data, target, 'quasinew');
y = mlpfwd(net, data);
yg = mlpfwd(net, [X(:) Y(:)]);
yg = reshape(yg(:,1),size(X));
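% With a logistic output unit and cross-entropy error, the trained
% network output approximates the posterior p(red|x): y holds the
% estimated posteriors on the training data and yg holds them over the
% plotting grid, so thresholding yg at 0.5 gives the network's decision
% boundary drawn below.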

fh3 = figure;
set(fh3, 'Name', 'Network Output');
whitebg(fh3, 'k')
subplot(1, 2, 1)
hold on
plot(data((label==1),1),data(label==1,2),'r.', 'MarkerSize', PointSize)
plot(data((label>1),1),data(label>1,2),'y.', 'MarkerSize', PointSize)
% Bayesian decision boundary
[cB, hB] = contour(xrange,yrange,p1_x,[0.5 0.5],'b-');
[cN, hN] = contour(xrange,yrange,yg,[0.5 0.5],'r-');
set(hB, 'LineWidth', 2);
set(hN, 'LineWidth', 2);
Chandles = [hB(1) hN(1)];
legend(Chandles, 'Bayes', 'Network', 3);

axis([x0 x1 y0 y1])
set(gca,'Box','on','XTick',[],'YTick',[])

title('Training Data','FontSize',titleSize);
hold off

subplot(1, 2, 2)
imagesc(X(:),Y(:),yg);
colormap hot
colorbar
axis(axis)
set(gca,'YDir','normal','XTick',[],'YTick',[])
title('Network Output','FontSize',titleSize)

clc
disp('This figure shows the training data with the decision boundary')
disp('produced by the trained network and the network''s prediction of')
disp('the posterior probability of the red class.')
disp(' ')
disp('Press any key to continue.')
pause

%
% Now generate and classify a test data set
%
[testdata, testlabel] = gmmsamp(mix, n);
testlab = [testlabel==1 testlabel>1];

% This is the Bayesian classification
tpx_j = gmmpost(mix, testdata);
Bpost = [tpx_j(:,1), tpx_j(:,2)+tpx_j(:,3)];
[Bcon, Brate] = confmat(Bpost, testlab);

% Compute network classification
yt = mlpfwd(net, testdata);
% Convert single output to posteriors for both classes
testpost = [yt 1-yt];
[C, trate] = confmat(testpost, testlab);
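% confmat expects 1-of-N encoded scores and targets: each row of the
% first argument is classified by its largest entry. The second output
% is a rate vector whose first element is the percentage of correct
% classifications, used in the figure titles below.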

fh4 = figure;
set(fh4, 'Name', 'Decision Boundaries');
whitebg(fh4, 'k');
hold on
plot(testdata((testlabel==1),1),testdata((testlabel==1),2),...
  ClassSymbol1, 'MarkerSize', PointSize)
plot(testdata((testlabel>1),1),testdata((testlabel>1),2),...
  ClassSymbol2, 'MarkerSize', PointSize)
% Bayesian decision boundary
[cB, hB] = contour(xrange,yrange,p1_x,[0.5 0.5],'b-');
set(hB, 'LineWidth', 2);
% Network decision boundary
[cN, hN] = contour(xrange,yrange,yg,[0.5 0.5],'r-');
set(hN, 'LineWidth', 2);
Chandles = [hB(1) hN(1)];
legend(Chandles, 'Bayes decision boundary', ...
  'Network decision boundary', -1);
axis([x0 x1 y0 y1])
title('Test Data')
set(gca,'Box','On','XTick',[],'YTick',[])

clc
disp('This figure shows the test data with the decision boundary')
disp('produced by the trained network and the optimal Bayes rule.')
disp(' ')
disp('Press any key to continue.')
pause

fh5 = figure;
set(fh5, 'Name', 'Test Set Performance');
whitebg(fh5, 'w');
% Bayes rule performance
subplot(1,2,1)
plotmat(Bcon,'b','k',12)
set(gca,'XTick',[0.5 1.5])
set(gca,'YTick',[0.5 1.5])
grid('off')
% Rows of a character matrix must have equal length, so 'Red' is padded
set(gca,'XTickLabel',['Red   ' ; 'Yellow'])
set(gca,'YTickLabel',['Yellow' ; 'Red   '])
ylabel('True')
xlabel('Predicted')
title(['Bayes Confusion Matrix (' num2str(Brate(1)) '%)'])

% Network performance
subplot(1,2,2)
plotmat(C,'b','k',12)
set(gca,'XTick',[0.5 1.5])
set(gca,'YTick',[0.5 1.5])
grid('off')
set(gca,'XTickLabel',['Red   ' ; 'Yellow'])
set(gca,'YTickLabel',['Yellow' ; 'Red   '])
ylabel('True')
xlabel('Predicted')
title(['Network Confusion Matrix (' num2str(trate(1)) '%)'])

disp('The final figure shows the confusion matrices for the')
disp('two rules on the test set.')
disp(' ')
disp('Press any key to exit.')
pause
whitebg(fh1, 'w');
whitebg(fh2, 'w');
whitebg(fh3, 'w');
whitebg(fh4, 'w');
whitebg(fh5, 'w');
close(fh1); close(fh2); close(fh3);
close(fh4); close(fh5);
clear all;