%DEMMLP2 Demonstrate simple classification using a multi-layer perceptron
%
% Description
% The problem consists of input data in two dimensions drawn from a
% mixture of three Gaussians: two of which are assigned to a single
% class. An MLP with logistic outputs trained with a quasi-Newton
% optimisation algorithm is compared with the optimal Bayesian decision
% rule.
%
% See also
% MLP, MLPFWD, NETERR, QUASINEW
%

% Copyright (c) Ian T Nabney (1996-2001)


% Set up some figure parameters
AxisShift = 0.05;
ClassSymbol1 = 'r.';
ClassSymbol2 = 'y.';
PointSize = 12;
titleSize = 10;

% Fix the seeds so the demo is reproducible from run to run
rand('state', 423);
randn('state', 423);

clc
disp('This demonstration shows how an MLP with logistic outputs')
disp('and cross entropy error function can be trained to model the')
disp('posterior class probabilities in a classification problem.')
disp('The results are compared with the optimal Bayes rule classifier,')
disp('which can be computed exactly as we know the form of the generating')
disp('distribution.')
disp(' ')
disp('Press any key to continue.')
pause

fh1 = figure;
set(fh1, 'Name', 'True Data Distribution');
whitebg(fh1, 'k');

%
% Generate the data
%
n = 200;

% Set up mixture model: 2d data with three centres
% Class 1 is first centre, class 2 from the other two
mix = gmm(2, 3, 'full');
mix.priors = [0.5 0.25 0.25];
mix.centres = [0 -0.1; 1 1; 1 -1];
mix.covars(:,:,1) = [0.625 -0.2165; -0.2165 0.875];
mix.covars(:,:,2) = [0.2241 -0.1368; -0.1368 0.9759];
mix.covars(:,:,3) = [0.2375 0.1516; 0.1516 0.4125];

[data, label] = gmmsamp(mix, n);

%
% Calculate some useful axis limits
%
x0 = min(data(:,1));
x1 = max(data(:,1));
y0 = min(data(:,2));
y1 = max(data(:,2));
dx = x1 - x0;
dy = y1 - y0;
expand = 5/100;                 % Add on 5 percent each way
x0 = x0 - dx*expand;
x1 = x1 + dx*expand;
y0 = y0 - dy*expand;
y1 = y1 + dy*expand;
resolution = 100;
step = dx/resolution;
xrange = [x0:step:x1];
yrange = [y0:step:y1];
%
% Generate the grid
%
[X Y] = meshgrid([x0:step:x1], [y0:step:y1]);
%
% Calculate the class conditional densities, the unconditional densities
% and the posterior probabilities
%
px_j = gmmactiv(mix, [X(:) Y(:)]);
px = reshape(px_j*(mix.priors)', size(X));
post = gmmpost(mix, [X(:) Y(:)]);
p1_x = reshape(post(:, 1), size(X));
% Classes 2 and 3 are merged into the single "yellow" class
p2_x = reshape(post(:, 2) + post(:, 3), size(X));

%
% Generate some pretty pictures !!
%
colormap(hot)
colorbar
subplot(1,2,1)
hold on
plot(data((label==1),1), data(label==1,2), ClassSymbol1, ...
  'MarkerSize', PointSize)
plot(data((label>1),1), data(label>1,2), ClassSymbol2, ...
  'MarkerSize', PointSize)
% Optimal Bayes boundary: posterior of class 1 equals 0.5
contour(xrange, yrange, p1_x, [0.5 0.5], 'w-');
axis([x0 x1 y0 y1])
set(gca, 'Box', 'On')
title('The Sampled Data');
rect = get(gca, 'Position');
rect(1) = rect(1) - AxisShift;
rect(3) = rect(3) + AxisShift;
set(gca, 'Position', rect)
hold off

subplot(1,2,2)
imagesc(X(:), Y(:), px);
hold on
[cB, hB] = contour(xrange, yrange, p1_x, [0.5 0.5], 'w:');
set(hB, 'LineWidth', 2);
axis([x0 x1 y0 y1])
set(gca, 'YDir', 'normal')
title('Probability Density p(x)')
hold off

drawnow;
clc;
disp('The first figure shows the data sampled from a mixture of three')
disp('Gaussians, the first of which (whose centre is near the origin) is')
disp('labelled red and the other two are labelled yellow. The second plot')
disp('shows the unconditional density of the data with the optimal Bayesian')
disp('decision boundary superimposed.')
disp(' ')
disp('Press any key to continue.')
pause
fh2 = figure;
set(fh2, 'Name', 'Class-conditional Densities and Posterior Probabilities');
whitebg(fh2, 'w');

subplot(2,2,1)
p1 = reshape(px_j(:,1), size(X));
imagesc(X(:), Y(:), p1);
colormap hot
colorbar
axis(axis)
set(gca, 'YDir', 'normal')
hold on
plot(mix.centres(:,1), mix.centres(:,2), 'b+', 'MarkerSize', 8, ...
  'LineWidth', 2)
title('Density p(x|red)')
hold off

subplot(2,2,2)
p2 = reshape((px_j(:,2) + px_j(:,3)), size(X));
imagesc(X(:), Y(:), p2);
colorbar
set(gca, 'YDir', 'normal')
hold on
plot(mix.centres(:,1), mix.centres(:,2), 'b+', 'MarkerSize', 8, ...
  'LineWidth', 2)
title('Density p(x|yellow)')
hold off

subplot(2,2,3)
imagesc(X(:), Y(:), p1_x);
set(gca, 'YDir', 'normal')
colorbar
title('Posterior Probability p(red|x)')
hold on
plot(mix.centres(:,1), mix.centres(:,2), 'b+', 'MarkerSize', 8, ...
  'LineWidth', 2)
hold off

subplot(2,2,4)
imagesc(X(:), Y(:), p2_x);
set(gca, 'YDir', 'normal')
colorbar
title('Posterior Probability p(yellow|x)')
hold on
plot(mix.centres(:,1), mix.centres(:,2), 'b+', 'MarkerSize', 8, ...
  'LineWidth', 2)
hold off

% Now set up and train the MLP
nhidden = 6;
nout = 1;                       % Single logistic output: P(red|x)
alpha = 0.2;                    % Weight decay
ncycles = 60;                   % Number of training cycles.
% Set up MLP network
net = mlp(2, nhidden, nout, 'logistic', alpha);
options = zeros(1,18);
options(1) = 1;                 % Print out error values
options(14) = ncycles;

mlpstring = ['We now set up an MLP with ', num2str(nhidden), ...
  ' hidden units, logistic output and cross'];
trainstring = ['entropy error function, and train it for ', ...
  num2str(ncycles), ' cycles using the'];
wdstring = ['quasi-Newton optimisation algorithm with weight decay of ', ...
  num2str(alpha), '.'];

% Force out the figure before training the MLP
drawnow;
disp(' ')
disp('The second figure shows the class conditional densities and posterior')
disp('probabilities for each class. The blue crosses mark the centres of')
disp('the three Gaussians.')
disp(' ')
disp(mlpstring)
disp(trainstring)
disp(wdstring)
disp(' ')
disp('Press any key to continue.')
pause

% Convert targets to 0-1 encoding (1 = red class, 0 = yellow class)
target = [label==1];

% Train using quasi-Newton.
[net] = netopt(net, options, data, target, 'quasinew');
% Evaluate the trained network over the whole grid
yg = mlpfwd(net, [X(:) Y(:)]);
yg = reshape(yg(:,1), size(X));

fh3 = figure;
set(fh3, 'Name', 'Network Output');
whitebg(fh3, 'k')
subplot(1, 2, 1)
hold on
plot(data((label==1),1), data(label==1,2), 'r.', 'MarkerSize', PointSize)
plot(data((label>1),1), data(label>1,2), 'y.', 'MarkerSize', PointSize)
% Bayesian decision boundary
[cB, hB] = contour(xrange, yrange, p1_x, [0.5 0.5], 'b-');
[cN, hN] = contour(xrange, yrange, yg, [0.5 0.5], 'r-');
set(hB, 'LineWidth', 2);
set(hN, 'LineWidth', 2);
Chandles = [hB(1) hN(1)];
legend(Chandles, 'Bayes', ...
  'Network', 3);

axis([x0 x1 y0 y1])
set(gca, 'Box', 'on', 'XTick', [], 'YTick', [])

title('Training Data', 'FontSize', titleSize);
hold off

subplot(1, 2, 2)
imagesc(X(:), Y(:), yg);
colormap hot
colorbar
axis(axis)
set(gca, 'YDir', 'normal', 'XTick', [], 'YTick', [])
title('Network Output', 'FontSize', titleSize)

clc
disp('This figure shows the training data with the decision boundary')
disp('produced by the trained network and the network''s prediction of')
disp('the posterior probability of the red class.')
disp(' ')
disp('Press any key to continue.')
pause

%
% Now generate and classify a test data set
%
[testdata testlabel] = gmmsamp(mix, n);
% Two-column 1-of-N encoding: [red, yellow]
testlab = [testlabel==1 testlabel>1];

% This is the Bayesian classification
tpx_j = gmmpost(mix, testdata);
Bpost = [tpx_j(:,1), tpx_j(:,2)+tpx_j(:,3)];
[Bcon Brate] = confmat(Bpost, testlab);

% Compute network classification
yt = mlpfwd(net, testdata);
% Convert single output to posteriors for both classes
testpost = [yt 1-yt];
[C trate] = confmat(testpost, testlab);

fh4 = figure;
set(fh4, 'Name', 'Decision Boundaries');
whitebg(fh4, 'k');
hold on
plot(testdata((testlabel==1),1), testdata((testlabel==1),2), ...
  ClassSymbol1, 'MarkerSize', PointSize)
plot(testdata((testlabel>1),1), testdata((testlabel>1),2), ...
  ClassSymbol2, 'MarkerSize', PointSize)
% Bayesian decision boundary
[cB, hB] = contour(xrange, yrange, p1_x, [0.5 0.5], 'b-');
set(hB, 'LineWidth', 2);
% Network decision boundary
[cN, hN] = contour(xrange, yrange, yg, [0.5 0.5], 'r-');
set(hN, 'LineWidth', 2);
Chandles = [hB(1) hN(1)];
legend(Chandles, 'Bayes decision boundary', ...
  'Network decision boundary', -1);
axis([x0 x1 y0 y1])
title('Test Data')
set(gca, 'Box', 'On', 'Xtick', [], 'YTick', [])

clc
disp('This figure shows the test data with the decision boundary')
disp('produced by the trained network and the optimal Bayes rule.')
disp(' ')
disp('Press any key to continue.')
pause

fh5 = figure;
set(fh5, 'Name', 'Test Set Performance');
whitebg(fh5, 'w');
% Bayes rule performance
subplot(1,2,1)
plotmat(Bcon, 'b', 'k', 12)
set(gca, 'XTick', [0.5 1.5])
set(gca, 'YTick', [0.5 1.5])
grid('off')
% Rows of a character matrix must have equal length, so 'Red' is padded
set(gca, 'XTickLabel', ['Red   ' ; 'Yellow'])
set(gca, 'YTickLabel', ['Yellow' ; 'Red   '])
ylabel('True')
xlabel('Predicted')
title(['Bayes Confusion Matrix (' num2str(Brate(1)) '%)'])

% Network performance
subplot(1,2, 2)
plotmat(C, 'b', 'k', 12)
set(gca, 'XTick', [0.5 1.5])
set(gca, 'YTick', [0.5 1.5])
grid('off')
set(gca, 'XTickLabel', ['Red   ' ; 'Yellow'])
set(gca, 'YTickLabel', ['Yellow' ; 'Red   '])
ylabel('True')
xlabel('Predicted')
title(['Network Confusion Matrix (' num2str(trate(1)) '%)'])

disp('The final figure shows the confusion matrices for the')
disp('two rules on the test set.')
disp(' ')
disp('Press any key to exit.')
pause
whitebg(fh1, 'w');
whitebg(fh2, 'w');
whitebg(fh3, 'w');
whitebg(fh4, 'w');
whitebg(fh5, 'w');
close(fh1); close(fh2); close(fh3);
close(fh4); close(fh5);
clear all;