toolboxes/FullBNT-1.0.7/netlab3.3/demknn1.m (changeset 0:cc4b1211e677, tip)

initial commit to HG from Changeset: 646 (e263d8a21543) added further path and more save "camirversion.m"
author Daniel Wolff
date Fri, 19 Aug 2016 13:07:06 +0200
%DEMKNN1 Demonstrate nearest neighbour classifier.
%
% Description
% The problem consists of data in a two-dimensional space. The data is
% drawn from three spherical Gaussian distributions with priors 0.3,
% 0.5 and 0.2; centres (2, 3.5), (0, 0) and (0,2); and standard
% deviations 0.2, 0.5 and 1.0. The first figure contains a scatter plot
% of the data. The data is the same as in DEMGMM1.
%
% The second figure shows the data labelled with the corresponding
% class given by the classifier.
%
% See also
% DEM2DDAT, DEMGMM1, KNN
%

% Copyright (c) Ian T Nabney (1996-2001)

clc
disp('This program demonstrates the use of the K nearest neighbour algorithm.')
disp(' ')
disp('Press any key to continue.')
pause
% Generate the test data
ndata = 250;
randn('state', 42);
rand('state', 42);
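% Seeding both legacy generators makes the sampled data and the random
% permutation below reproducible between runs of the demonstration.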

[data, c] = dem2ddat(ndata);
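% dem2ddat returns the ndata x 2 sample matrix and the 3 x 2 matrix of the
% true Gaussian centres; the centres are reused below both for plotting and
% as the training exemplars of the nearest neighbour model.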

% Randomise data order
data = data(randperm(ndata),:);

clc
disp('We generate the data in two-dimensional space from a mixture of')
disp('three spherical Gaussians. The centres are shown as black crosses')
disp('in the plot.')
disp(' ')
disp('Press any key to continue.')
pause
fh1 = figure;
plot(data(:, 1), data(:, 2), 'o')
set(gca, 'Box', 'on')
hold on
title('Data')
hp1 = plot(c(:, 1), c(:,2), 'k+');
% Increase size of crosses
set(hp1, 'MarkerSize', 8);
set(hp1, 'LineWidth', 2);
hold off

clc
disp('We next use the centres as training exemplars for the K nearest')
disp('neighbour algorithm.')
disp(' ')
disp('Press any key to continue.')
pause

% Use centres as training data
train_labels = [1, 0, 0; 0, 1, 0; 0, 0, 1];
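% train_labels uses 1-of-N (one-hot) coding: row i marks centre i as the sole
% member of class i, so each centre acts as one labelled exemplar per class.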

% Label the test data up to kmax neighbours
kmax = 1;
net = knn(2, 3, kmax, c, train_labels);
[y, l] = knnfwd(net, data);
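% knn(2, 3, kmax, c, train_labels) builds a nearest neighbour model with two
% inputs, three classes and kmax neighbours over the exemplars in c.  With
% kmax = 1 the forward pass assigns every point to its nearest centre: l holds
% the winning class index for each row of data, and each row of y records the
% class votes.  A quick sanity check, assuming knnfwd returns one vote per
% neighbour so that each row of y sums to kmax:
%   assert(all(sum(y, 2) == kmax));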

clc
disp('We now plot each data point coloured according to its classification.')
disp(' ')
disp('Press any key to continue.')
pause
% Plot the result
fh2 = figure;
colors = ['b.'; 'r.'; 'g.'];
for i = 1:3
  thisX = data(l == i,1);
  thisY = data(l == i,2);
  hp(i) = plot(thisX, thisY, colors(i,:));
  set(hp(i), 'MarkerSize', 12);
  if i == 1
    hold on
  end
end
set(gca, 'Box', 'on');
legend('Class 1', 'Class 2', 'Class 3', 2)
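% The trailing 2 in the legend call above is the legacy numeric position
% argument (upper left-hand corner); recent MATLAB releases express this
% with the 'Location' property instead, e.g. 'Location', 'NorthWest'.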
hold on
labels = ['1', '2', '3'];
hp2 = plot(c(:, 1), c(:,2), 'k+');
% Increase size of crosses
set(hp2, 'MarkerSize', 8);
set(hp2, 'LineWidth', 2);

test_labels = labels(l(:,1));
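% test_labels maps the numeric class indices in l onto the character labels
% '1', '2' and '3' defined above; it is built for inspection and is not used
% in the remainder of the demonstration.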

title('Training data and data labels')
hold off

disp('The demonstration is now complete: press any key to exit.')
pause
close(fh1);
close(fh2);
clear all;