toolboxes/FullBNT-1.0.7/netlab3.3/demgmm5.m

%DEMGMM5 Demonstrate density modelling with a PPCA mixture model.
%
% Description
% The problem consists of modelling data generated by a mixture of
% three Gaussians in 2 dimensions with a mixture model using the
% probabilistic PCA (PPCA) form of covariance structure. The priors are
% 0.3, 0.5 and 0.2; the centres are (2, 3.5), (0, 0) and (0, 2); the
% variances are (0.01, 0.64) axis aligned, (0.04, 1) rotated by 30
% degrees, and the identity matrix. The first figure contains a scatter
% plot of the data.
%
% A mixture model with three one-dimensional PPCA components is trained
% using EM. The parameter vector is printed before and after training.
% The parameter vector consists of priors (the first column) and centres
% (given as (x, y) pairs in the next two columns).
%
% The second figure is a 3-dimensional view of the density function,
% while the third shows the axes of the 1-standard deviation ellipses
% for the three components of the mixture model together with the one
% standard deviation along the principal component of each mixture
% model component.
%
% See also
% GMM, GMMINIT, GMMEM, GMMPROB, PPCA
%

% Copyright (c) Ian T Nabney (1996-2001)


ndata = 500;
data = randn(ndata, 2);
prior = [0.3 0.5 0.2];
% Mixture model swaps clusters 1 and 3
datap = [0.2 0.5 0.3];
datac = [0 2; 0 0; 2 3.5];
datacov = repmat(eye(2), [1 1 3]);
data1 = data(1:prior(1)*ndata,:);
data2 = data(prior(1)*ndata+1:(prior(2)+prior(1))*ndata, :);
data3 = data((prior(1)+prior(2))*ndata+1:ndata, :);

% First cluster has axis aligned variance and centre (2, 3.5)
data1(:, 1) = data1(:, 1)*0.1 + 2.0;
data1(:, 2) = data1(:, 2)*0.8 + 3.5;
datacov(:, :, 3) = [0.1*0.1 0; 0 0.8*0.8];

% Second cluster has variance axes rotated by 30 degrees and centre (0, 0)
rotn = [cos(pi/6) -sin(pi/6); sin(pi/6) cos(pi/6)];
data2(:,1) = data2(:, 1)*0.2;
data2 = data2*rotn;
datacov(:, :, 2) = rotn' * [0.04 0; 0 1] * rotn;
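% Each row of data2 is post-multiplied by rotn (x -> x*rotn), so the
% population covariance of this cluster is rotn' * diag([0.2^2 1]) * rotn,
% which is exactly the matrix stored in datacov(:, :, 2) above.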

% Third cluster is at (0,2)
data3(:, 2) = data3(:, 2)*0.1;
data3 = data3 + repmat([0 2], prior(3)*ndata, 1);

% Put the dataset together again
data = [data1; data2; data3];

% Fix the seeds for reproducible initialisation and training
randn('state', 1);
rand('state', 1);

% Fit three one-dimensional PPCA models
ncentres = 3;
ppca_dim = 1;

clc
disp('This demonstration illustrates the use of a Gaussian mixture model')
disp('with a probabilistic PCA covariance structure to approximate the')
disp('unconditional probability density of data in a two-dimensional space.')
disp('We begin by generating the data from a mixture of three Gaussians and')
disp('plotting it.')
disp(' ')
disp('The first cluster has axis aligned variance and centre (0, 2).')
disp('The variance parallel to the x-axis is significantly greater')
disp('than that parallel to the y-axis.')
disp('The second cluster has variance axes rotated by 30 degrees')
disp('and centre (0, 0). The third cluster has significant variance')
disp('parallel to the y-axis and centre (2, 3.5).')
disp(' ')
disp('Press any key to continue.')
pause

fh1 = figure;
plot(data(:, 1), data(:, 2), 'o')
set(gca, 'Box', 'on')
axis equal
hold on

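% Create a mixture of ncentres Gaussians in 2 dimensions, each with a
% ppca_dim-dimensional probabilistic PCA covariance structure (the
% 'ppca' covariance type).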
mix = gmm(2, ncentres, 'ppca', ppca_dim);
options = foptions;
options(14) = 10;
options(1) = -1;  % Switch off all warnings

% Just use 10 iterations of k-means in initialisation
% Initialise the model parameters from the data
mix = gmminit(mix, data, options);
disp('The mixture model has three components with 1-dimensional')
disp('PPCA subspaces. The model parameters after initialisation using')
disp('the k-means algorithm are as follows')
disp(' Priors Centres')
disp([mix.priors' mix.centres])
disp(' ')
disp('Press any key to continue')
pause

options(1) = 1;    % Prints out error values.
options(14) = 30;  % Number of iterations.

disp('We now train the model using the EM algorithm for up to 30 iterations.')
disp(' ')
disp('Press any key to continue.')
pause

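% gmmem runs EM on the mixture: it returns the fitted model, the updated
% options vector (with the final negative log likelihood in options(8))
% and a record of the error value at each iteration in errlog.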
[mix, options, errlog] = gmmem(mix, data, options);
disp('The trained model has priors and centres:')
disp(' Priors Centres')
disp([mix.priors' mix.centres])

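% For the 'ppca' covariance type, component i is parameterised by
% mix.U(:,:,i) (the principal direction), mix.lambda(i) (the variance
% along that direction) and mix.covars(i) (the off-subspace noise
% variance). The implied full covariance can be written as
%   C_i = mix.covars(i)*eye(2) + (mix.lambda(i) - mix.covars(i))*mix.U(:,:,i)*mix.U(:,:,i)'
% so its one standard deviation ellipse has semi-axis sqrt(mix.lambda(i))
% along mix.U(:,:,i) and sqrt(mix.covars(i)) orthogonal to it, which is
% what the loop below draws.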
% Now plot the result
for i = 1:ncentres
  % Plot the PC vectors
  v = mix.U(:,:,i);
  start = mix.centres(i,:) - sqrt(mix.lambda(i))*(v');
  endpt = mix.centres(i,:) + sqrt(mix.lambda(i))*(v');
  linex = [start(1) endpt(1)];
  liney = [start(2) endpt(2)];
  line(linex, liney, 'Color', 'k', 'LineWidth', 3)
  % Plot ellipses of one standard deviation
  theta = 0:0.02:2*pi;
  x = sqrt(mix.lambda(i))*cos(theta);
  y = sqrt(mix.covars(i))*sin(theta);
  % Rotate ellipse axes
  rot_matrix = [v(1) -v(2); v(2) v(1)];
  ellipse = (rot_matrix*([x; y]))';
  % Adjust centre
  ellipse = ellipse + ones(length(theta), 1)*mix.centres(i,:);
  plot(ellipse(:,1), ellipse(:,2), 'r-')
end
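
% The help text above also mentions a 3-D view of the density function.
% A minimal sketch of how such a surface could be produced with the
% standard Netlab call gmmprob(mix, x); the grid limits are chosen by
% eye to cover the three clusters:
gx = -2:0.1:4;
gy = -2:0.1:6;
[GX, GY] = meshgrid(gx, gy);
Z = gmmprob(mix, [GX(:), GY(:)]);   % unconditional density p(x) on the grid
Z = reshape(Z, size(GX));
fh2 = figure;
surf(GX, GY, Z);
title('Density function of the PPCA mixture model')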

disp(' ')
disp('Press any key to exit')
pause
close(fh1);
clear all;