toolboxes/FullBNT-1.0.7/netlab3.3/demgmm5.m @ 0:e9a9cd732c1e tip

first hg version after svn
author wolffd
date Tue, 10 Feb 2015 15:05:51 +0000
%DEMGMM5 Demonstrate density modelling with a PPCA mixture model.
%
% Description
% The problem consists of modelling data generated by a mixture of
% three Gaussians in 2 dimensions with a mixture model whose components
% each have a one-dimensional PPCA covariance structure. The priors are
% 0.3, 0.5 and 0.2; the centres are (2, 3.5), (0, 0) and (0, 2); the
% variances are (0.01, 0.64) axis aligned, (0.04, 1) rotated by 30
% degrees, and (1, 0.01) axis aligned. The first figure contains a
% scatter plot of the data.
%
% A mixture model with three one-dimensional PPCA components is trained
% using EM. The model priors and centres are printed before and after
% training: the priors form the first column, and the centres (given as
% (x, y) pairs) form the next two columns.
%
% The scatter plot is then overlaid with the one standard deviation
% ellipses for the three components of the mixture model, together with
% a line showing one standard deviation along the principal component
% of each component.
%
% See also
% GMM, GMMINIT, GMMEM, GMMPROB, PPCA
%

% Copyright (c) Ian T Nabney (1996-2001)

% Fix the seeds for reproducible results
randn('state', 1);
rand('state', 1);

% Generate the data from a mixture of three Gaussians
ndata = 500;
data = randn(ndata, 2);
prior = [0.3 0.5 0.2];
% Mixture model swaps clusters 1 and 3
datap = [0.2 0.5 0.3];
datac = [0 2; 0 0; 2 3.5];
datacov = repmat(eye(2), [1 1 3]);
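% datap, datac and datacov record the true generating parameters in the
% order used by the on-screen description below (clusters 1 and 3 are
% swapped relative to the order in which they are generated); they are
% not referenced again in this script.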
data1 = data(1:prior(1)*ndata, :);
data2 = data(prior(1)*ndata+1:(prior(2)+prior(1))*ndata, :);
data3 = data((prior(1)+prior(2))*ndata+1:ndata, :);

% First cluster has axis aligned variance and centre (2, 3.5)
data1(:, 1) = data1(:, 1)*0.1 + 2.0;
data1(:, 2) = data1(:, 2)*0.8 + 3.5;
datacov(:, :, 3) = [0.1*0.1 0; 0 0.8*0.8];

% Second cluster has variance axes rotated by 30 degrees and centre (0, 0)
rotn = [cos(pi/6) -sin(pi/6); sin(pi/6) cos(pi/6)];
data2(:, 1) = data2(:, 1)*0.2;
data2 = data2*rotn;
datacov(:, :, 2) = rotn' * [0.04 0; 0 1] * rotn;
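% The points are row vectors, so y = x*rotn has covariance
% rotn' * cov(x) * rotn, which is the matrix stored above.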

% Third cluster has centre (0, 2) and small variance parallel to the y-axis
data3(:, 2) = data3(:, 2)*0.1;
data3 = data3 + repmat([0 2], prior(3)*ndata, 1);
datacov(:, :, 1) = [1 0; 0 0.1*0.1];	% index 1 because of the swap above

% Put the dataset together again
data = [data1; data2; data3];

% Fit three one-dimensional PPCA models
ncentres = 3;
ppca_dim = 1;

clc
disp('This demonstration illustrates the use of a Gaussian mixture model')
disp('with a probabilistic PCA covariance structure to approximate the')
disp('unconditional probability density of data in a two-dimensional space.')
disp('We begin by generating the data from a mixture of three Gaussians and')
disp('plotting it.')
disp(' ')
disp('The first cluster has axis aligned variance and centre (0, 2).')
disp('The variance parallel to the x-axis is significantly greater')
disp('than that parallel to the y-axis.')
disp('The second cluster has variance axes rotated by 30 degrees')
disp('and centre (0, 0). The third cluster has significant variance')
disp('parallel to the y-axis and centre (2, 3.5).')
disp(' ')
disp('Press any key to continue.')
pause

fh1 = figure;
plot(data(:, 1), data(:, 2), 'o')
set(gca, 'Box', 'on')
axis equal
hold on

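% Create a Gaussian mixture in 2 dimensions with ncentres components,
% each with a PPCA covariance structure of dimension ppca_dim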
mix = gmm(2, ncentres, 'ppca', ppca_dim);
options = foptions;
options(14) = 10;	% Just use 10 iterations of k-means in initialisation
options(1) = -1;	% Switch off all warnings

% Initialise the model parameters from the data
mix = gmminit(mix, data, options);
disp('The mixture model has three components with 1-dimensional')
disp('PPCA subspaces. The model parameters after initialisation using')
disp('the k-means algorithm are as follows:')
disp('    Priors      Centres')
disp([mix.priors' mix.centres])
disp(' ')
disp('Press any key to continue.')
pause

options(1) = 1;		% Prints out error values.
options(14) = 30;	% Number of iterations.

disp('We now train the model using the EM algorithm for up to 30 iterations.')
disp(' ')
disp('Press any key to continue.')
pause

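% gmmem returns the trained mixture, the updated options vector (with the
% final value of the error function, the negative log likelihood, in
% options(8)) and a log of the error at each EM iteration.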
[mix, options, errlog] = gmmem(mix, data, options);
disp('The trained model has priors and centres:')
disp('    Priors      Centres')
disp([mix.priors' mix.centres])

% Now plot the result
for i = 1:ncentres
  % Plot the PC vectors
  v = mix.U(:, :, i);
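  % For a 1-d PPCA component, mix.U(:, :, i) is the unit principal
  % component direction and mix.lambda(i) the variance along it, so the
  % black line below spans one standard deviation either side of the centre.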
  start = mix.centres(i,:) - sqrt(mix.lambda(i))*(v');
  endpt = mix.centres(i,:) + sqrt(mix.lambda(i))*(v');
  linex = [start(1) endpt(1)];
  liney = [start(2) endpt(2)];
  line(linex, liney, 'Color', 'k', 'LineWidth', 3)
  % Plot ellipses of one standard deviation
  theta = 0:0.02:2*pi;
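  % Semi-axes of the ellipse: sqrt(mix.lambda(i)) along the principal
  % component, and sqrt(mix.covars(i)) orthogonal to it, mix.covars(i)
  % being the residual noise variance of component i.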
  x = sqrt(mix.lambda(i))*cos(theta);
  y = sqrt(mix.covars(i))*sin(theta);
  % Rotate ellipse axes
  rot_matrix = [v(1) -v(2); v(2) v(1)];
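  % Since v is a unit vector, [v(1) -v(2); v(2) v(1)] is the rotation
  % mapping the x-axis onto the principal component direction.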
  ellipse = (rot_matrix*([x; y]))';
  % Adjust centre
  ellipse = ellipse + ones(length(theta), 1)*mix.centres(i,:);
  plot(ellipse(:,1), ellipse(:,2), 'r-')
end

disp(' ')
disp('Press any key to exit')
pause
close(fh1);
clear all;