comparison toolboxes/FullBNT-1.0.7/netlab3.3/gmmem.m @ 0:e9a9cd732c1e tip

first hg version after svn
author wolffd
date Tue, 10 Feb 2015 15:05:51 +0000
parents
children
comparison
equal deleted inserted replaced
-1:000000000000 0:e9a9cd732c1e
function [mix, options, errlog] = gmmem(mix, x, options)
%GMMEM	EM algorithm for Gaussian mixture model.
%
%	Description
%	[MIX, OPTIONS, ERRLOG] = GMMEM(MIX, X, OPTIONS) uses the Expectation
%	Maximization algorithm of Dempster et al. to estimate the parameters
%	of a Gaussian mixture model defined by a data structure MIX. The
%	matrix X represents the data whose expectation is maximized, with
%	each row corresponding to a vector.  The optional parameters have
%	the following interpretations.
%
%	OPTIONS(1) is set to 1 to display error values; also logs error
%	values in the return argument ERRLOG. If OPTIONS(1) is set to 0, then
%	only warning messages are displayed.  If OPTIONS(1) is -1, then
%	nothing is displayed.
%
%	OPTIONS(3) is a measure of the absolute precision required of the
%	error function at the solution. If the change in log likelihood
%	between two steps of the EM algorithm is less than this value, then
%	the function terminates.
%
%	OPTIONS(5) is set to 1 if a covariance matrix is reset to its
%	original value when any of its singular values are too small (less
%	than MIN_COVAR which has the value eps).  With the default value of
%	0 no action is taken.
%
%	OPTIONS(14) is the maximum number of iterations; default 100.
%
%	The optional return value OPTIONS contains the final error value
%	(i.e. data log likelihood) in OPTIONS(8).
%
%	See also
%	GMM, GMMINIT
%

%	Copyright (c) Ian T Nabney (1996-2001)

% Check that inputs are consistent
errstring = consist(mix, 'gmm', x);
if ~isempty(errstring)
  error(errstring);
end

[ndata, xdim] = size(x);

% Sort out the options
if (options(14))
  niters = options(14);
else
  niters = 100;
end

display = options(1);
store = 0;
if (nargout > 2)
  store = 1;	% Store the error values to return them
  errlog = zeros(1, niters);
end
test = 0;
if options(3) > 0.0
  test = 1;	% Test log likelihood for termination
end

check_covars = 0;
if options(5) >= 1
  if display >= 0
    disp('check_covars is on');
  end
  check_covars = 1;	% Ensure that covariances don't collapse
  MIN_COVAR = eps;	% Minimum singular value of covariance matrix
  init_covars = mix.covars;
end

% Main loop of algorithm
for n = 1:niters
  
  % E-step: calculate posteriors (responsibilities) based on old parameters
  [post, act] = gmmpost(mix, x);
  
  % Calculate error value if needed
  if (display | store | test)
    prob = act*(mix.priors)';
    % Error value is negative log likelihood of data
    e = - sum(log(prob));
    if store
      errlog(n) = e;
    end
    if display > 0
      fprintf(1, 'Cycle %4d  Error %11.6f\n', n, e);
    end
    if test
      % Terminate when the change in negative log likelihood is small
      % enough; eold is always set on the first pass (n == 1).
      if (n > 1 & abs(e - eold) < options(3))
        options(8) = e;
        return;
      else
        eold = e;
      end
    end
  end
  
  % M-step: adjust the new estimates for the parameters.
  % new_pr(j) is the effective number of points assigned to component j;
  % new_c holds the responsibility-weighted data sums.
  new_pr = sum(post, 1);
  new_c = post' * x;
  
  % Now move new estimates to old parameter vectors
  mix.priors = new_pr ./ ndata;
  
  mix.centres = new_c ./ (new_pr' * ones(1, mix.nin));
  
  switch mix.covar_type
    case 'spherical'
      n2 = dist2(x, mix.centres);
      v = zeros(1, mix.ncentres);
      for j = 1:mix.ncentres
        v(j) = (post(:,j)'*n2(:,j));
      end
      mix.covars = ((v./new_pr))./mix.nin;
      if check_covars
        % Ensure that no covariance is too small
        for j = 1:mix.ncentres
          if mix.covars(j) < MIN_COVAR
            mix.covars(j) = init_covars(j);
          end
        end
      end
    case 'diag'
      for j = 1:mix.ncentres
        diffs = x - (ones(ndata, 1) * mix.centres(j,:));
        mix.covars(j,:) = sum((diffs.*diffs).*(post(:,j)*ones(1, ...
          mix.nin)), 1)./new_pr(j);
      end
      if check_covars
        % Ensure that no covariance is too small
        for j = 1:mix.ncentres
          if min(mix.covars(j,:)) < MIN_COVAR
            mix.covars(j,:) = init_covars(j,:);
          end
        end
      end
    case 'full'
      for j = 1:mix.ncentres
        diffs = x - (ones(ndata, 1) * mix.centres(j,:));
        % Weight each row by sqrt of its responsibility so that
        % diffs'*diffs gives the responsibility-weighted scatter matrix.
        diffs = diffs.*(sqrt(post(:,j))*ones(1, mix.nin));
        mix.covars(:,:,j) = (diffs'*diffs)/new_pr(j);
      end
      if check_covars
        % Ensure that no covariance is too small
        for j = 1:mix.ncentres
          if min(svd(mix.covars(:,:,j))) < MIN_COVAR
            mix.covars(:,:,j) = init_covars(:,:,j);
          end
        end
      end
    case 'ppca'
      for j = 1:mix.ncentres
        diffs = x - (ones(ndata, 1) * mix.centres(j,:));
        diffs = diffs.*(sqrt(post(:,j))*ones(1, mix.nin));
        [tempcovars, tempU, templambda] = ...
          ppca((diffs'*diffs)/new_pr(j), mix.ppca_dim);
        if length(templambda) ~= mix.ppca_dim
          error('Unable to extract enough components');
        else
          mix.covars(j) = tempcovars;
          mix.U(:, :, j) = tempU;
          mix.lambda(j, :) = templambda;
        end
      end
      if check_covars
        % Ensure that no covariance is too small.
        % BUGFIX: the check must loop over all components; previously it
        % used the stale loop index j and only tested the last component.
        for j = 1:mix.ncentres
          if mix.covars(j) < MIN_COVAR
            mix.covars(j) = init_covars(j);
          end
        end
      end
    otherwise
      error(['Unknown covariance type ', mix.covar_type]);
  end
end

% Maximum iterations reached without convergence: record the final
% negative log likelihood and warn (unless display is suppressed).
options(8) = -sum(log(gmmprob(mix, x)));
if (display >= 0)
  disp(maxitmess);
end