comparison: util/classes/dictionaryMatrices/rotatematrix.m @ 169:290cca7d3469 (danieleb)

Added dictionary decorrelation functions and test script for ICASSP paper.

author:   Daniele Barchiesi <daniele.barchiesi@eecs.qmul.ac.uk>
date:     Thu, 29 Sep 2011 09:46:52 +0100
children: 68fb71aa5339
compared with 168:ff866a412be5

function [Dhat, cost, W] = rotatematrix(D, Phi, method, param)
% ROTATEMATRIX Find a transform W that minimises J(W) = 0.5*||D - W*Phi||_F
% using gradient descent ('unconstrained'), a self-correcting tangent update
% ('tangent'), or descent restricted to the rotation group ('steepestlie',
% 'linesearchlie', 'conjgradlie').
% param is a struct with fields nIter (number of iterations), step (step
% size) and, for 'tangent', reg (regularisation weight).
%
% REFERENCE
% M.D. Plumbley, Geometrical Methods for Non-Negative ICA: Manifolds, Lie
% Groups and Toral Subalgebras, Neurocomputing
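%
% The 'steepestlie', 'linesearchlie' and 'conjgradlie' methods project the
% gradient onto the Lie algebra of skew-symmetric matrices, skew(grad*W'),
% and apply each update through the matrix exponential, W = expm(t*H)*W,
% so that W remains orthogonal throughout the iterations.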
if ~nargin, testrotatematrix; return, end   % no arguments: run the self-test

if ~exist('method','var') || isempty(method), method = 'unconstrained'; end

J = @(W) 0.5*norm(D-W*Phi,'fro');   % cost function
cost = zeros(param.nIter,1);

W = eye(size(Phi,1));               % initialise W as the identity
t = 0;                              % line-search parameter (warm-started)
Gprev = 0;                          % previous gradient (conjugate gradient)
Hprev = 0;                          % previous search direction
for i=1:param.nIter
    cost(i) = J(W);
    grad = (W*Phi-D)*Phi';                  % gradient of 0.5*||D-W*Phi||_F^2 w.r.t. W
    switch method
        case 'unconstrained'                % gradient descent
            eta = param.step;
            W = W - eta*grad;               % update W by steepest descent
        case 'tangent'                      % self-correcting tangent
            eta = param.step;
            mu  = param.reg;
            W = W - 0.5*eta*(grad - W*grad'*W + mu*W*(W'*W-eye(size(W))));
        case 'steepestlie'
            eta = param.step;
            B = 2*skew(grad*W');            % gradient in the Lie algebra
            W = expm(-eta*B)*W;             % update W by steepest descent
        case 'linesearchlie'
            B = 2*skew(grad*W');            % gradient in the Lie algebra
            H = -B;                         % search direction: negative gradient
            t = searchline(J,H,W,t);        % line search in one-parameter Lie subalgebra
            W = expm(t*H)*W;                % update W by line search
        case 'conjgradlie'
            G = 2*skew(grad*W');            % gradient in the Lie algebra
            H = -G + polakRibiere(G,Gprev)*Hprev; % conjugate gradient direction
            t = searchline(J,H,W,t);        % line search in one-parameter Lie subalgebra
            W = expm(t*H)*W;                % update W by line search
            Hprev = H;                      % save search direction
            Gprev = G;                      % save gradient
    end
end
Dhat = W*Phi;
end
% function C = matcomm(A,B)
% % Matrix commutator
% C = A*B-B*A;

function gamma = polakRibiere(G,Gprev)
% Polak-Ribiere weight for the conjugate gradient direction
gamma = G(:)'*(G(:)-Gprev(:))/(norm(Gprev(:))^2);
if isnan(gamma) || isinf(gamma)
    gamma = 0;                              % fall back to steepest descent
end
end

function t = searchline(J,H,W,t)
% line search for the step t along expm(t*H), warm-started at the previous t
t = fminsearch(@(x) J(expm(x*H)*W),t);
end

function B = skew(A)
% skew-symmetric part of A
B = 0.5*(A - A');
end

function testrotatematrix
% self-test: compare the descent methods on a randomly rotated matrix
clear, clc, close all
n = 256;
m = 512;
disp('A random matrix...');
Phi = randn(n,m);
disp('And its rotated mate...');
Qtrue = expm(skew(randn(n)));
D = Qtrue*Phi;
disp('Now, let''s try to find the right rotation...');
param.nIter = 1000;
param.step = 0.001;

cost = zeros(param.nIter,4);
[~, cost(:,1)] = rotatematrix(D,Phi,'unconstrained',param);
[~, cost(:,2)] = rotatematrix(D,Phi,'steepestlie',param);
[~, cost(:,3)] = rotatematrix(D,Phi,'linesearchlie',param);
[~, cost(:,4)] = rotatematrix(D,Phi,'conjgradlie',param);

figure, plot(cost)
set(gca,'XScale','log','YScale','log')
legend({'uncons','steepestlie','linesearchlie','conjgradlie'})
grid on
xlabel('number of iterations')
ylabel('J(W)')
end
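
A minimal usage sketch, assuming rotatematrix.m is on the MATLAB path: it exercises the 'tangent' method, the only one the self-test above does not cover, which additionally reads param.reg. The sizes and parameter values below are illustrative, not tuned.

n = 64; m = 128;
Phi = randn(n,m);                          % matrix to be rotated
A = randn(n); Qtrue = expm(0.5*(A-A'));    % random rotation (exponential of a skew-symmetric matrix)
D = Qtrue*Phi;                             % rotated target
param.nIter = 500;
param.step  = 0.001;
param.reg   = 1;                           % regularisation weight, only read by 'tangent'
[Dhat, cost, W] = rotatematrix(D, Phi, 'tangent', param);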