diff util/classes/dictionaryMatrices/rotatematrix.m @ 169:290cca7d3469 danieleb

Added dictionary decorrelation functions and test script for ICASSP paper.
author Daniele Barchiesi <daniele.barchiesi@eecs.qmul.ac.uk>
date Thu, 29 Sep 2011 09:46:52 +0100
parents
children 68fb71aa5339
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/util/classes/dictionaryMatrices/rotatematrix.m	Thu Sep 29 09:46:52 2011 +0100
@@ -0,0 +1,96 @@
+function [Dhat, cost, W] = rotatematrix(D,Phi,method,param)
+% ROTATEMATRIX Find a matrix W (a rotation, for the Lie-group methods)
+% that minimises the cost J(W) = 0.5*||D - W*Phi||_F^2
+%
+% [Dhat, cost, W] = rotatematrix(D,Phi,method,param)
+%
+% INPUTS
+% D      - target matrix
+% Phi    - matrix to be rotated
+% method - 'unconstrained', 'tangent', 'steepestlie', 'linesearchlie' or
+%          'conjgradlie' (default: 'unconstrained')
+% param  - struct with fields nIter (number of iterations), step (step
+%          size) and, for the 'tangent' method, reg (regularisation weight)
+%
+% OUTPUTS
+% Dhat   - rotated matrix W*Phi
+% cost   - cost function value at each iteration
+% W      - optimised transform
+%
+% REFERENCE
+% M.D. Plumbley, Geometrical Methods for Non-Negative ICA: Manifolds, Lie
+% Groups and Toral Subalgebras, Neurocomputing 67:161-197, 2005
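+%
+% EXAMPLE (a minimal sketch of a typical call; parameter values are taken
+% from the test script below)
+%   param = struct('nIter',1000,'step',0.001);
+%   [Dhat, cost, W] = rotatematrix(D,Phi,'conjgradlie',param);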
+if ~nargin, testrotatematrix; return, end	% no input arguments: run the test below
+
+
+if ~exist('method','var') || isempty(method), method = 'unconstrained'; end
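+% Fill in missing parameters (assumed defaults, chosen to match the test
+% script below; not part of the original interface)
+if ~exist('param','var'), param = struct; end
+if ~isfield(param,'nIter'), param.nIter = 1000; end
+if ~isfield(param,'step'),  param.step  = 0.001; end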
+
+J = @(W) 0.5*norm(D-W*Phi,'fro')^2;	% cost function (squared norm, consistent with the gradient below)
+cost = zeros(param.nIter,1);
+
+W   = eye(size(Phi,1));		% initial estimate: the identity transform
+t = 0;							% line-search step, warm-started across iterations
+Gprev = 0;						% previous gradient (used by 'conjgradlie')
+Hprev = 0;						% previous search direction (used by 'conjgradlie')
+for i=1:param.nIter
+	cost(i) = J(W);
+	grad = (W*Phi-D)*Phi';	% gradient of J with respect to W
+	switch method
+		case 'unconstrained'		% gradient descent
+			eta = param.step;
+			W = W - eta*grad;		% update W by steepest descent
+		case 'tangent'				% self correcting tangent
+			eta = param.step;
+			mu  = param.reg;
+			% step along the tangent space with a penalty (weight mu) that
+			% pulls W back towards orthonormality
+			W = W - 0.5*eta*(grad - W*grad'*W + mu*W*(W'*W-eye(size(W))));
+		case 'steepestlie'
+			eta = param.step;
+			B = 2*skew(grad*W');	% calculate gradient in lie algebra
+			W = expm(-eta*B)*W;		% update W by steepest descent
+		case 'linesearchlie'
+			B = 2*skew(grad*W');	% calculate gradient in lie algebra
+			H = -B;					% calculate direction as negative gradient
+			t = searchline(J,H,W,t);% line search in one-parameter lie subalgebra
+			W = expm(t*H)*W;		% update W by line search
+		case 'conjgradlie'
+			G = 2*skew(grad*W');	% calculate gradient in lie algebra
+			H = -G + polakRibiere(G,Gprev)*Hprev; %calculate conjugate gradient direction
+			t = searchline(J,H,W,t);% line search in one-parameter lie subalgebra
+			W = expm(t*H)*W;		% update W by line search
+			Hprev = H;				% save search direction
+			Gprev = G;				% save gradient
+	end
+end
+Dhat = W*Phi;
+end
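+% Note on the Lie-group methods above: skew() projects the gradient onto
+% the skew-symmetric matrices (the Lie algebra of the rotation group) and
+% W is updated multiplicatively via the matrix exponential, so a W that
+% starts as a rotation stays a rotation after every update.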
+% function C = matcomm(A,B)
+% %Matrix commutator
+% C = A*B-B*A;
+
+function gamma = polakRibiere(G,Gprev)
+% Polak-Ribiere weight: gamma = G'*(G - Gprev) / ||Gprev||^2
+gamma = G(:)'*(G(:)-Gprev(:))/(norm(Gprev(:))^2);
+if isnan(gamma) || isinf(gamma)
+	gamma = 0;	% fall back to steepest descent (first iteration: Gprev = 0)
+end
+end
+
+function t = searchline(J,H,W,t)
+% minimise J along the one-parameter subgroup x -> expm(x*H)*W,
+% warm-starting fminsearch from the previous step size t
+t = fminsearch(@(x) J(expm(x*H)*W),t);
+end
+
+function B = skew(A)
+% skew-symmetric part of A
+B = 0.5*(A - A');
+end
+
+
+function testrotatematrix
+clear, clc, close all
+n = 256;
+m = 512;
+disp('A random matrix...');
+Phi = randn(n,m);
+disp('And its rotated mate...');
+Qtrue = expm(skew(randn(n)));
+D = Qtrue*Phi; 
+disp('Now, let''s try to find the right rotation...');
+param.nIter = 1000;
+param.step  = 0.001;
+
+cost = zeros(param.nIter,4);
+[~, cost(:,1)] = rotatematrix(D,Phi,'unconstrained',param);
+[~, cost(:,2)] = rotatematrix(D,Phi,'steepestlie',param);
+[~, cost(:,3)] = rotatematrix(D,Phi,'linesearchlie',param);
+[~, cost(:,4)] = rotatematrix(D,Phi,'conjgradlie',param);
+
+figure, plot(cost)
+set(gca,'XScale','log','Yscale','log')
+legend({'unconstrained','steepestlie','linesearchlie','conjgradlie'})
+grid on
+xlabel('number of iterations')
+ylabel('J(W)')
+end