function [Dhat, cost, W] = rotatematrix(D, Phi, method, param)
% ROTATEMATRIX Find a rotation W minimising the misfit between D and W*Phi.
%
%   [Dhat, cost, W] = rotatematrix(D, Phi, method, param)
%
%   Inputs:
%     D      - target matrix (n x m)
%     Phi    - matrix to be rotated (n x m)
%     method - optimisation scheme (default 'unconstrained'):
%              'unconstrained'  plain gradient descent on W
%              'tangent'        self-correcting tangent descent (uses param.reg)
%              'steepestlie'    steepest descent in the Lie algebra so(n)
%              'linesearchlie'  line search along the Lie-algebra gradient
%              'conjgradlie'    conjugate gradient in the Lie algebra
%     param  - struct with fields:
%              .nIter  number of iterations
%              .step   step size ('unconstrained', 'tangent', 'steepestlie')
%              .reg    regularisation weight ('tangent' only)
%
%   Outputs:
%     Dhat - W*Phi after the final iteration
%     cost - per-iteration objective J(W) = 0.5*||D - W*Phi||_F^2
%     W    - the learned rotation matrix (n x n)
%
%   Called with no arguments, runs the built-in demo (testrotatematrix).
%
% REFERENCE
% M.D. Plumbley, Geometrical Methods for Non-Negative ICA: Manifolds, Lie
% Groups and Toral Subalgebra, Neurocomputing
if ~nargin, testrotatematrix; return, end

if ~exist('method','var') || isempty(method), method = 'unconstrained'; end

% Objective: half the *squared* Frobenius distance. The gradient used below,
% (W*Phi-D)*Phi', is the gradient of the squared norm, so J must carry the
% ^2 (the original omitted it, making the reported cost inconsistent with
% the search direction used by the line searches).
J = @(W) 0.5*norm(D - W*Phi, 'fro')^2;
cost = zeros(param.nIter, 1);

W = eye(size(Phi,1));   % start from the identity rotation
t = 0;                  % line-search parameter, warm-started across iterations
Gprev = 0;              % previous Lie-algebra gradient (conjgradlie)
Hprev = 0;              % previous search direction (conjgradlie)
for i = 1:param.nIter
    cost(i) = J(W);
    grad = (W*Phi - D)*Phi';    % Euclidean gradient of J with respect to W
    switch method
        case 'unconstrained'    % gradient descent
            eta = param.step;
            W = W - eta*grad;   % update W by steepest descent
        case 'tangent'          % self-correcting tangent
            eta = param.step;
            mu = param.reg;
            % descend along the tangent of the orthogonal group, with a
            % penalty term pulling W back towards orthonormality
            W = W - 0.5*eta*(grad - W*grad'*W + mu*W*(W'*W - eye(size(W))));
        case 'steepestlie'
            eta = param.step;
            B = 2*skew(grad*W');     % gradient in the Lie algebra so(n)
            W = expm(-eta*B)*W;      % update W by steepest descent
        case 'linesearchlie'
            B = 2*skew(grad*W');     % gradient in the Lie algebra
            H = -B;                  % direction = negative gradient
            t = searchline(J,H,W,t); % line search in one-parameter Lie subalgebra
            W = expm(t*H)*W;         % update W by line search
        case 'conjgradlie'
            G = 2*skew(grad*W');     % gradient in the Lie algebra
            H = -G + polakRibiere(G,Gprev)*Hprev; % conjugate gradient direction
            t = searchline(J,H,W,t); % line search in one-parameter Lie subalgebra
            W = expm(t*H)*W;         % update W by line search
            Hprev = H;               % save search direction
            Gprev = G;               % save gradient
        otherwise
            % was silently ignored before: every iteration would be a no-op
            error('rotatematrix:unknownMethod', 'Unknown method ''%s''', method);
    end
end
Dhat = W*Phi;
end

% function C = matcomm(A,B)
% %Matrix commutator
% C = A*B-B*A;

function gamma = polakRibiere(G, Gprev)
% Polak-Ribiere conjugate-gradient weight; 0 on degenerate input
% (e.g. the first iteration, where Gprev == 0 gives NaN/Inf).
gamma = G(:)'*(G(:)-Gprev(:))/(norm(Gprev(:))^2);
if isnan(gamma) || isinf(gamma)
    gamma = 0;
end
end

function t = searchline(J, H, W, t)
% 1-D minimisation of J along the curve t -> expm(t*H)*W, warm-started at t.
t = fminsearch(@(x) J(expm(x*H)*W), t);
end

function B = skew(A)
% Skew-symmetric part of A (projection onto the Lie algebra so(n)).
B = 0.5*(A - A');
end

function testrotatematrix
% Demo: draw a random rotation Qtrue, rotate a random Phi, and check that
% the various optimisation schemes recover the rotation from D = Qtrue*Phi.
clear, clc, close all
n = 256;
m = 512;
disp('A random matrix...');
Phi = randn(n,m);
disp('And its rotated mate...');
Qtrue = expm(skew(randn(n)));
D = Qtrue*Phi;
disp('Now, lets try to find the right rotation...');
param.nIter = 1000;
param.step  = 0.001;

cost = zeros(param.nIter, 4);
[~, cost(:,1)] = rotatematrix(D, Phi, 'unconstrained', param);
[~, cost(:,2)] = rotatematrix(D, Phi, 'steepestlie', param);
[~, cost(:,3)] = rotatematrix(D, Phi, 'linesearchlie', param);
[~, cost(:,4)] = rotatematrix(D, Phi, 'conjgradlie', param);

figure, plot(cost)
set(gca, 'XScale', 'log', 'YScale', 'log')
legend({'unconstrained','steepestlie','linesearchlie','conjgradlie'})
grid on
xlabel('number of iterations')
ylabel('J(W)')
end