diff DL/Majorization Minimization DL/dict_update_REG_fn.m @ 155:b14209313ba4 ivand_dev

Integration of Majorization Minimisation Dictionary Learning
author Ivan Damnjanovic lnx <ivan.damnjanovic@eecs.qmul.ac.uk>
date Mon, 22 Aug 2011 11:46:35 +0100
parents
children
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/DL/Majorization Minimization DL/dict_update_REG_fn.m	Mon Aug 22 11:46:35 2011 +0100
@@ -0,0 +1,39 @@
+function [Phiout,unhatnz] = dict_update_REG_fn(Phi,x,unhat,maxIT,eps,cvset)
+%% Regularized dictionary learning with a constraint on the matrix Frobenius norm %%%%%
+% Phi = Normalized initial dictionary
+% x = Signal(s); can be a vector or a matrix
+% unhat = Initial guess for the coefficients
+% maxIT = Maximum number of iterations of the MM dictionary update
+% eps = Stopping threshold on the change of the dictionary between iterations
+% cvset = Dictionary constraint. 0 = non-convex ||D||_F = N, 1 = convex ||D||_F <= N
+% Phiout = Updated dictionary
+% unhatnz = Updated coefficients (rows belonging to depleted atoms are removed)
+
+%%
+B = Phi;                      % current dictionary estimate
+phim = norm(Phi, 'fro');      % target Frobenius norm of the dictionary
+K = zeros(size(Phi));         % next dictionary estimate
+c = .1 + svds(unhat,1)^2;     % majorization constant, strictly larger than the squared spectral norm of unhat
+
+%%
+i = 1;
+while (sum(sum((B-K).^2)) > eps) && (i <= maxIT)   % stop when the dictionary change drops below eps
+    if i>1
+        B = K;
+    end
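+    % Majorization-minimization step: the update below is algebraically identical to
+    % K = B + (1/c)*(x - B*unhat)*unhat', i.e. a step of size 1/c along the residual
+    % correlation; it minimizes the quadratic surrogate of ||x - D*unhat||_F^2 built
+    % around B, which majorizes the objective because c exceeds svds(unhat,1)^2.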
+    K = 1/c *(x*unhat' + B*(c*eye(size(B,2))-unhat*unhat'));
+    Kfn = norm(K, 'fro');      % Frobenius norm (not squared), so the scaling below matches phim
+    if cvset == 1
+        K = min(1,phim/Kfn)*K; % convex constraint set: project onto ||K||_F <= phim
+    else
+        K = (phim/Kfn)*K;      % fixed-norm constraint set: rescale so that ||K||_F = phim
+    end
+    i = i+1;
+end
+
+%% depleted atoms cancellation %%%
+[Y,I] = sort(sum(K.^2),'descend');   % atom energies, largest first
+RR = sum(Y >= 0.0001);               % number of atoms with energy above the depletion threshold
+Phiout = K(:,I(1:RR));               % keep only the non-depleted atoms
+unhatnz = unhat(I(1:RR),:);          % drop the coefficient rows of the removed atoms
+end
\ No newline at end of file
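
For context, a minimal usage sketch of how this update could be driven from an alternating dictionary-learning loop. The data sizes, the initialization, and the plain least-squares stand-in for the sparse coding step are illustrative assumptions, not part of the committed code:

    % Hypothetical driver (assumed sizes; a real sparse coder would replace the '\' step)
    x = randn(20, 1000);                        % training signals, one per column
    Phi = randn(20, 64);
    Phi = Phi * diag(1 ./ sqrt(sum(Phi.^2)));   % normalize the atoms (columns)
    unhat = Phi \ x;                            % crude initial coefficients
    for it = 1:10
        % a sparse coding step (e.g. iterative soft thresholding) would update unhat here
        [Phi, unhat] = dict_update_REG_fn(Phi, x, unhat, 100, 1e-6, 1);
    end

Since dict_update_REG_fn removes depleted atoms, the dictionary and the coefficient matrix can shrink between iterations; the loop stays consistent because the two outputs are returned together.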