% Dictionary-learning recovery experiment.
% Draws a random generative dictionary Do and R-sparse coefficients Xo,
% forms training signals Y = Do*Xo, then alternates iterative
% soft-thresholding sparse approximation (mm1) with a majorization-
% minimization dictionary update (dict_update_REG_cn), and finally
% reports the fraction of generative atoms recovered.
clear
M      = 20;     % Signal length
N      = 40;     % Coefficient space dimension (number of atoms)
L      = 32*N;   % Number of training samples
R      = 3;      % Sparsity: nonzeros per training coefficient vector
IT     = 1000;   % Number of alternating sparse-approximation / dictionary-update iterations
map    = 1;      % Debiasing. 0 = no, 1 = yes
maxIT  = 1000;   % Inner-loop maximum iteration number
lambda = 2*.2;   % Lagrangian multiplier
epsx   = 10^-7;  % Stopping criterion for iterative soft-thresholding
epsd   = 10^-7;  % Stopping criterion for MM dictionary update
cvset  = 0;      % Dictionary constraint. 0 = non-convex ||d|| = 1, 1 = convex ||d|| <= 1
Tre    = .99;    % Correlation threshold for declaring two atoms identical

%%%% Generative dictionary
Do = randn(M,N);                                      % Generative dictionary
% Normalize columns: diag of Do'*Do holds squared column norms.
Do = Do*(diag(sum((Do'*Do).*eye(length(Do))).^-.5));

%%%% Sparse signal generation
Xo = zeros(N,L);   % Original sparse coefficients
for l = 1:L
    r = 1;
    while r <= R
        ind = fix(rand(1)*N)+ones(1);   % Uniform atom index in 1..N
        a = rand(1);
        % BUGFIX: test the entry in column l (was Xo(ind), which via
        % linear indexing reads column 1 only), so that every column
        % receives exactly R nonzeros on distinct rows.
        if Xo(ind,l)==0
            % Magnitude uniform in [.2,1], sign chosen by fair coin a.
            Xo(ind,l) = (.8*rand(1)+.2)*((a>=.5)-(a<.5));
            r = r+1;
        end
    end
end
Y = Do*Xo;   % Sparse training signals

%%%% Algorithm initialization
D = randn(M,N);                                   % Initial dictionary
D = D*(diag(sum((D'*D).*eye(length(D))).^-.5));   % Column normalization
X = ones(size(Xo));                               % Initial coefficients

for it = 1:IT
    it   % progress display
    % Majorizer constant: slightly above the largest singular value of D.
    to = .1+svds(D,1);
    [X,cost(it)] = mm1(D,Y,X,to,lambda,maxIT,epsx,map);
    plot(cost);
    [D,X] = dict_update_REG_cn(D,Y,X,maxIT,epsd,cvset);
end

%%%% Evaluation: an atom of Do counts as recovered when some learned
%%%% atom of D correlates with it above Tre.
success = sum(max(abs((Do'*D)))>=Tre);
display([' ------------------'])
% BUGFIX: the message claims a percentage, so convert the raw atom
% count (out of N) to percent before printing.
display([' ',num2str(100*success/N),'% of the atoms successfully recovered after ',num2str(IT),' iterations.']);