function DL = dl_ramirez(Problem,DL)
%% Dictionary learning with incoherent dictionary
%
% Learns a dictionary D from the training signals in Problem.b by
% alternating MOL-prior sparse coding and a dictionary update that
% penalises mutual coherence between atoms (factor zeta) and deviation
% of the atoms' norms (factor eta).
%
% REFERENCE
% I. Ramirez, F. Lecumberry and G. Sapiro, Sparse modeling with universal
% priors and learned incoherent dictionaries.

%%
% Centre for Digital Music, Queen Mary, University of London.
% This file copyright 2011 Daniele Barchiesi.
%
% This program is free software; you can redistribute it and/or
% modify it under the terms of the GNU General Public License as
% published by the Free Software Foundation; either version 2 of the
% License, or (at your option) any later version. See the file
% COPYING included with this distribution for more information.

%% Test function
if ~nargin, testdl_ramirez; return; end

%% Parameters & Defaults
X = Problem.b; %matrix of observed signals

% determine dictionary size %
if isfield(DL.param,'initdict') %if the dictionary has been initialised
    if (any(size(DL.param.initdict)==1) && all(iswhole(DL.param.initdict(:))))
        % initdict is a vector of indices into the training data
        dictSize = length(DL.param.initdict);
    else
        % initdict is an explicit dictionary matrix: one atom per column
        dictSize = size(DL.param.initdict,2);
    end
end
if isfield(DL.param,'dictsize')
    dictSize = DL.param.dictsize; % explicit size overrides initdict-derived size
end

if (size(X,2) < dictSize)
    error('Number of training signals is smaller than number of atoms to train');
end

% initialize the dictionary %
if isfield(DL.param,'initdict') && ~isempty(DL.param.initdict)
    if (any(size(DL.param.initdict)==1) && all(iswhole(DL.param.initdict(:))))
        % initdict holds indices of training signals to use as initial atoms
        D = X(:,DL.param.initdict(1:dictSize));
    else
        % NOTE(review): this branch was reconstructed from a corrupted
        % source following the standard KSVD-style initialisation —
        % confirm against the toolbox original.
        if (size(DL.param.initdict,1)~=size(X,1) || size(DL.param.initdict,2)<dictSize)
            error('Invalid initial dictionary');
        end
        D = DL.param.initdict(:,1:dictSize);
    end
else
    % no initial dictionary given: pick random non-degenerate training signals
    data_ids = find(colnorms_squared(X) > 1e-6); % ensure no zero data elements are chosen
    perm = randperm(length(data_ids));
    D = X(:,data_ids(perm(1:dictSize)));
end

% coherence penalty factor
if isfield(DL.param,'zeta')
    zeta = DL.param.zeta;
else
    zeta = 0.1;
end

% atoms norm penalty factor
if isfield(DL.param,'eta')
    eta = DL.param.eta;
else
    eta = 0.1;
end

% number of iterations (default is 40) %
if isfield(DL.param,'iternum')
    iternum = DL.param.iternum;
else
    iternum = 40;
end

% show dictionary every specified number of iterations
if isfield(DL.param,'show_dict')
    show_dictionary = 1;
    show_iter = DL.param.show_dict;
else
    show_dictionary = 0;
    show_iter = 0;
end

% temporarily point the training set at the full data and drop any stale
% reconstruction handle; restored after learning
tmpTraining = Problem.b1;
Problem.b1 = X;
if isfield(Problem,'reconstruct')
    Problem = rmfield(Problem, 'reconstruct');
end

%% Main Algorithm
Dprev = D;    %initial dictionary
Aprev = D\X;  %set initial solution as pseudoinverse
for i = 1:iternum
    %Sparse Coding step
    A = sparsecoding(X,D,Aprev);
    %Dictionary Update step
    D = dictionaryupdate(X,A,Dprev,zeta,eta);

    Dprev = D;
    Aprev = A;
    if ((show_dictionary)&&(mod(i,show_iter)==0))
        % BUG FIX: original referenced undefined variable 'dico';
        % the current dictionary is D
        dictimg = SMALL_showdict(D,[8 8],...
            round(sqrt(size(D,2))),round(sqrt(size(D,2))),'lines','highcontrast');
        figure(2); imagesc(dictimg);colormap(gray);axis off; axis image;
        pause(0.02);
    end
end

Problem.b1 = tmpTraining; % restore caller's training data
DL.D = D;                 % learned dictionary

end

function A = sparsecoding(X,D,Aprev)
%Sparse coding using a mixture of laplacians (MOL) as universal prior.
%
% Estimates the MOL hyperparameters (kappa, beta) by the method of
% moments from the previous coefficients Aprev, then approximates the
% non-convex MOL-penalised problem by a short sequence of reweighted-l1
% convex subproblems.

%parameters
K = size(D,2); %number of atoms
M = size(X,2); %number of signals

mu1 = mean(abs(Aprev(:)));            %first moment of distribution of Aprev
mu2 = (norm(Aprev(:))^2)/numel(Aprev);%second moment of distribution of Aprev
kappa = 2*(mu2-mu1^2)/(mu2-2*mu2^2);  %parameter kappa of the MOL distribution
beta = (kappa-1)*mu1;                 %parameter beta of the MOL distribution

E = X-D*Aprev;          %error term
sigmasq = mean(var(E)); %error variance
tau = 2*sigmasq*(kappa+1); %sparsity factor

%solve a succession of subproblems to approximate the non-convex cost
%function
nIter = 10;        %number of iterations of surrogate subproblem
Psi = zeros(K,M);  %initialise solution of subproblem
for iIter=1:nIter
    Reg = 1./(abs(Psi) + beta); %reweighting: small coefficients get larger penalty
    Psi = solvel1(X,D,tau,Reg);
end
A = Psi;
end

function Psi = solvel1(X,D,tau,A)
% Solve, column by column with CVX, the weighted-l1 problem
%   min_v ||X(:,m) - D*v|| + tau*||A(:,m).*v||_1
% where A holds the per-coefficient reweighting factors.
[K M] = size(A);
Psi = zeros(K,M);
for m=1:M
    cvx_begin quiet
        variable v(K)
        minimise (norm(X(:,m)-D*v) + tau*norm(A(:,m).*v,1));
    cvx_end
    Psi(:,m) = v;
end
end

function D = dictionaryupdate(X,A,Dprev,zeta,eta)
% Closed-form dictionary update: minimises the data misfit plus the
% coherence penalty (zeta, via Dprev'*Dprev) and the atom-norm penalty
% (eta, via its diagonal), solved as a single linear system with mrdivide.
D = (X*A' + 2*(zeta + eta)*Dprev)/(A*A' + 2*zeta*(Dprev'*Dprev) + 2*eta*diag(diag(Dprev'*Dprev)));
end

function Y = colnorms_squared(X)
% Squared l2 norm of each column of X.
% compute in blocks to conserve memory
Y = zeros(1,size(X,2));
blocksize = 2000;
for i = 1:blocksize:size(X,2)
    blockids = i : min(i+blocksize-1,size(X,2));
    Y(blockids) = sum(X(:,blockids).^2);
end
end

function testdl_ramirez
% Smoke test: learn an incoherent dictionary on random data.
clc
N = 10;                  %ambient dimension
K = 20;                  %number of atoms
M = 30;                  %number of observed signals
X = randn(N,M);          %observed signals
D = normcol(randn(N,K)); %initial dictionary
Problem.b = X;           %sparse representation problem
Problem.b1 = X;
DL = SMALL_init_DL('dl_ramirez');
DL.param.initdict = D;
DL.param = struct('initdict',D,...
    'zeta',0.5,...
    'eta',0.5);
DL = SMALL_learn(Problem,DL);
end