view DL/dl_ramirez.m @ 177:714fa7b8c1ad danieleb

added ramirez dl (to be completed) and MOCOD dictionary update
author Daniele Barchiesi <daniele.barchiesi@eecs.qmul.ac.uk>
date Thu, 17 Nov 2011 11:18:25 +0000
function DL = dl_ramirez(Problem,DL)
%% Dictionary learning with an incoherent dictionary
%
% REFERENCE
% I. Ramirez, F. Lecumberry and G. Sapiro, Sparse modeling with universal
% priors and learned incoherent dictionaries.
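%
% USAGE (sketch, following the test function at the bottom of this file;
% Problem and DL are the usual SMALLbox structures, and D0 below stands for
% any initial dictionary):
%	DL = SMALL_init_DL('dl_ramirez');
%	DL.param = struct('initdict',D0,'zeta',0.1,'eta',0.1,'iternum',40);
%	DL = SMALL_learn(Problem,DL);	%Problem.b holds the training signals
%
% NOTE: the sparse coding step uses CVX (http://cvxr.com/cvx/).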

%%
%   Centre for Digital Music, Queen Mary, University of London.
%   This file copyright 2011 Daniele Barchiesi.
%
%   This program is free software; you can redistribute it and/or
%   modify it under the terms of the GNU General Public License as
%   published by the Free Software Foundation; either version 2 of the
%   License, or (at your option) any later version.  See the file
%   COPYING included with this distribution for more information.

%% Test function
if ~nargin, testdl_ramirez; return; end

%% Parameters & Defaults
X = Problem.b;					%matrix of observed signals

% determine dictionary size %
if (isfield(DL.param,'initdict')) %if the dictionary has been initialised
	if (any(size(DL.param.initdict)==1) && all(iswhole(DL.param.initdict(:))))
		dictSize = length(DL.param.initdict);
	else
		dictSize = size(DL.param.initdict,2);
	end
end
if (isfield(DL.param,'dictsize'))
	dictSize = DL.param.dictsize;
end
if ~exist('dictSize','var')
	error('Dictionary size not specified: set DL.param.dictsize or provide DL.param.initdict');
end

if (size(X,2) < dictSize)
	error('Number of training signals is smaller than number of atoms to train');
end


% initialize the dictionary %
if (isfield(DL.param,'initdict')) && ~isempty(DL.param.initdict)
	if (any(size(DL.param.initdict)==1) && all(iswhole(DL.param.initdict(:))))
		D = X(:,DL.param.initdict(1:dictSize));
	else
		if (size(DL.param.initdict,1)~=size(X,1) || size(DL.param.initdict,2)<dictSize)
			error('Invalid initial dictionary');
		end
		D = DL.param.initdict(:,1:dictSize);
	end
else
	data_ids = find(colnorms_squared(X) > 1e-6);   % ensure no zero data elements are chosen
	perm = randperm(length(data_ids));
	D = X(:,data_ids(perm(1:dictSize)));
end


% coherence penalty factor
if isfield(DL.param,'zeta')
	zeta = DL.param.zeta;
else
	zeta = 0.1; 
end

% atoms norm penalty factor
if isfield(DL.param,'eta')
	eta = DL.param.eta;
else
	eta = 0.1; 
end

% number of iterations (default is 40) %
if isfield(DL.param,'iternum')
    iternum = DL.param.iternum;
else
    iternum = 40;
end

% show dictionary every specified number of iterations
if isfield(DL.param,'show_dict')
    show_dictionary=1;
    show_iter=DL.param.show_dict;
else
    show_dictionary=0;
    show_iter=0;
end

tmpTraining = Problem.b1;
Problem.b1 = X;
if isfield(Problem,'reconstruct')
    Problem = rmfield(Problem, 'reconstruct');
end


%% Main Algorithm
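% Alternate between (i) a sparse coding step that uses a mixture-of-Laplacians
% (MOL) universal prior, solved by iteratively reweighted l1 minimisation, and
% (ii) a closed-form dictionary update that penalises mutual coherence (zeta)
% and the atom norms (eta); see the reference above.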
Dprev = D;						%initial dictionary
Aprev = D\X;				    %initial coefficients: least-squares solution of D*A = X
for i = 1:iternum
	%Sparse coding step (MOL prior, iteratively reweighted l1)
	A = sparsecoding(X,D,Aprev);
	%Dictionary Update
	D = dictionaryupdate(X,A,Dprev,zeta,eta);
	
	Dprev = D;
	Aprev = A;
   if ((show_dictionary)&&(mod(i,show_iter)==0))
       dictimg = SMALL_showdict(D,[8 8],...
            round(sqrt(size(D,2))),round(sqrt(size(D,2))),'lines','highcontrast');  
       figure(2); imagesc(dictimg);colormap(gray);axis off; axis image;
       pause(0.02);
   end
end

Problem.b1 = tmpTraining;
DL.D = D;

end

function A = sparsecoding(X,D,Aprev)
%Sparse coding using a mixture of Laplacians (MOL) as a universal prior.
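%Under the MOL prior p(a) = kappa*beta^kappa./(2*(abs(a)+beta).^(kappa+1)),
%the first absolute moment is beta/(kappa-1) and the second moment is
%2*beta^2/((kappa-1)*(kappa-2)). The estimates of kappa and beta below follow
%by matching these to the empirical moments mu1 and mu2 of the previous
%coefficient estimate (a sketch of the moment-matching argument; see the
%Ramirez et al. reference for details).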

%parameters
K = size(D,2);	%number of atoms
M = size(X,2);	%number of signals

mu1 = mean(abs(Aprev(:)));			%first absolute moment of the entries of Aprev
mu2 = (norm(Aprev(:))^2)/numel(Aprev);%second moment of the entries of Aprev
kappa = 2*(mu2-mu1^2)/(mu2-2*mu1^2); %shape parameter kappa of the MOL prior (moment matching)
beta = (kappa-1)*mu1;				%scale parameter beta of the MOL prior

E = X-D*Aprev;						%error term
sigmasq = mean(var(E));				%error variance
tau = 2*sigmasq*(kappa+1);			%sparsity factor

%solve a succession of convex subproblems (iteratively reweighted l1) that
%approximate the non-convex MOL-penalised cost function
nIter = 10;							%number of reweighting iterations
Psi   = zeros(K,M);					%initialise solution of subproblem
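% Each pass linearises the concave MOL penalty log(abs(a)+beta) at the current
% estimate Psi, which yields the per-coefficient weights 1./(abs(Psi)+beta)
% used below; the resulting weighted l1 problem is then solved column by
% column in solvel1 (standard reweighted-l1 reasoning, sketched here; the
% exact surrogate follows the Ramirez et al. reference).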
for iIter=1:nIter
	Reg = 1./(abs(Psi) + beta);
	Psi = solvel1(X,D,tau,Reg);
end
A = Psi;
end

function Psi = solvel1(X,D,tau,A)
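	%Weighted l1-regularised coding of each column of X, solved with CVX
	%(http://cvxr.com/cvx/ must be on the path). The argument A holds the
	%per-coefficient weights computed in the reweighting loop of sparsecoding.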
	[K M] = size(A);
	Psi = zeros(K,M);
	for m=1:M
		cvx_begin quiet
			variable v(K)
			minimise (norm(X(:,m)-D*v) + tau*norm(A(:,m).*v,1));
		cvx_end
		Psi(:,m) = v;
	end
end

function D = dictionaryupdate(X,A,Dprev,zeta,eta)
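	% Closed-form (MOCOD-style) update: with G = Dprev'*Dprev, the line below
	% solves the normal equations
	%	D*(A*A' + 2*zeta*G + 2*eta*diag(diag(G))) = X*A' + 2*(zeta+eta)*Dprev,
	% a quadratic surrogate of the data-fit plus coherence (zeta) and atom-norm
	% (eta) penalties in which the Gram matrix of the previous dictionary
	% stands in for D'*D (sketch only; see the reference above for the exact
	% formulation).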
	D = (X*A' + 2*(zeta + eta)*Dprev)/(A*A' + 2*zeta*(Dprev'*Dprev) + 2*eta*diag(diag(Dprev'*Dprev)));
end



function Y = colnorms_squared(X)
% compute in blocks to conserve memory
Y = zeros(1,size(X,2));
blocksize = 2000;
for i = 1:blocksize:size(X,2)
	blockids = i : min(i+blocksize-1,size(X,2));
	Y(blockids) = sum(X(:,blockids).^2);
end
end

function testdl_ramirez
	clc
	N = 10;		%ambient dimension
	K = 20;		%number of atoms
	M = 30;		%number of observed signals
	X = randn(N,M);		%observed signals
	D = normcol(randn(N,K)); %initial dictionary
	Problem.b = X;		%sparse representation problem
	Problem.b1 = X;
	DL = SMALL_init_DL('dl_ramirez');
	DL.param = struct('initdict',D,...
					  'zeta',0.5,...
					  'eta',0.5);
	DL = SMALL_learn(Problem,DL);
end