annotate toolboxes/FullBNT-1.0.7/netlabKPM/mlphdotv_weighted.m @ 0:cc4b1211e677 tip

initial commit to HG from Changeset: 646 (e263d8a21543) added further path and more save "camirversion.m"
author Daniel Wolff
date Fri, 19 Aug 2016 13:07:06 +0200

function hdv = mlphdotv_weighted(net, x, t, eso_w, v)
%MLPHDOTV_WEIGHTED Evaluate the product of the weighted data Hessian with a vector.
%
% Description
%
% HDV = MLPHDOTV_WEIGHTED(NET, X, T, ESO_W, V) takes an MLP network data
% structure NET, together with the matrix X of input vectors, the matrix
% T of target vectors, the column vector ESO_W of per-example weights and
% an arbitrary row vector V whose length equals the number of parameters
% in the network, and returns the product of the weighted data-dependent
% contribution to the Hessian matrix with V. The implementation is based
% on the R-propagation algorithm of Pearlmutter.
%
% See also
% MLP, MLPHESS, HESSCHEK
%

% Copyright (c) Ian T Nabney (1996-9)
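
% Example (illustrative sketch): assuming the accompanying netlabKPM MLP
% constructor and hypothetical inputs X and T, the routine would be called as
%
%    net   = mlp(nin, nhidden, nout, 'softmax');
%    v     = mlppak(net);              % any row vector of matching length
%    eso_w = ones(size(x, 1), 1);      % per-example weights (column vector)
%    hdv   = mlphdotv_weighted(net, x, t, eso_w, v);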

% Check arguments for consistency
errstring = consist(net, 'mlp', x, t);
if ~isempty(errstring)
  error(errstring);
end

ndata = size(x, 1);

[y, z] = mlpfwd(net, x);        % Standard forward propagation.
zprime = (1 - z.*z);            % Hidden unit first derivatives.
zpprime = -2.0*z.*zprime;       % Hidden unit second derivatives.
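
% The hidden units are tanh (as computed by mlpfwd), so with z = tanh(a1) the
% derivatives above are dz/da1 = 1 - z.^2 and d2z/da1^2 = -2*z.*(1 - z.^2),
% applied element-wise.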

vnet = mlpunpak(net, v);        % Unpack the v vector.

% Do the R-forward propagation.

ra1 = x*vnet.w1 + ones(ndata, 1)*vnet.b1;
rz = zprime.*ra1;
ra2 = rz*net.w2 + z*vnet.w2 + ones(ndata, 1)*vnet.b2;
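
% Here R{.} denotes Pearlmutter's R-operator: the directional derivative of a
% quantity with respect to the network parameters in the direction v. The
% three lines above give R{a1}, R{z} = zprime.*R{a1} and R{a2}, obtained by
% the chain and product rules, with vnet holding v in weight-matrix form.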

switch net.actfn
  case 'softmax'        % Softmax outputs

    nout = size(t, 2);
    ry = y.*ra2 - y.*(sum(y.*ra2, 2)*ones(1, nout));

  otherwise
    error(['Unknown activation function ', net.actfn]);
end
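
% For softmax outputs y_k = exp(a_k)/sum_j exp(a_j), the Jacobian is
% dy_k/da_j = y_k*(delta_kj - y_j), so R{y} = y.*R{a2} - y.*sum(y.*R{a2}, 2),
% which is what ry computes row by row.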

% Evaluate a weighted delta for the output units.
temp = y - t;
for m = 1:ndata
  delout(m,:) = eso_w(m,1)*temp(m,:);
end
clear temp;
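
% Each row of (y - t) is scaled by its example weight; for an NDATA x 1 weight
% vector this is equivalent to delout = repmat(eso_w(:,1), 1, size(t, 2)).*(y - t).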

% Do the standard backpropagation.

delhid = zprime.*(delout*net.w2');
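
% delhid is the (weighted) hidden-unit delta of standard backpropagation; it is
% not referenced again below, where the R-propagated rdelhid is used instead.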

% Now do the R-backpropagation.

rdelhid = zpprime.*ra1.*(delout*net.w2') + zprime.*(delout*vnet.w2') + ...
  zprime.*(ry*net.w2');
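
% The three terms apply the product rule to delhid = zprime.*(delout*net.w2'):
% differentiating zprime (giving zpprime.*ra1), the second-layer weights
% (giving vnet.w2) and the output delta (giving ry) in turn.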

% Finally, evaluate the components of hdv and then merge into long vector.

hw1 = x'*rdelhid;
hb1 = sum(rdelhid, 1);
hw2 = z'*ry + rz'*delout;
hb2 = sum(ry, 1);
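
% The components are concatenated in the same order as the parameter vector
% produced by mlppak (w1, b1, w2, b2), so hdv is conformal with v.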
hdv = [hw1(:)', hb1, hw2(:)', hb2];