toolboxes/FullBNT-1.0.7/netlab3.3/mlphdotv.m
function hdv = mlphdotv(net, x, t, v)
%MLPHDOTV Evaluate the product of the data Hessian with a vector.
%
% Description
%
% HDV = MLPHDOTV(NET, X, T, V) takes an MLP network data structure NET,
% together with the matrix X of input vectors, the matrix T of target
% vectors and an arbitrary row vector V whose length equals the number
% of parameters in the network, and returns the product of the
% data-dependent contribution to the Hessian matrix with V. The
% implementation is based on the R-propagation algorithm of Pearlmutter.
%
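% Example
% A minimal usage sketch (assuming the Netlab functions MLP and MLPHESS
% are on the path; network sizes and data are arbitrary and purely
% illustrative):
%
%   net = mlp(2, 3, 1, 'linear');
%   x = randn(10, 2);  t = randn(10, 1);
%   v = randn(1, net.nwts);
%   hdv = mlphdotv(net, x, t, v);
%   [h, hdata] = mlphess(net, x, t);
%   max(abs(hdv - v*hdata))   % should be within rounding error of zero
%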
% See also
% MLP, MLPHESS, HESSCHEK
%

% Copyright (c) Ian T Nabney (1996-2001)

% Check arguments for consistency
errstring = consist(net, 'mlp', x, t);
if ~isempty(errstring)
  error(errstring);
end

ndata = size(x, 1);

[y, z] = mlpfwd(net, x);      % Standard forward propagation.
zprime = (1 - z.*z);          % Hidden unit first derivatives.
zpprime = -2.0*z.*zprime;     % Hidden unit second derivatives.
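% (The hidden units in a Netlab MLP are tanh, so with z = tanh(a1) both
% derivatives follow directly from z: dz/da1 = 1 - z.^2 and
% d2z/da1^2 = -2*z.*(1 - z.^2).)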

vnet = mlpunpak(net, v);      % Unpack the v vector.

% Do the R-forward propagation.
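% The R-operator R{.} is the directional derivative d/dr at r = 0 along
% the perturbation w + r*v, so each forward-propagation equation maps to
% its linearisation: R{a1} = x*Vw1 + Vb1, R{z} = zprime.*R{a1}, and R{a2}
% collects the R-propagated hidden activations plus the direct
% perturbation of the second-layer weights and biases.
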
ra1 = x*vnet.w1 + ones(ndata, 1)*vnet.b1;
rz = zprime.*ra1;
ra2 = rz*net.w2 + z*vnet.w2 + ones(ndata, 1)*vnet.b2;

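% Each case below applies the chain rule R{y} = (dy/da2).*R{a2} for its
% output activation; for softmax the Jacobian couples the outputs, which
% is where the subtracted sum(y.*ra2, 2) term comes from.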
switch net.outfn

  case 'linear'       % Linear outputs

    ry = ra2;

  case 'logistic'     % Logistic outputs

    ry = y.*(1 - y).*ra2;

  case 'softmax'      % Softmax outputs

    nout = size(t, 2);
    ry = y.*ra2 - y.*(sum(y.*ra2, 2)*ones(1, nout));

  otherwise
    error(['Unknown activation function ', net.outfn]);
end

% Evaluate delta for the output units.
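% For the canonical error and output-activation pairings used by Netlab
% (sum-of-squares with linear outputs, cross-entropy with logistic or
% softmax outputs) the delta has the same simple form in every case:
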
delout = y - t;

% Do the standard backpropagation.

delhid = zprime.*(delout*net.w2');   % Hidden unit deltas (not used below).

% Now do the R-backpropagation.
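% Applying R{.} to delhid = zprime.*(delout*w2') yields three terms by
% the product rule: R{zprime} = zpprime.*ra1, R{delout} = ry, and the
% direct perturbation vnet.w2 of the second-layer weights.
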
rdelhid = zpprime.*ra1.*(delout*net.w2') + zprime.*(delout*vnet.w2') + ...
  zprime.*(ry*net.w2');

% Finally, evaluate the components of hdv and then merge into long vector.

hw1 = x'*rdelhid;
hb1 = sum(rdelhid, 1);
hw2 = z'*ry + rz'*delout;
hb2 = sum(ry, 1);

hdv = [hw1(:)', hb1, hw2(:)', hb2];
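
% A quick numerical sanity check of the R-propagation result (a sketch,
% assuming the Netlab functions MLPPAK, MLPUNPAK and MLPGRAD are on the
% path and the network has no prior, so the gradient is purely
% data-dependent; run from the command line, not inside this function):
%
%   epsilon = 1e-6;
%   w = mlppak(net);
%   gplus = mlpgrad(mlpunpak(net, w + epsilon*v), x, t);
%   gminus = mlpgrad(mlpunpak(net, w - epsilon*v), x, t);
%   hdv_fd = (gplus - gminus)/(2*epsilon);   % central difference ~ hdv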