annotate toolboxes/MIRtoolbox1.3.2/somtoolbox/neural_gas.m @ 0:cc4b1211e677 tip

initial commit to HG from Changeset: 646 (e263d8a21543) added further path and more save "camirversion.m"
author Daniel Wolff
date Fri, 19 Aug 2016 13:07:06 +0200
parents
children
rev   line source
function [Neurons] = neural_gas(D,n,epochs,alpha0,lambda0)

%NEURAL_GAS Quantizes the data space using the neural gas algorithm.
%
% Neurons = neural_gas(D, n, epochs, [alpha0], [lambda0])
%
%  C = neural_gas(D,50,10);
%  sM = som_map_struct(sD);
%  sM.codebook = neural_gas(sD,size(sM.codebook,1),10);
%
%  Input and output arguments ([]'s are optional):
%   D          (matrix) the data matrix, size dlen x dim
%              (struct) a data struct
%   n          (scalar) the number of neurons
%   epochs     (scalar) the number of training epochs (the number of
%                       training steps is dlen*epochs)
%   [alpha0]   (scalar) initial step size, 0.5 by default
%   [lambda0]  (scalar) initial decay constant, n/2 by default
%
%   Neurons    (matrix) the neuron matrix, size n x dim
%
% Missing data: NaN components of a sample vector are ignored for that
% training step; only the known components of the neurons are updated.
%
% See also SOM_MAKE, KMEANS.

% References:
%  T.M.Martinetz, S.G.Berkovich, and K.J.Schulten. "Neural-gas" network
%  for vector quantization and its application to time-series prediction.
%  IEEE Transactions on Neural Networks, 4(4):558-569, 1993.

% Contributed to SOM Toolbox vs2, February 2nd, 2000 by Juha Vesanto
% Copyright (c) by Juha Vesanto
% http://www.cis.hut.fi/projects/somtoolbox/

% juuso 101297 020200

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Check arguments and initialize

error(nargchk(3, 5, nargin));  % check the number of input arguments

if isstruct(D), D = D.data; end
[dlen,dim] = size(D);
Neurons = (rand(n,dim)-0.5)*10e-5; % small initial values
train_len = epochs*dlen;

% BUGFIX: use short-circuit || so the isempty/isnan tests are skipped
% when the argument was not supplied at all; the original plain |
% evaluated both operands and raised an "undefined variable" error
% whenever alpha0/lambda0 were omitted, making the defaults unreachable.
if nargin<4 || isempty(alpha0) || isnan(alpha0), alpha0 = 0.5; end
if nargin<5 || isempty(lambda0) || isnan(lambda0), lambda0 = n/2; end

% random sample order
rand('state',sum(100*clock));
sample_inds = ceil(dlen*rand(train_len,1));

% lambda: neighborhood decay constant, shrinking exponentially
% from lambda0 towards 0.01 over the course of training
lambda = lambda0 * (0.01/lambda0).^([0:(train_len-1)]/train_len);

% alpha: step size, shrinking exponentially from alpha0 towards 0.005
alpha = alpha0 * (0.005/alpha0).^([0:(train_len-1)]/train_len);

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Action

for i=1:train_len,

  % sample vector
  x = D(sample_inds(i),:);          % sample vector
  known = ~isnan(x);                % its known components
  nknown = sum(known);              % number of known components
  X = x(ones(n,1),known);           % sample replicated to n rows

  % neighborhood ranking
  Dx = Neurons(:,known) - X;        % difference between vector and all map units
  % BUGFIX: the original computed (Dx.^2)*known', whose inner dimensions
  % (sum(known) vs. dim) only match when x has no NaNs; sum squared
  % differences over the known components instead.
  [qerrs, inds] = sort(sum(Dx.^2,2));   % 1-BMU, 2-BMU, etc.
  ranking(inds) = [0:(n-1)];
  h = exp(-ranking/lambda(i));
  % BUGFIX: replicate h once per *known* component; the original used
  % length(known)=dim copies, which breaks when x contains NaNs.
  H = h(ones(nknown,1),:)';

  % update only the known components of the neurons
  % (BUGFIX: the original added an n x nknown increment to the full
  % n x dim Neurons matrix, which errors out for samples with NaNs)
  Neurons(:,known) = Neurons(:,known) + alpha(i)*H.*(X - Neurons(:,known));

  % track
  fprintf(1,'%d / %d \r',i,train_len);
  if 0 & mod(i,50) == 0,  % disabled debug visualization (3-D data only)
    hold off, plot3(D(:,1),D(:,2),D(:,3),'bo')
    hold on, plot3(Neurons(:,1),Neurons(:,2),Neurons(:,3),'r+')
    drawnow
  end
end

fprintf(1,'\n');

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%