%DEMRBF1 Demonstrate simple regression using a radial basis function network.
%
%	Description
%	The problem consists of one input variable X and one target variable
%	T with data generated by sampling X at equal intervals and then
%	generating target data by computing SIN(2*PI*X) and adding Gaussian
%	noise. This data is the same as that used in demmlp1.
%
%	Three different RBF networks (with different activation functions)
%	are trained in two stages. First, a Gaussian mixture model is trained
%	using the EM algorithm, and the centres of this model are used to set
%	the centres of the RBF. Second, the output weights (and biases) are
%	determined using the pseudo-inverse of the design matrix.
%
%	See also
%	DEMMLP1, RBF, RBFFWD, GMM, GMMEM
%

%	Copyright (c) Ian T Nabney (1996-2001)


% Generate the matrix of inputs x and targets t.
% Seed both generators so the demo is reproducible from run to run.
randn('state', 42);
rand('state', 42);
ndata = 20;			% Number of data points.
noise = 0.2;			% Standard deviation of noise distribution.
x = (linspace(0, 1, ndata))';
t = sin(2*pi*x) + noise*randn(ndata, 1);
% Standardise the inputs to zero mean and unit standard deviation
% before training; mu and sigma are reused later to map plot points
% into the same normalised space.
mu = mean(x);
sigma = std(x);
tr_in = (x - mu)./(sigma);

clc
disp('This demonstration illustrates the use of a Radial Basis Function')
disp('network for regression problems. The data is generated from a noisy')
disp('sine function.')
disp(' ')
disp('Press any key to continue.')
pause
% Set up network parameters.
nin = 1;			% Number of inputs.
nhidden = 7;			% Number of hidden units.
nout = 1;			% Number of outputs.
clc
disp('We assess the effect of three different activation functions.')
disp('First we create a network with Gaussian activations.')
disp(' ')
disp('Press any key to continue.')
pause
% Create and initialize network weight and parameter vectors.
net = rbf(nin, nhidden, nout, 'gaussian');

disp('A two-stage training algorithm is used: it uses a small number of')
disp('iterations of EM to position the centres, and then the pseudo-inverse')
disp('of the design matrix to find the second layer weights.')
disp(' ')
disp('Press any key to continue.')
pause
disp('Error values from EM training.')
% Use fast training method: rbftrain places the centres with a Gaussian
% mixture model fitted by EM, then solves for the output layer directly.
options = foptions;
options(1) = 1;			% Display EM training
options(14) = 10;		% number of iterations of EM
net = rbftrain(net, options, tr_in, t);

disp(' ')
disp('Press any key to continue.')
pause
clc
disp('The second RBF network has thin plate spline activations.')
disp('The same centres are used again, so we just need to calculate')
disp('the second layer weights.')
disp(' ')
disp('Press any key to continue.')
pause
% Create a second RBF with thin plate spline functions
net2 = rbf(nin, nhidden, nout, 'tps');

% Re-use previous centres rather than calling rbftrain again
net2.c = net.c;
[y, act2] = rbffwd(net2, tr_in);

% Solve for new output weights and biases from RBF activations.
% A column of ones augments the design matrix to absorb the bias term,
% so the pseudo-inverse yields weights and bias in one least-squares solve.
temp = pinv([act2 ones(ndata, 1)]) * t;
net2.w2 = temp(1:nhidden, :);
net2.b2 = temp(nhidden+1, :);

disp('The third RBF network has r^4 log r activations.')
disp(' ')
disp('Press any key to continue.')
pause
% Create a third RBF with r^4 log r functions
net3 = rbf(nin, nhidden, nout, 'r4logr');

% Overwrite weight vector with parameters from first RBF
net3.c = net.c;
[y, act3] = rbffwd(net3, tr_in);
% Output weights and bias from one pseudo-inverse solve, as for net2.
temp = pinv([act3 ones(ndata, 1)]) * t;
net3.w2 = temp(1:nhidden, :);
net3.b2 = temp(nhidden+1, :);

disp('Now we plot the data, underlying function, and network outputs')
disp('on a single graph to compare the results.')
disp(' ')
disp('Press any key to continue.')
pause
% Plot the data, the original function, and the trained network functions.
% Plot points are normalised with the training-set mu and sigma so the
% networks see inputs on the same scale they were trained on.
plotvals = [x(1):0.01:x(end)]';
inputvals = (plotvals-mu)./sigma;
y = rbffwd(net, inputvals);
y2 = rbffwd(net2, inputvals);
y3 = rbffwd(net3, inputvals);
fh1 = figure;

plot(x, t, 'ob')
hold on
xlabel('Input')
ylabel('Target')
axis([x(1) x(end) -1.5 1.5])
[fx, fy] = fplot('sin(2*pi*x)', [x(1) x(end)]);
plot(fx, fy, '-r', 'LineWidth', 2)
plot(plotvals, y, '--g', 'LineWidth', 2)
plot(plotvals, y2, 'k--', 'LineWidth', 2)
plot(plotvals, y3, '-.c', 'LineWidth', 2)
legend('data', 'function', 'Gaussian RBF', 'Thin plate spline RBF', ...
  'r^4 log r RBF');
hold off

% Report training-set error for each activation function.
disp('RBF training errors are');
disp(['Gaussian ', num2str(rbferr(net, tr_in, t)), ' TPS ', ...
  num2str(rbferr(net2, tr_in, t)), ' R4logr ', num2str(rbferr(net3, tr_in, t))]);

disp(' ')
disp('Press any key to end.')
pause
close(fh1);
clear all;