%DEMMLP1 Demonstrate simple regression using a multi-layer perceptron
%
%	Description
%	The problem consists of one input variable X and one target variable
%	T with data generated by sampling X at equal intervals and then
%	generating target data by computing SIN(2*PI*X) and adding Gaussian
%	noise. A 2-layer network with linear outputs is trained by minimizing
%	a sum-of-squares error function using the scaled conjugate gradient
%	optimizer.
%
%	See also
%	MLP, MLPERR, MLPGRAD, SCG
%

%	Copyright (c) Ian T Nabney (1996-2001)


% Generate the matrix of inputs x and targets t.

ndata = 20;			% Number of data points.
noise = 0.2;			% Standard deviation of noise distribution.
x = [0:1/(ndata - 1):1]';	% Inputs sampled at equal intervals on [0, 1].
randn('state', 1);		% Fix the noise seed so the demo is reproducible.
t = sin(2*pi*x) + noise*randn(ndata, 1);

clc
disp('This demonstration illustrates the use of a Multi-Layer Perceptron')
disp('network for regression problems. The data is generated from a noisy')
disp('sine function.')
disp(' ')
disp('Press any key to continue.')
pause

% Set up network parameters.
nin = 1;			% Number of inputs.
nhidden = 3;			% Number of hidden units.
nout = 1;			% Number of outputs.
alpha = 0.01;			% Coefficient of weight-decay prior.

% Create and initialize network weight vector.

net = mlp(nin, nhidden, nout, 'linear', alpha);

% Set up vector of options for the optimiser.

options = zeros(1,18);
options(1) = 1;			% This provides display of error values.
options(14) = 100;		% Number of training cycles.

clc
disp(['The network has ', num2str(nhidden), ' hidden units and a weight decay'])
disp(['coefficient of ', num2str(alpha), '.'])
disp(' ')
% Fixed grammar in the message below: "train it use" -> "train it using".
disp('After initializing the network, we train it using the scaled conjugate')
disp('gradients algorithm for 100 cycles.')
disp(' ')
disp('Press any key to continue')
pause

% Train using scaled conjugate gradients.
[net, options] = netopt(net, options, x, t, 'scg');

disp(' ')
disp('Now we plot the data, underlying function, and network outputs')
disp('on a single graph to compare the results.')
disp(' ')
disp('Press any key to continue.')
pause

% Plot the data, the original function, and the trained network function.
plotvals = [0:0.01:1]';		% Dense grid for smooth curves.
y = mlpfwd(net, plotvals);	% Forward-propagate the grid through the net.
fh1 = figure;
plot(x, t, 'ob')
hold on
xlabel('Input')
ylabel('Target')
axis([0 1 -1.5 1.5])
[fx, fy] = fplot('sin(2*pi*x)', [0 1]);
plot(fx, fy, '-r', 'LineWidth', 2)
plot(plotvals, y, '-k', 'LineWidth', 2)
legend('data', 'function', 'network');

disp(' ')
disp('Press any key to end.')
pause
close(fh1);
clear all;