toolboxes/FullBNT-1.0.7/netlab3.3/demmlp1.m @ 0:e9a9cd732c1e (tip)

author:  wolffd
date:    Tue, 10 Feb 2015 15:05:51 +0000
summary: first hg version after svn
%DEMMLP1 Demonstrate simple regression using a multi-layer perceptron
%
% Description
% The problem consists of one input variable X and one target variable
% T with data generated by sampling X at equal intervals and then
% generating target data by computing SIN(2*PI*X) and adding Gaussian
% noise. A 2-layer network with linear outputs is trained by minimizing
% a sum-of-squares error function using the scaled conjugate gradient
% optimizer.
%
% See also
% MLP, MLPERR, MLPGRAD, SCG
%

% Copyright (c) Ian T Nabney (1996-2001)


% Generate the matrix of inputs x and targets t.

ndata = 20;    % Number of data points.
noise = 0.2;   % Standard deviation of noise distribution.
x = [0:1/(ndata - 1):1]';
randn('state', 1);
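% ('state' seeds the legacy MATLAB random generator so the run is
% reproducible; on recent releases, rng(1) is the rough equivalent.)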
t = sin(2*pi*x) + noise*randn(ndata, 1);

clc
disp('This demonstration illustrates the use of a Multi-Layer Perceptron')
disp('network for regression problems. The data is generated from a noisy')
disp('sine function.')
disp(' ')
disp('Press any key to continue.')
pause

% Set up network parameters.
nin = 1;         % Number of inputs.
nhidden = 3;     % Number of hidden units.
nout = 1;        % Number of outputs.
alpha = 0.01;    % Coefficient of weight-decay prior.

% Create and initialize network weight vector.

net = mlp(nin, nhidden, nout, 'linear', alpha);
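% With a scalar weight-decay coefficient, the quantity minimised during
% training should be the regularised error E = Ed + 0.5*alpha*||w||^2,
% where Ed is the sum-of-squares data term (see MLPERR in "See also").
% As an optional sanity check, the regularised error of the untrained
% network can be inspected directly:
e0 = mlperr(net, x, t);   % initial error; expect this to fall after training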

% Set up vector of options for the optimiser.

options = zeros(1,18);
options(1) = 1;      % This provides display of error values.
options(14) = 100;   % Number of training cycles.
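% (The options vector follows the old MATLAB FOPTIONS convention used
% throughout Netlab: entries left at zero take the optimiser defaults.)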

clc
disp(['The network has ', num2str(nhidden), ' hidden units and a weight decay'])
disp(['coefficient of ', num2str(alpha), '.'])
disp(' ')
54 disp('After initializing the network, we train it use the scaled conjugate')
55 disp('gradients algorithm for 100 cycles.')
56 disp(' ')
57 disp('Press any key to continue')
pause

% Train using scaled conjugate gradients.
[net, options] = netopt(net, options, x, t, 'scg');
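% On return the optimiser updates the options vector; by the usual
% Netlab/FOPTIONS convention, options(8) should hold the final value of
% the error function, which can be reported directly:
fprintf('Final error after training: %g\n', options(8));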

disp(' ')
disp('Now we plot the data, underlying function, and network outputs')
disp('on a single graph to compare the results.')
disp(' ')
disp('Press any key to continue.')
pause

% Plot the data, the original function, and the trained network function.
plotvals = [0:0.01:1]';
y = mlpfwd(net, plotvals);
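% MLPFWD propagates plotvals forward through the trained network, so y
% holds the network's predictions on a dense grid over [0, 1].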
fh1 = figure;
plot(x, t, 'ob')
hold on
xlabel('Input')
ylabel('Target')
axis([0 1 -1.5 1.5])
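% In classic MATLAB, calling FPLOT with output arguments evaluates the
% function over the interval without drawing it, which is what allows
% the true sine curve to be plotted explicitly below.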
[fx, fy] = fplot('sin(2*pi*x)', [0 1]);
plot(fx, fy, '-r', 'LineWidth', 2)
plot(plotvals, y, '-k', 'LineWidth', 2)
legend('data', 'function', 'network');

disp(' ')
disp('Press any key to end.')
pause
close(fh1);
clear all;