toolboxes/FullBNT-1.0.7/netlab3.3/demhmc2.m @ 0:e9a9cd732c1e (tip)
author: wolffd
date:   Tue, 10 Feb 2015 15:05:51 +0000
commit: first hg version after svn
%DEMHMC2 Demonstrate Bayesian regression with Hybrid Monte Carlo sampling.
%
%	Description
%	The problem consists of one input variable X and one target variable
%	T, with data generated by sampling X from a Gaussian distribution
%	with mean 0.25 and standard deviation 0.1 and then generating target
%	data by computing SIN(2*PI*X) and adding Gaussian noise.  The model
%	is a 2-layer network with linear outputs, and the hybrid Monte Carlo
%	algorithm (without persistence) is used to sample from the posterior
%	distribution of the weights.  The graph shows the underlying
%	function, 100 samples from the function given by the posterior
%	distribution of the weights, and the average prediction (weighted by
%	the posterior probabilities).
%
%	See also
%	DEMHMC3, HMC, MLP, MLPERR, MLPGRAD
%

%	Copyright (c) Ian T Nabney (1996-2001)

% Generate the matrix of inputs x and targets t.
ndata = 20;     % Number of data points.
noise = 0.1;    % Standard deviation of noise distribution.
nin = 1;        % Number of inputs.
nout = 1;       % Number of outputs.

seed = 42;      % Seed for the random number generators.
randn('state', seed);
rand('state', seed);

x = 0.25 + 0.1*randn(ndata, nin);
t = sin(2*pi*x) + noise*randn(size(x));

clc
disp('This demonstration illustrates the use of the hybrid Monte Carlo')
disp('algorithm to sample from the posterior weight distribution of a')
disp('multi-layer perceptron.')
disp(' ')
disp('A regression problem is used, with the one-dimensional data drawn')
disp('from a noisy sine function.  The x values are sampled from a normal')
disp('distribution with mean 0.25 and variance 0.01.')
disp(' ')
disp('First we initialise the network.')
disp(' ')
disp('Press any key to continue.')
pause
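% Note: with x ~ N(0.25, 0.01), nearly all of the training inputs fall
% in roughly [0.05, 0.45] (mean +/- 2 standard deviations), so the
% posterior is only well constrained there; the final plot shows the
% sampled functions fanning out outside this region.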

% Set up network parameters.
nhidden = 5;    % Number of hidden units.
alpha = 0.001;  % Coefficient of weight-decay prior.
beta = 100.0;   % Coefficient of data error.

% Create and initialise network model.
% Initialise weights reasonably close to 0.
net = mlp(nin, nhidden, nout, 'linear', alpha, beta);
net = mlpinit(net, 10);
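% For reference (a sketch of the Netlab conventions assumed here, not a
% quote of the source): mlpinit(net, 10) draws the initial weights from
% a zero-mean Gaussian with inverse variance 10, and the quantity the
% sampler below treats as a potential energy is
%   E(w) = 0.5*beta*sum((y - t).^2) + 0.5*alpha*sum(w.^2)
% so the posterior over weights is p(w|D) proportional to exp(-E(w)).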

clc
disp('Next we take 100 samples from the posterior distribution.  The first')
disp('200 samples at the start of the chain are omitted as burn-in.  As')
disp('persistence is not used, the momentum is randomised at each step.')
disp('Each trajectory uses 100 leapfrog iterations.  The new state is')
disp('accepted if the threshold value is greater than a random number')
disp('between 0 and 1.')
disp(' ')
disp('Negative step numbers indicate samples discarded from the start of')
disp('the chain.')
disp(' ')
disp('Press any key to continue.')
pause
% Set up vector of options for hybrid Monte Carlo.
nsamples = 100;         % Number of retained samples.

options = foptions;     % Default options vector.
options(1) = 1;         % Switch on diagnostics.
options(7) = 100;       % Number of leapfrog steps in trajectory.
options(14) = nsamples; % Number of Monte Carlo samples returned.
options(15) = 200;      % Number of samples omitted at start of chain.
options(18) = 0.002;    % Leapfrog step size.
w = mlppak(net);
% Initialise the HMC random number state.
hmc('state', 42);
[samples, energies] = hmc('neterr', w, options, 'netgrad', net, x, t);
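% For intuition (a sketch of the Metropolis accept/reject rule used by
% HMC, not the hmc() source): a proposal wnew reached by the leapfrog
% trajectory is kept with probability
%   a = exp((Eold + Kold) - (Enew + Knew));  % E: neterr, K: kinetic term
%   if a > rand, w = wnew; end               % otherwise keep the old w
% samples is an nsamples x nwts matrix of retained weight vectors, and
% energies holds the corresponding error values.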

clc
disp('The plot shows the underlying noise-free function, the 100 samples')
disp('produced from the MLP, and their average as a Monte Carlo estimate')
disp('of the true posterior average.')
disp(' ')
disp('Press any key to continue.')
pause
nplot = 300;
plotvals = [0 : 1/(nplot - 1) : 1]';
pred = zeros(size(plotvals));
fh = figure;
for k = 1:nsamples
  w2 = samples(k,:);
  net2 = mlpunpak(net, w2);
  y = mlpfwd(net2, plotvals);
  % Average sample predictions as Monte Carlo estimate of true integral.
  pred = pred + y;
  h4 = plot(plotvals, y, '-r', 'LineWidth', 1);
  if k == 1
    hold on
  end
end
pred = pred./nsamples;
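% The averaged prediction approximates the posterior predictive mean,
%   yhat(x) = integral y(x; w) p(w|D) dw  ~=  (1/N) sum_k y(x; w_k),
% where the w_k are the HMC samples drawn above.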

% Plot data.
h1 = plot(x, t, 'ob', 'LineWidth', 2, 'MarkerFaceColor', 'blue');
axis([0 1 -3 3])

% Plot underlying function.
[fx, fy] = fplot('sin(2*pi*x)', [0 1], '--g');
h2 = plot(fx, fy, '--g', 'LineWidth', 2);
set(gca, 'box', 'on');

% Plot averaged prediction.
h3 = plot(plotvals, pred, '-c', 'LineWidth', 2);
hold off

lstrings = char('Data', 'Function', 'Prediction', 'Samples');
legend([h1 h2 h3 h4], lstrings, 3);

disp('Note how the predictions move much further from the true function')
disp('away from the region of high data density.')
disp(' ')
disp('Press any key to exit.')
pause
close(fh);
clear all;