%DEMHMC2 Demonstrate Bayesian regression with Hybrid Monte Carlo sampling.
%
%	Description
%	The problem consists of one input variable X and one target variable
%	T, with data generated by sampling X from a Gaussian distribution
%	with mean 0.25 and standard deviation 0.1, and then generating
%	target data by computing SIN(2*PI*X) and adding Gaussian noise.  The
%	model is a 2-layer network with linear outputs, and the hybrid Monte
%	Carlo algorithm (without persistence) is used to sample from the
%	posterior distribution of the weights.  The graph shows the
%	underlying function, 100 samples from the function given by the
%	posterior distribution of the weights, and the average prediction
%	(weighted by the posterior probabilities).
%
%	See also
%	DEMHMC3, HMC, MLP, MLPERR, MLPGRAD
%

%	Copyright (c) Ian T Nabney (1996-2001)

% Generate the matrix of inputs x and targets t.
ndata = 20;		% Number of data points.
noise = 0.1;		% Standard deviation of noise distribution.
nin = 1;		% Number of inputs.
nout = 1;		% Number of outputs.

seed = 42;		% Seed for random number generators.
randn('state', seed);
rand('state', seed);

x = 0.25 + 0.1*randn(ndata, nin);
t = sin(2*pi*x) + noise*randn(size(x));

clc
disp('This demonstration illustrates the use of the hybrid Monte Carlo')
disp('algorithm to sample from the posterior weight distribution of a')
disp('multi-layer perceptron.')
disp(' ')
disp('A regression problem is used, with the one-dimensional data drawn')
disp('from a noisy sine function.  The x values are sampled from a normal')
disp('distribution with mean 0.25 and variance 0.01.')
disp(' ')
disp('First we initialise the network.')
disp(' ')
disp('Press any key to continue.')
pause

% Set up network parameters.
nhidden = 5;		% Number of hidden units.
alpha = 0.001;		% Coefficient of weight-decay prior.
beta = 100.0;		% Coefficient of data error.

% Create and initialise network model.
% Initialise weights reasonably close to 0 by sampling from a zero-mean
% Gaussian prior with inverse variance 10.
net = mlp(nin, nhidden, nout, 'linear', alpha, beta);
net = mlpinit(net, 10);

clc
disp('Next we take 100 samples from the posterior distribution.  The first')
disp('200 samples at the start of the chain are omitted.  As persistence')
disp('is not used, the momentum is randomised at each step.  Each trajectory')
disp('uses 100 leapfrog iterations.  The new state is accepted if the')
disp('acceptance threshold is greater than a random number between 0 and 1.')
disp(' ')
disp('Negative step numbers indicate samples discarded from the start of the')
disp('chain.')
disp(' ')
disp('Press any key to continue.')
pause

% Set up vector of options for hybrid Monte Carlo.
nsamples = 100;		% Number of retained samples.

options = foptions;	% Default options vector.
options(1) = 1;		% Switch on diagnostics.
options(7) = 100;	% Number of steps in trajectory.
options(14) = nsamples;	% Number of Monte Carlo samples returned.
options(15) = 200;	% Number of samples omitted at start of chain.
options(18) = 0.002;	% Step size.

w = mlppak(net);
% Initialise the HMC sampler's internal random number state.
hmc('state', 42);
[samples, energies] = hmc('neterr', w, options, 'netgrad', net, x, t);

clc
disp('The plot shows the underlying noise-free function, the 100 samples')
disp('produced from the MLP, and their average as a Monte Carlo estimate')
disp('of the true posterior average.')
disp(' ')
disp('Press any key to continue.')
pause

nplot = 300;
plotvals = [0 : 1/(nplot - 1) : 1]';
pred = zeros(size(plotvals));
fh = figure;
for k = 1:nsamples
  w2 = samples(k,:);
  net2 = mlpunpak(net, w2);
  y = mlpfwd(net2, plotvals);
  % Average sample predictions as Monte Carlo estimate of true integral.
  pred = pred + y;
  h4 = plot(plotvals, y, '-r', 'LineWidth', 1);
  if k == 1
    hold on
  end
end
pred = pred./nsamples;

% Plot data
h1 = plot(x, t, 'ob', 'LineWidth', 2, 'MarkerFaceColor', 'blue');
axis([0 1 -3 3])

% Plot function
[fx, fy] = fplot('sin(2*pi*x)', [0 1], '--g');
h2 = plot(fx, fy, '--g', 'LineWidth', 2);
set(gca, 'box', 'on');

% Plot averaged prediction
h3 = plot(plotvals, pred, '-c', 'LineWidth', 2);
hold off

lstrings = char('Data', 'Function', 'Prediction', 'Samples');
legend([h1 h2 h3 h4], lstrings, 3);

disp('Note how the sampled predictions spread much further from the true')
disp('function away from the region of high data density.')
disp(' ')
disp('Press any key to exit.')
pause

close(fh);
clear all;
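
% ----------------------------------------------------------------------
% Illustrative sketch (not part of the original demo): the acceptance rule
% quoted above is the Metropolis test at the heart of hybrid Monte Carlo.
% The fragment below performs a single HMC step on a toy 1-D Gaussian
% energy E(w) = w^2/2, using the same leapfrog discretisation that HMC
% applies to the network error surface.  All variable names here are
% illustrative only; the step size and trajectory length echo options(18)
% and options(7) above.
Efn = @(w) 0.5*w.^2;		% toy energy function
gradE = @(w) w;			% its gradient
w1 = 1.0;			% current state
step = 0.2;			% step size (options(18) in the demo)
L = 100;			% leapfrog steps per trajectory (options(7))

p = randn;			% momentum randomised each step (no persistence)
Hold = Efn(w1) + 0.5*p^2;	% Hamiltonian at the start of the trajectory

wnew = w1;
p = p - 0.5*step*gradE(wnew);	% initial half step in momentum
for i = 1:L
  wnew = wnew + step*p;		% full step in position
  if i < L
    p = p - step*gradE(wnew);	% full step in momentum
  end
end
p = p - 0.5*step*gradE(wnew);	% final half step in momentum
Hnew = Efn(wnew) + 0.5*p^2;

% Accept when the threshold exp(Hold - Hnew) exceeds a uniform random draw;
% an exact (energy-conserving) trajectory would always be accepted.
if rand < exp(Hold - Hnew)
  w1 = wnew;
end
% ----------------------------------------------------------------------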