annotate toolboxes/FullBNT-1.0.7/bnt/examples/static/mixexp3.m @ 0:cc4b1211e677 tip

initial commit to HG from Changeset: 646 (e263d8a21543) added further path and more save "camirversion.m"
author Daniel Wolff
date Fri, 19 Aug 2016 13:07:06 +0200
parents
children
rev   line source
Daniel@0 1 % Fit a piece-wise linear regression model (a mixture of 2 linear experts).
Daniel@0 2 % Here is the model
Daniel@0 3 %
Daniel@0 4 % X \
Daniel@0 5 % | |
Daniel@0 6 % Q |
Daniel@0 7 % | /
Daniel@0 8 % Y
Daniel@0 9 %
Daniel@0 10 % where all arcs point down.
Daniel@0 11 % We condition everything on X, so X is a root node. Q is a softmax, and Y is a linear Gaussian.
Daniel@0 12 % Q is hidden, X and Y are observed.
Daniel@0 13
Daniel@0 14 X = 1; % node numbers in topological order
Daniel@0 15 Q = 2;
Daniel@0 16 Y = 3;
Daniel@0 17 dag = zeros(3,3); % dag(i,j)=1 means an arc i -> j
Daniel@0 18 dag(X,[Q Y]) = 1; % X -> Q (gate input) and X -> Y (regression input)
Daniel@0 19 dag(Q,Y) = 1; % Q -> Y: the expert choice switches the regression
Daniel@0 20 ns = [1 2 1]; % make X and Y scalars, and have 2 experts
Daniel@0 21 dnodes = [2]; % Q is the only discrete node
Daniel@0 22 onodes = [1 3]; % X and Y are observed; Q is hidden
Daniel@0 23 bnet = mk_bnet(dag, ns, 'discrete', dnodes, 'observed', onodes);
Daniel@0 24
Daniel@0 25 IRLS_iter = 10; % number of IRLS iterations passed to the softmax CPD
Daniel@0 26 clamped = 0; % 0 => softmax parameters are not clamped (may be updated)
Daniel@0 27
Daniel@0 28 bnet.CPD{1} = root_CPD(bnet, 1); % X is a root node (conditioned on, not modeled)
Daniel@0 29
Daniel@0 30 % start with good initial params
Daniel@0 31 w = [-5 5]; % w(:,i) is the normal vector to the i'th decision boundary
Daniel@0 32 b = [0 0]; % b(i) is the offset (bias) to the i'th decision boundary
Daniel@0 33
Daniel@0 34 mu = [0 0]; % mu(:,i) is the mean of Y under expert i
Daniel@0 35 sigma = 1; % common variance used for every expert
Daniel@0 36 Sigma = repmat(sigma*eye(ns(Y)), [ns(Y) ns(Y) ns(Q)]); % one ns(Y)-by-ns(Y) covariance per expert
Daniel@0 37 W = [-1 1]; % regression weight of expert i on X
Daniel@0 38 W2 = reshape(W, [ns(Y) ns(X) ns(Q)]); % one ns(Y)-by-ns(X) weight matrix per expert
Daniel@0 39
Daniel@0 40 bnet.CPD{2} = softmax_CPD(bnet, 2, w, b, clamped, IRLS_iter); % gating network P(Q|X)
Daniel@0 41 bnet.CPD{3} = gaussian_CPD(bnet, 3, 'mean', mu, 'cov', Sigma, 'weights', W2); % experts P(Y|X,Q)
Daniel@0 42
Daniel@0 43
Daniel@0 44 engine = jtree_inf_engine(bnet);
Daniel@0 45
Daniel@0 46 evidence = cell(1,3);
Daniel@0 47 evidence{X} = 0.68; % observe only the input X; Y is left empty so we can predict it
Daniel@0 48
Daniel@0 49 engine = enter_evidence(engine, evidence);
Daniel@0 50
Daniel@0 51 m = marginal_nodes(engine, Y);
Daniel@0 52 m.mu % mean of the marginal on Y given X = 0.68