Mercurial > hg > camir-aes2014
comparison toolboxes/FullBNT-1.0.7/bnt/examples/static/mixexp1.m @ 0:e9a9cd732c1e tip
first hg version after svn
author | wolffd |
---|---|
date | Tue, 10 Feb 2015 15:05:51 +0000 |
parents | |
children |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:e9a9cd732c1e |
---|---|
% Fit a piece-wise linear regression model (a mixture of experts).
%
% Model structure (all arcs point down):
%
%    X
%   / \
%  Q   |
%   \ /
%    Y
%
% Everything is conditioned on X, so X is a root node.
% Q is a softmax gate and Y is a linear Gaussian.
% Q is hidden; X and Y are observed.

X = 1;
Q = 2;
Y = 3;

dag = zeros(3,3);
dag(X, [Q Y]) = 1;
dag(Q, Y) = 1;

ns = [1 2 1];   % X and Y are scalars; Q ranges over 2 experts
dnodes = [Q];
onodes = [X Y];
bnet = mk_bnet(dag, ns, 'discrete', dnodes, 'observed', onodes);

% Softmax (gating) parameters:
% w(:,i) is the normal vector of the i'th decision boundary,
% b(i) is its offset (bias).
w = [-5 5];
b = [0 0];

% Linear-Gaussian expert parameters.
mu = [0 0];
sigma = 1;
Sigma = repmat(sigma*eye(ns(Y)), [ns(Y) ns(Y) ns(Q)]);
W = [-1 1];
W2 = reshape(W, [ns(Y) ns(X) ns(Q)]);

bnet.CPD{X} = root_CPD(bnet, X);
bnet.CPD{Q} = softmax_CPD(bnet, Q, w, b);
bnet.CPD{Y} = gaussian_CPD(bnet, Y, 'mean', mu, 'cov', Sigma, 'weights', W2);

% Check inference against a hand computation.

x = 0.1;
ystar = 1;

engine = jtree_inf_engine(bnet);
[engine, loglik] = enter_evidence(engine, {x, [], ystar});
Qpost = marginal_nodes(engine, Q);

% eta(i,:) = softmax (gating) params for expert i
eta = [b' w'];

% theta(i,:) = regression vector for expert i
theta = [mu' W'];

% yhat(i) = E[y | Q=i, x] = prediction of the i'th expert
x1 = [1 x]';
yhat = theta * x1;

% gate_prior(i,:) = Pr(Q=i | x)
gate_prior = normalise(exp(eta * x1));

% cond_lik(i) = Pr(y | Q=i, x)
cond_lik = (1/(sqrt(2*pi)*sigma)) * exp(-(0.5/sigma^2) * ((ystar - yhat) .* (ystar - yhat)));

% gate_posterior(i,:) = Pr(Q=i | x, y)
[gate_posterior, lik] = normalise(gate_prior .* cond_lik);

assert(approxeq(gate_posterior(:), Qpost.T(:)));
assert(approxeq(log(lik), loglik));