toolboxes/FullBNT-1.0.7/bnt/examples/static/brainy.m @ 0:e9a9cd732c1e (tip)
author: wolffd
date:   Tue, 10 Feb 2015 15:05:51 +0000
changeset description: first hg version after svn

% Example of explaining away, from
% http://www.ai.mit.edu/~murphyk/Bayes/bnintro.html#explainaway
%
% Suppose you have to be brainy or sporty to get into college.
% The CPT for C (college) is a deterministic OR of its parents (1=false, 2=true):
%
%   B  S  P(C=1)  P(C=2)
%   1  1   1.0     0.0
%   2  1   0.0     1.0
%   1  2   0.0     1.0
%   2  2   0.0     1.0
%
% If we observe that you are in college, you must be brainy or sporty or both.
% If we observe that you are in college and sporty, it is less likely that you
% are brainy, since brainy-ness and sporty-ness compete as causal explanations
% of the effect.

% B   S
%  \ /
%   C

% Node numbering and DAG: B and S are parents of C
B = 1; S = 2; C = 3;
dag = zeros(3,3);
dag([B S], C) = 1;
ns = 2*ones(1,3);                 % all nodes are binary
bnet = mk_bnet(dag, ns);
% Uniform priors on B and S
bnet.CPD{B} = tabular_CPD(bnet, B, 'CPT', [0.5 0.5]');
bnet.CPD{S} = tabular_CPD(bnet, S, 'CPT', [0.5 0.5]');
% CPT(b,s,c) = P(C=c | B=b, S=s): the deterministic OR from the table above
CPT = zeros(2,2,2);
CPT(1,1,:) = [1 0];
CPT(2,1,:) = [0 1];
CPT(1,2,:) = [0 1];
CPT(2,2,:) = [0 1];
bnet.CPD{C} = tabular_CPD(bnet, C, 'CPT', CPT);
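
% Optional sanity check (an added sketch in plain MATLAB, not part of the
% original example): the slice CPT(:,:,2) holds P(C=true|B,S) and should be
% the deterministic-OR truth table from the comment header.
assert(isequal(CPT(:,:,2), [0 1; 1 1]));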

engine = jtree_inf_engine(bnet);
ev = cell(1,3);
ev{C} = 2;
engine = enter_evidence(engine, ev);
m = marginal_nodes(engine, B);
fprintf('P(B=true|C=true) = %5.3f\n', m.T(2)) % 0.67
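% Hand check of the 0.67 above (added note, using only the priors and CPT
% defined in this file): P(C=t) = 0.25*(0+1+1+1) = 0.75 and
% P(B=t, C=t) = 0.25*(1+1) = 0.5, so P(B=t|C=t) = 0.5/0.75 = 2/3.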

ev{S} = 2;
engine = enter_evidence(engine, ev);
m = marginal_nodes(engine, B);
fprintf('P(B=true|C=true,S=true) = %5.3f\n', m.T(2)) % 0.5 = unconditional baseline P(B=true)
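
% Brute-force verification (an added sketch in plain MATLAB, independent of
% the BNT junction-tree engine; the variable names below are new): enumerate
% the 2x2x2 joint P(B,S,C) from the CPTs above and recompute both posteriors
% by direct summation. Expected output: 0.667 and 0.500, matching the engine.
joint = zeros(2,2,2);
for b = 1:2
  for s = 1:2
    for c = 1:2
      joint(b,s,c) = 0.5 * 0.5 * CPT(b,s,c);  % P(B=b) P(S=s) P(C=c|B=b,S=s)
    end
  end
end
pB_given_C  = sum(joint(2,:,2)) / sum(sum(joint(:,:,2)));  % P(B=t | C=t)
pB_given_CS = joint(2,2,2) / sum(joint(:,2,2));            % P(B=t | C=t, S=t)
fprintf('brute force: P(B=t|C=t) = %5.3f, P(B=t|C=t,S=t) = %5.3f\n', ...
        pB_given_C, pB_given_CS);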