% Check sampling on a mixture of experts model
%
% X \
% |  |
% Q  |
% |  /
% Y
%
% where all arcs point down.
% We condition everything on X, so X is a root node. Q is a softmax, and Y is a linear Gaussian.
% Q is hidden, X and Y are observed.

X = 1;
Q = 2;
Y = 3;
dag = zeros(3,3);
dag(X,[Q Y]) = 1;
dag(Q,Y) = 1;
ns = [1 2 2];
dnodes = [2];
bnet = mk_bnet(dag, ns, dnodes);

x = 0.5;
bnet.CPD{1} = root_CPD(bnet, 1, x);
bnet.CPD{2} = softmax_CPD(bnet, 2);
bnet.CPD{3} = gaussian_CPD(bnet, 3);

data_case = sample_bnet(bnet, 'evidence', {0.8, [], []})
ll = log_lik_complete(bnet, data_case)

data_case = sample_bnet(bnet, 'evidence', {-11, [], []})
ll = log_lik_complete(bnet, data_case)
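
% A minimal sanity-check sketch (not part of the original script): draw several
% samples at a fixed input and record the hidden expert label Q and the output Y
% from each case, so the two mixture components can be inspected. The sample
% count and the variable names qs/ys are illustrative assumptions; only
% sample_bnet and the per-node cell-array case format used above are relied on.
nsamples = 100;
qs = zeros(1, nsamples);
ys = zeros(ns(Y), nsamples);
for s=1:nsamples
  case_s = sample_bnet(bnet, 'evidence', {0.8, [], []});
  qs(s) = case_s{Q};      % discrete expert label (1 or 2)
  ys(:,s) = case_s{Y};    % continuous output for this sample
end
% With the randomly initialized softmax and Gaussian CPDs, the columns of ys
% should fall into two groups, one per value of Q.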