% Make a linear dynamical system (LDS) and check that BNT's DBN inference
% engines agree with each other and with the dedicated Kalman filter code.
%   X1 -> X2     hidden state evolves linearly: x(t+1) = A x(t) + noise
%   |     |
%   v     v
%   Y1    Y2     observation: y(t) = C x(t) + noise
%
% NOTE(review): this file had been corrupted by embedded "wolffd@0:"
% hg-annotate prefixes; the script below is the reconstructed original.

% 2-node slice: node 1 = hidden X, node 2 = observed Y
intra = zeros(2);
intra(1,2) = 1;          % X -> Y within a slice
inter = zeros(2);
inter(1,1) = 1;          % X(t) -> X(t+1) between slices
n = 2;                   % nodes per slice

X = 2; % size of hidden state
Y = 2; % size of observable state

ns = [X Y];
dnodes = [];             % no discrete nodes: fully linear-Gaussian
onodes = [2];            % Y is observed
% Equivalence classes: all Y-CPDs tied (class 2); X(1) prior is class 1,
% X(t)|X(t-1) transition tied across t>=2 (class 3).
eclass1 = [1 2];
eclass2 = [3 2];
bnet = mk_dbn(intra, inter, ns, 'discrete', dnodes, 'eclass1', eclass1, 'eclass2', eclass2, ...
	      'observed', onodes);

% Random initial parameters (cov_prior_weight=0 so EM uses pure ML updates)
x0 = rand(X,1);          % initial state mean
V0 = eye(X);             % initial state covariance
C0 = rand(Y,X);          % observation matrix
R0 = eye(Y);             % observation noise covariance
A0 = rand(X,X);          % transition matrix
Q0 = eye(X);             % transition noise covariance

bnet.CPD{1} = gaussian_CPD(bnet, 1, 'mean', x0, 'cov', V0, 'cov_prior_weight', 0);
bnet.CPD{2} = gaussian_CPD(bnet, 2, 'mean', zeros(Y,1), 'cov', R0, 'weights', C0, ...
			   'clamp_mean', 1, 'cov_prior_weight', 0);
bnet.CPD{3} = gaussian_CPD(bnet, 3, 'mean', zeros(X,1), 'cov', Q0, 'weights', A0, ...
			   'clamp_mean', 1, 'cov_prior_weight', 0);


T = 5; % fixed length sequences

clear engine;
engine{1} = kalman_inf_engine(bnet);
engine{2} = jtree_unrolled_dbn_inf_engine(bnet, T);
engine{3} = jtree_dbn_inf_engine(bnet);
N = length(engine);

% inference

ev = sample_dbn(bnet, T);
evidence = cell(n,T);
evidence(onodes,:) = ev(onodes, :);   % only the observed rows are filled in

t = 1;
query = [1 3];   % joint marginal on X(1) and X(2) (node 3 = X in slice 2)
m = cell(1, N);
ll = zeros(1, N);
for i=1:N
  [engine{i}, ll(i)] = enter_evidence(engine{i}, evidence);
  m{i} = marginal_nodes(engine{i}, query, t);
end

% compare all engines to engine{1}
for i=2:N
  assert(approxeq(m{1}.mu, m{i}.mu));
  assert(approxeq(m{1}.Sigma, m{i}.Sigma));
  assert(approxeq(ll(1), ll(i)));
end

if 0  % debugging: print the comparisons instead of asserting
  for i=2:N
    approxeq(m{1}.mu, m{i}.mu)
    approxeq(m{1}.Sigma, m{i}.Sigma)
    approxeq(ll(1), ll(i))
  end
end

% learning

ncases = 5;
cases = cell(1, ncases);
for i=1:ncases
  ev = sample_dbn(bnet, T);
  cases{i} = cell(n,T);
  cases{i}(onodes,:) = ev(onodes, :);
end

max_iter = 2;
bnet2 = cell(1,N);
LLtrace = cell(1,N);
for i=1:N
  [bnet2{i}, LLtrace{i}] = learn_params_dbn_em(engine{i}, cases, 'max_iter', max_iter);
end

% Extract the learned CPD parameters as plain structs: CPD{i,e} is the
% e'th equivalence class learned by engine i.
for i=1:N
  temp = bnet2{i};
  for e=1:3
    CPD{i,e} = struct(temp.CPD{e});
  end
end

% All engines must learn the same parameters and LL trace as engine{1}
for i=2:N
  assert(approxeq(LLtrace{i}, LLtrace{1}));
  for e=1:3
    assert(approxeq(CPD{i,e}.mean, CPD{1,e}.mean));
    assert(approxeq(CPD{i,e}.cov, CPD{1,e}.cov));
    assert(approxeq(CPD{i,e}.weights, CPD{1,e}.weights));
  end
end


% Compare to KF toolbox

data = zeros(Y, T, ncases);
for i=1:ncases
  data(:,:,i) = cell2num(cases{i}(onodes, :));
end
[A2, C2, Q2, R2, x2, V2, LL2trace] = learn_kalman(data, A0, C0, Q0, R0, x0, V0, max_iter);


% Engine 1's learned parameters should match learn_kalman's:
% class 1 = initial state, class 2 = observation, class 3 = transition.
e = 1;
assert(approxeq(x2, CPD{e,1}.mean))
assert(approxeq(V2, CPD{e,1}.cov))
assert(approxeq(C2, CPD{e,2}.weights))
assert(approxeq(R2, CPD{e,2}.cov));
assert(approxeq(A2, CPD{e,3}.weights))
assert(approxeq(Q2, CPD{e,3}.cov));
assert(approxeq(LL2trace, LLtrace{1}))