annotate toolboxes/FullBNT-1.0.7/bnt/examples/dynamic/kalman1.m @ 0:cc4b1211e677 tip

initial commit to HG from Changeset: 646 (e263d8a21543) added further path and more save "camirversion.m"
author Daniel Wolff
date Fri, 19 Aug 2016 13:07:06 +0200
% Make a linear dynamical system
%   X1 -> X2
%   |     |
%   v     v
%   Y1    Y2

intra = zeros(2);
intra(1,2) = 1;
inter = zeros(2);
inter(1,1) = 1;
n = 2;

X = 2; % size of hidden state
Y = 2; % size of observable state
ns = [X Y];
bnet = mk_dbn(intra, inter, ns, 'discrete', [], 'observed', 2);
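% Node 1 in each slice is the hidden state X and node 2 is the observation Y
% (n is the number of nodes per slice). intra(1,2)=1 encodes the within-slice
% arc X(t) -> Y(t); inter(1,1)=1 encodes the between-slice arc X(t) -> X(t+1).
% 'discrete', [] makes every node continuous (Gaussian), and 'observed', 2
% marks node 2 as observed in every slice.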

x0 = rand(X,1);  % initial state mean
V0 = eye(X);     % initial state covariance
C0 = rand(Y,X);  % observation matrix
R0 = eye(Y);     % observation noise covariance
A0 = rand(X,X);  % state transition matrix
Q0 = eye(X);     % process noise covariance
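% Together these define the usual LDS / Kalman filter model:
%   x(1)   ~ N(x0, V0)                           (initial state)
%   x(t+1) = A0 x(t) + w(t),  w(t) ~ N(0, Q0)    (state transition)
%   y(t)   = C0 x(t) + v(t),  v(t) ~ N(0, R0)    (observation)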

bnet.CPD{1} = gaussian_CPD(bnet, 1, 'mean', x0, 'cov', V0, 'cov_prior_weight', 0);
bnet.CPD{2} = gaussian_CPD(bnet, 2, 'mean', zeros(Y,1), 'cov', R0, 'weights', C0, ...
                           'clamp_mean', 1, 'cov_prior_weight', 0);
bnet.CPD{3} = gaussian_CPD(bnet, 3, 'mean', zeros(X,1), 'cov', Q0, 'weights', A0, ...
                           'clamp_mean', 1, 'cov_prior_weight', 0);
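% In the 2-slice network, node 1 is X(1), node 2 is Y(1) and node 3 is X(2),
% so CPD{1} is the initial-state prior, CPD{2} the observation model (tied
% across all slices) and CPD{3} the transition model. Clamping the means at
% zero makes EM update only the weights and covariances, and setting
% 'cov_prior_weight' to 0 removes the default covariance prior so the
% estimates are plain maximum likelihood.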


T = 5; % fixed length sequences

clear engine;
engine{1} = kalman_inf_engine(bnet);
engine{2} = jtree_unrolled_dbn_inf_engine(bnet, T);
engine{3} = jtree_dbn_inf_engine(bnet);
engine{end+1} = smoother_engine(jtree_2TBN_inf_engine(bnet));
N = length(engine);
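% All of these engines share the generic DBN inference interface, so any of
% them can also be used on its own for smoothing. A minimal sketch (not
% executed here; the evidence values are purely illustrative):
%   ev = cell(n, T);                      % one cell per node per slice
%   for t=1:T, ev{2,t} = rand(Y,1); end   % fill in the observed node only
%   [eng, loglik] = enter_evidence(engine{1}, ev);
%   m = marginal_nodes(eng, 1, T);        % posterior on X(T): m.mu, m.Sigma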


inf_time = cmp_inference_dbn(bnet, engine, T);

ncases = 2;
max_iter = 2;
[learning_time, CPD, LL, cases] = cmp_learning_dbn(bnet, engine, T, 'ncases', ncases, 'max_iter', max_iter);
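% cmp_inference_dbn checks that every engine produces the same marginals, and
% cmp_learning_dbn runs a few EM iterations with each engine on the same
% sampled data. As used below, CPD{e,i} is the CPD of node i learned with
% engine e, LL{e} is the corresponding log-likelihood trace, and cases holds
% the sampled training sequences.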


% Compare to the Kalman filter (KF) toolbox

onodes = bnet.observed;  % indices of the observed nodes within a slice
data = zeros(Y, T, ncases);
for i=1:ncases
  data(:,:,i) = cell2num(cases{i}(onodes, :));
end
[A2, C2, Q2, R2, x2, V2, LL2trace] = learn_kalman(data, A0, C0, Q0, R0, x0, V0, max_iter);

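% Both EM runs start from the same initial parameters (A0, C0, Q0, R0, x0, V0)
% and see the same training data, so the parameters and log-likelihood trace
% learned by BNT with engine e should agree with those returned by
% learn_kalman.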
e = 1;
assert(approxeq(x2, CPD{e,1}.mean))
assert(approxeq(V2, CPD{e,1}.cov))
assert(approxeq(C2, CPD{e,2}.weights))
assert(approxeq(R2, CPD{e,2}.cov));
assert(approxeq(A2, CPD{e,3}.weights))
assert(approxeq(Q2, CPD{e,3}.cov));
assert(approxeq(LL2trace, LL{1}))