% Make an HMM with mixture of Gaussian observations
%   Q1 ---> Q2
%  /  |   /  |
% M1  |  M2  |
%  \  v   \  v
%   Y1     Y2
% where Pr(m=j|q=i) is a multinomial and Pr(y|m,q) is a Gaussian

%seed = 3;
%rand('state', seed);
%randn('state', seed);

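% Node numbering within each slice: 1 = Q (hidden state), 2 = M (mixture
% component), 3 = Y (observation).  'intra' encodes the within-slice arcs
% Q->M, Q->Y and M->Y; 'inter' encodes the between-slice arc Q(t)->Q(t+1).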
intra = zeros(3);
intra(1,[2 3]) = 1;
intra(2,3) = 1;
inter = zeros(3);
inter(1,1) = 1;
n = 3; % num nodes per slice

Q = 2; % num hidden states
O = 2; % size of observed vector
M = 2; % num mixture components per state

ns = [Q M O];       % node sizes
dnodes = [1 2];     % Q and M are discrete
onodes = [3];       % Y is observed
eclass1 = [1 2 3];  % equivalence classes for slice 1
eclass2 = [4 2 3];  % slice-2 Q gets its own CPD (the transition matrix); M and Y are tied across slices
bnet = mk_dbn(intra, inter, ns, 'discrete', dnodes, 'eclass1', eclass1, 'eclass2', eclass2, ...
              'observed', onodes);

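% The CPDs are indexed by equivalence class:
%   1 = Pr(Q1) (initial state prior),  2 = Pr(M|Q) (mixing weights),
%   3 = Pr(Y|Q,M) (Gaussian),          4 = Pr(Q(t)|Q(t-1)) (transition matrix).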
% Random initial parameters
prior0 = normalise(rand(Q,1));
transmat0 = mk_stochastic(rand(Q,Q));
mixmat0 = mk_stochastic(rand(Q,M));
mu0 = rand(O,Q,M);
Sigma0 = repmat(eye(O), [1 1 Q M]);
bnet.CPD{1} = tabular_CPD(bnet, 1, prior0);
bnet.CPD{2} = tabular_CPD(bnet, 2, mixmat0);
%% We set the cov prior to 0 to give the same results as the HMM toolbox
%bnet.CPD{3} = gaussian_CPD(bnet, 3, 'mean', mu0, 'cov', Sigma0, 'cov_prior_weight', 0);
% The new version of the HMM toolbox uses the same default prior on Gaussians as BNT
bnet.CPD{3} = gaussian_CPD(bnet, 3, 'mean', mu0, 'cov', Sigma0);
bnet.CPD{4} = tabular_CPD(bnet, 4, transmat0);

T = 5; % fixed length sequences

% Inference engines to compare
engine = {};
engine{end+1} = hmm_inf_engine(bnet);
engine{end+1} = smoother_engine(jtree_2TBN_inf_engine(bnet));
engine{end+1} = smoother_engine(hmm_2TBN_inf_engine(bnet));
if 0 % additional engines, currently disabled
  engine{end+1} = jtree_unrolled_dbn_inf_engine(bnet, T);
  %engine{end+1} = frontier_inf_engine(bnet);
  engine{end+1} = bk_inf_engine(bnet, 'clusters', 'exact');
  engine{end+1} = jtree_dbn_inf_engine(bnet);
end

% Check that all the engines give the same results (and time them)
inf_time = cmp_inference_dbn(bnet, engine, T);

ncases = 2;   % number of training sequences
max_iter = 2; % number of EM iterations
[learning_time, CPD, LL, cases] = cmp_learning_dbn(bnet, engine, T, 'ncases', ncases, 'max_iter', max_iter);

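% CPD{e,i} holds engine e's learned CPD for equivalence class i (as a struct),
% LL{e} its log-likelihood trace, and cases the sampled training sequences.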
% Compare to HMM toolbox

% Pack the sampled observation sequences into an O x T x ncases array for mhmm_em
data = zeros(O, T, ncases);
for i=1:ncases
  data(:,:,i) = reshape(cell2num(cases{i}(onodes,:)), [O T]);
end
tic;
[LL2, prior2, transmat2, mu2, Sigma2, mixmat2] = ...
    mhmm_em(data, prior0, transmat0, mu0, Sigma0, mixmat0, 'max_iter', max_iter);
t=toc;
disp(['HMM toolbox took ' num2str(t) ' seconds'])

% Every engine's EM estimates should match the HMM toolbox estimates
for e = 1:length(engine)
  assert(approxeq(prior2, CPD{e,1}.CPT))
  assert(approxeq(mixmat2, CPD{e,2}.CPT))
  assert(approxeq(mu2, CPD{e,3}.mean))
  assert(approxeq(Sigma2, CPD{e,3}.cov))
  assert(approxeq(transmat2, CPD{e,4}.CPT))
  assert(approxeq(LL2, LL{e}))
end
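
% Optional extra (an added sketch, not part of the original test): smooth the
% first training sequence with one of the engines and inspect the posterior
% over the hidden state at the final slice.  This assumes the standard BNT
% DBN engine interface (enter_evidence / marginal_nodes); note that engine{1}
% still holds the initial random parameters, since the learned models live
% inside cmp_learning_dbn.
evidence = cell(n, T);
for t=1:T
  evidence{onodes, t} = data(:, t, 1); % clamp the observed node Y
end
[eng, loglik] = enter_evidence(engine{1}, evidence);
marg = marginal_nodes(eng, 1, T);      % node 1 = Q, slice T
disp(marg.T)                           % Q x 1 vector Pr(Q_T | y_{1:T})
disp(['loglik of sequence 1 under the initial parameters: ' num2str(loglik)])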