toolboxes/FullBNT-1.0.7/bnt/examples/dynamic/mhmm1.m @ 0:e9a9cd732c1e (tip), repository camir-aes2014
description: first hg version after svn
author:      wolffd
date:        Tue, 10 Feb 2015 15:05:51 +0000
% Make an HMM with mixture of Gaussian observations
%    Q1 ---> Q2
%   /  |    /  |
%  M1  |   M2  |
%   \  v    \  v
%     Y1      Y2
% where Pr(m=j|q=i) is a multinomial and Pr(y|m,q) is a Gaussian

%seed = 3;
%rand('state', seed);
%randn('state', seed);

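% Graph structure: intra is the within-slice adjacency matrix
% (Q -> M, Q -> Y, M -> Y) and inter the between-slice matrix
% (Q_t -> Q_t+1); each slice has n = 3 nodes.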
intra = zeros(3);
intra(1,[2 3]) = 1;
intra(2,3) = 1;
inter = zeros(3);
inter(1,1) = 1;
n = 3;

Q = 2; % num hidden states
O = 2; % size of observed vector
M = 2; % num mixture components per state

ns = [Q M O];
dnodes = [1 2];
onodes = [3];
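% Equivalence classes tie parameters across slices: in slice 1 each node
% has its own CPD (classes 1-3); in slice 2 the hidden state gets a new
% class (4) for the transition matrix, while M and Y reuse classes 2 and 3.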
eclass1 = [1 2 3];
eclass2 = [4 2 3];
bnet = mk_dbn(intra, inter, ns, 'discrete', dnodes, 'eclass1', eclass1, 'eclass2', eclass2, ...
	      'observed', onodes);

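% Random initial parameters for EM: normalise/mk_stochastic make the prior
% and the rows of the transition and mixing matrices sum to 1, and every
% mixture component starts with an identity covariance.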
prior0 = normalise(rand(Q,1));
transmat0 = mk_stochastic(rand(Q,Q));
mixmat0 = mk_stochastic(rand(Q,M));
mu0 = rand(O,Q,M);
Sigma0 = repmat(eye(O), [1 1 Q M]);
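% CPD{1} = initial state prior Pr(Q1), CPD{2} = mixture weights Pr(M|Q),
% CPD{3} = Gaussian observation model Pr(Y|Q,M), CPD{4} = transition
% matrix Pr(Q_t+1|Q_t).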
bnet.CPD{1} = tabular_CPD(bnet, 1, prior0);
bnet.CPD{2} = tabular_CPD(bnet, 2, mixmat0);
%% we set the cov prior to 0 to give same results as HMM toolbox
%bnet.CPD{3} = gaussian_CPD(bnet, 3, 'mean', mu0, 'cov', Sigma0, 'cov_prior_weight', 0);
% new version of HMM toolbox uses the same default prior on Gaussians as BNT
bnet.CPD{3} = gaussian_CPD(bnet, 3, 'mean', mu0, 'cov', Sigma0);
bnet.CPD{4} = tabular_CPD(bnet, 4, transmat0);

T = 5; % fixed length sequences

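% Exact inference engines to compare: hmm_inf_engine converts the DBN to
% the equivalent HMM, while the smoother engines run forwards-backwards
% over a 2-slice (2TBN) representation; the engines inside the "if 0"
% block are disabled here.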
engine = {};
engine{end+1} = hmm_inf_engine(bnet);
engine{end+1} = smoother_engine(jtree_2TBN_inf_engine(bnet));
engine{end+1} = smoother_engine(hmm_2TBN_inf_engine(bnet));
if 0
  engine{end+1} = jtree_unrolled_dbn_inf_engine(bnet, T);
  %engine{end+1} = frontier_inf_engine(bnet);
  engine{end+1} = bk_inf_engine(bnet, 'clusters', 'exact');
  engine{end+1} = jtree_dbn_inf_engine(bnet);
end

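% Compare the engines on inference over a length-T sequence; the returned
% inf_time holds the run time of each engine.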
inf_time = cmp_inference_dbn(bnet, engine, T);

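% Run EM parameter learning with each engine on ncases simulated sequences
% of length T (only 2 EM iterations, since this is a consistency check);
% returns the learned CPDs, log-likelihood traces, and training cases.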
ncases = 2;
max_iter = 2;
[learning_time, CPD, LL, cases] = cmp_learning_dbn(bnet, engine, T, 'ncases', ncases, 'max_iter', max_iter);

% Compare to HMM toolbox

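% Convert the BNT training cases (cell arrays of per-node values) into
% the O x T x ncases array expected by the HMM toolbox.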
data = zeros(O, T, ncases);
for i=1:ncases
  data(:,:,i) = reshape(cell2num(cases{i}(onodes,:)), [O T]);
end
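% Run the HMM toolbox's EM (mhmm_em) directly on the HMM parameterisation,
% starting from the same initial parameters and iteration budget.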
tic;
[LL2, prior2, transmat2, mu2, Sigma2, mixmat2] = ...
    mhmm_em(data, prior0, transmat0, mu0, Sigma0, mixmat0, 'max_iter', max_iter);
t=toc;
disp(['HMM toolbox took ' num2str(t) ' seconds '])

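% The parameters and log-likelihoods learned via each BNT engine should
% match the HMM toolbox results (up to approxeq's numerical tolerance).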
for e = 1:length(engine)
  assert(approxeq(prior2, CPD{e,1}.CPT))
  assert(approxeq(mixmat2, CPD{e,2}.CPT))
  assert(approxeq(mu2, CPD{e,3}.mean))
  assert(approxeq(Sigma2, CPD{e,3}.cov))
  assert(approxeq(transmat2, CPD{e,4}.CPT))
  assert(approxeq(LL2, LL{e}))
end