% Make an HMM with mixture of Gaussian observations
%    Q1 ---> Q2
%   /  |    /  |
%  M1  |   M2  |
%   \  v    \  v
%    Y1      Y2
% where Pr(m=j|q=i) is a multinomial and Pr(y|m,q) is a Gaussian

%seed = 3;
%rand('state', seed);
%randn('state', seed);

% Nodes within a slice: 1 = Q (hidden state), 2 = M (mixture component), 3 = Y (observation)
intra = zeros(3);
intra(1,[2 3]) = 1;
intra(2,3) = 1;
inter = zeros(3);
inter(1,1) = 1;
n = 3;

Q = 2; % num hidden states
O = 2; % size of observed vector
M = 2; % num mixture components per state

ns = [Q M O];
dnodes = [1 2];
onodes = [3];
% Equivalence classes tie parameters across slices:
% node 1 in slice 2 gets its own class (4), the transition matrix.
eclass1 = [1 2 3];
eclass2 = [4 2 3];
bnet = mk_dbn(intra, inter, ns, 'discrete', dnodes, 'eclass1', eclass1, 'eclass2', eclass2, ...
	      'observed', onodes);

prior0 = normalise(rand(Q,1));
transmat0 = mk_stochastic(rand(Q,Q));
mixmat0 = mk_stochastic(rand(Q,M));
mu0 = rand(O,Q,M);
Sigma0 = repmat(eye(O), [1 1 Q M]);
bnet.CPD{1} = tabular_CPD(bnet, 1, prior0);
bnet.CPD{2} = tabular_CPD(bnet, 2, mixmat0);
%% we set the cov prior to 0 to give the same results as the HMM toolbox
%bnet.CPD{3} = gaussian_CPD(bnet, 3, 'mean', mu0, 'cov', Sigma0, 'cov_prior_weight', 0);
% the new version of the HMM toolbox uses the same default prior on Gaussians as BNT
bnet.CPD{3} = gaussian_CPD(bnet, 3, 'mean', mu0, 'cov', Sigma0);
bnet.CPD{4} = tabular_CPD(bnet, 4, transmat0);


T = 5; % fixed length sequences

engine = {};
engine{end+1} = hmm_inf_engine(bnet);
engine{end+1} = smoother_engine(jtree_2TBN_inf_engine(bnet));
engine{end+1} = smoother_engine(hmm_2TBN_inf_engine(bnet));
if 0
  engine{end+1} = jtree_unrolled_dbn_inf_engine(bnet, T);
  %engine{end+1} = frontier_inf_engine(bnet);
  engine{end+1} = bk_inf_engine(bnet, 'clusters', 'exact');
  engine{end+1} = jtree_dbn_inf_engine(bnet);
end

inf_time = cmp_inference_dbn(bnet, engine, T);

ncases = 2;
max_iter = 2;
[learning_time, CPD, LL, cases] = cmp_learning_dbn(bnet, engine, T, 'ncases', ncases, 'max_iter', max_iter);

% Compare to the HMM toolbox

data = zeros(O, T, ncases);
for i=1:ncases
  data(:,:,i) = reshape(cell2num(cases{i}(onodes,:)), [O T]);
end
tic;
[LL2, prior2, transmat2, mu2, Sigma2, mixmat2] = ...
    mhmm_em(data, prior0, transmat0, mu0, Sigma0, mixmat0, 'max_iter', max_iter);
t = toc;
disp(['HMM toolbox took ' num2str(t) ' seconds '])

for e = 1:length(engine)
  assert(approxeq(prior2, CPD{e,1}.CPT))
  assert(approxeq(mixmat2, CPD{e,2}.CPT))
  assert(approxeq(mu2, CPD{e,3}.mean))
  assert(approxeq(Sigma2, CPD{e,3}.cov))
  assert(approxeq(transmat2, CPD{e,4}.CPT))
  assert(approxeq(LL2, LL{e}))
end
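
% The comparison above only checks the learned parameters. As an illustration of
% how one might also query smoothed state posteriors with one of the engines
% built above, here is a minimal sketch. It assumes the standard BNT smoother
% API (enter_evidence / marginal_nodes) and reuses the first simulated training
% sequence as evidence; exact argument forms can differ between BNT versions,
% and this engine still carries the initial (random) parameters, not the
% learned ones.
evidence = cell(n, T);
evidence(onodes,:) = cases{1}(onodes,:);  % observe Y only; Q and M stay hidden
[eng, loglik] = enter_evidence(engine{2}, evidence);
for t=1:T
  m = marginal_nodes(eng, 1, t);          % posterior over Q_t given y_{1:T}
  fprintf('t=%d: Pr(Q_t=1 | y_{1:T}) = %5.3f\n', t, m.T(1));
end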