comparison toolboxes/FullBNT-1.0.7/bnt/examples/dynamic/HHMM/Square/mk_square_hhmm.m @ 0:e9a9cd732c1e tip

first hg version after svn
author wolffd
date Tue, 10 Feb 2015 15:05:51 +0000
parents
children
comparison
equal deleted inserted replaced
-1:000000000000 0:e9a9cd732c1e
1 function bnet = mk_square_hhmm(discrete_obs, true_params, topright)
2
3 % Make a 3 level HHMM described by the following grammar
4 %
5 % Square -> CLK | CCK % clockwise or counterclockwise
6 % CLK -> LR UD RL DU start on top left (1 2 3 4)
7 % CCK -> RL UD LR DU if start at top right (3 2 1 4)
8 % CCK -> UD LR DU RL if start at top left (2 1 4 3)
9 %
10 % LR = left-right, UD = up-down, RL = right-left, DU = down-up
11 % LR, UD, RL, DU are sub HMMs.
12 %
13 % For discrete observations, the subHMMs are 2-state left-right.
14 % LR emits L then l, etc.
15 %
16 % For cts observations, the subHMMs are 1 state.
17 % LR emits a vector in the -> direction, with a little noise.
18 % Since there is no constraint that we remain in the LR state as long as the RL state,
19 % the sides of the square might have different lengths,
20 % so the result is not really a square!
21 %
22 % If true_params = 0, we use random parameters at the top 2 levels
23 % (ready for learning). At the bottom level, we use noisy versions
24 % of the "true" observations.
25 %
26 % If topright=1, counter-clockwise starts at top right, not top left
27 % This example was inspired by Ivanov and Bobick.
28
29 if nargin < 3, topright = 1; end % default: CCK starts at the top-right corner
30
31 if 1 % discrete_obs % NOTE(review): condition hard-coded to 1, so the else branch below is unreachable -- presumably a debugging leftover; confirm before restoring "if discrete_obs"
32 Qsizes = [2 4 2]; % 2 squares (CLK/CCK), 4 sides, 2-state sub-HMMs
33 else
34 Qsizes = [2 4 1]; % NOTE(review): dead code -- cts-obs case would use 1-state sub-HMMs
35 end
36
37 D = 3; % depth of the hierarchy (3 levels of Q nodes)
38 Qnodes = 1:D;
39 startprob = cell(1,D);
40 transprob = cell(1,D);
41 termprob = cell(1,D);
42
43 % LEVEL 1
44
45 startprob{1} = 'unif'; % uniform over the 2 top-level symbols (CLK vs CCK)
46 transprob{1} = 'unif';
47
48 % LEVEL 2
49
50 if true_params
51 startprob{2} = zeros(2, 4); % startprob{2}(q1, q2): first side entered under parent q1
52 startprob{2}(1, :) = [1 0 0 0]; % CLK always starts with side 1 (LR)
53 if topright
54 startprob{2}(2, :) = [0 0 1 0]; % CCK starts with side 3 (RL) from top right
55 else
56 startprob{2}(2, :) = [0 1 0 0]; % CCK starts with side 2 (UD) from top left
57 end
58
59 transprob{2} = zeros(4, 2, 4); % transprob{2}(prev_q2, q1, next_q2)
60
61 transprob{2}(:,1,:) = [0 1 0 0
62 0 0 1 0
63 0 0 0 1
64 0 0 0 1]; % 4->e
65 if topright
66 transprob{2}(:,2,:) = [0 0 0 1
67 1 0 0 0
68 0 1 0 0
69 0 0 0 1]; % 4->e
70 else
71 transprob{2}(:,2,:) = [0 0 0 1
72 1 0 0 0
73 0 0 1 0 % 3->e
74 0 0 1 0];
75 end
76
77 %termprob{2} = 'rightstop';
78 termprob{2} = zeros(2,4); % termprob{2}(q1, q2): prob subroutine q2 finishes under parent q1
79 pfin = 0.8;
80 termprob{2}(1,:) = [0 0 0 pfin]; % finish in state 4 (DU)
81 if topright
82 termprob{2}(2,:) = [0 0 0 pfin];
83 else
84 termprob{2}(2,:) = [0 0 pfin 0]; % finish in state 3 (RL)
85 end
86 else
87 % In the unsupervised case, it is essential that we break symmetry
88 % in the initial param estimates.
89 %startprob{2} = 'unif';
90 %transprob{2} = 'unif';
91 %termprob{2} = 'unif';
92 startprob{2} = 'rnd';
93 transprob{2} = 'rnd';
94 termprob{2} = 'rnd';
95 end
96
97 % LEVEL 3
98
99 if 1 | true_params % NOTE(review): always true, so the 'rnd' branch below is dead code; also uses element-wise | where short-circuit || is conventional
100 startprob{3} = 'leftstart';
101 transprob{3} = 'leftright';
102 termprob{3} = 'rightstop';
103 else
104 % If we want to be able to run a base-level model backwards...
105 startprob{3} = 'rnd';
106 transprob{3} = 'rnd';
107 termprob{3} = 'rnd';
108 end
109
110
111 % OBS LEVEL
112
113 if discrete_obs
114 % Initialise observations of lowest level primitives in a way which we can interpret
115 chars = ['L', 'l', 'U', 'u', 'R', 'r', 'D', 'd'];
116 L=find(chars=='L'); l=find(chars=='l');
117 U=find(chars=='U'); u=find(chars=='u');
118 R=find(chars=='R'); r=find(chars=='r');
119 D=find(chars=='D'); d=find(chars=='d'); % NOTE(review): reassigns D (was depth=3); harmless here since D is not read again, but shadowing is fragile
120 Osize = length(chars);
121
122 if true_params
123 p = 1; % makes each state fully observed
124 else
125 p = 0.9;
126 end
127
128 obsprob = (1-p)*ones([4 2 Osize]); % residual mass (1-p) on every symbol before normalization
129 % Q2 Q3 O
130 obsprob(1, 1, L) = p;
131 obsprob(1, 2, l) = p;
132 obsprob(2, 1, U) = p;
133 obsprob(2, 2, u) = p;
134 obsprob(3, 1, R) = p;
135 obsprob(3, 2, r) = p;
136 obsprob(4, 1, D) = p;
137 obsprob(4, 2, d) = p;
138 obsprob = mk_stochastic(obsprob); % renormalize so each (Q2,Q3) slice sums to 1 over O
139 Oargs = {'CPT', obsprob};
140 else
141 % Initialise means of lowest level primitives in a way which we can interpret
142 % These means are little vectors in the east, south, west, north directions.
143 % (left-right=east, up-down=south, right-left=west, down-up=north)
144 Osize = 2;
145 mu = zeros(2, Qsizes(2), Qsizes(3));
146 scale = 3;
147 if true_params
148 noise = 0;
149 else
150 noise = 0.5*scale;
151 end
152 for q3=1:Qsizes(3)
153 mu(:, 1, q3) = scale*[1;0] + noise*rand(2,1); % side 1 (LR): east
154 end
155 for q3=1:Qsizes(3)
156 mu(:, 2, q3) = scale*[0;-1] + noise*rand(2,1); % side 2 (UD): south
157 end
158 for q3=1:Qsizes(3)
159 mu(:, 3, q3) = scale*[-1;0] + noise*rand(2,1); % side 3 (RL): west
160 end
161 for q3=1:Qsizes(3)
162 mu(:, 4, q3) = scale*[0;1] + noise*rand(2,1); % side 4 (DU): north
163 end
164 Sigma = repmat(reshape(scale*eye(2), [2 2 1 1 ]), [1 1 Qsizes(2) Qsizes(3)]); % NOTE(review): full 2x2 cov built, but cov_type below is 'diag' -- presumably only the diagonal is used; verify against mk_hhmm
165 Oargs = {'mean', mu, 'cov', Sigma, 'cov_type', 'diag'};
166 end
167
168 if discrete_obs
169 selfprob = 0.5;
170 else
171 selfprob = 0.95;
172 % If less than this, it won't look like a square
173 % because it doesn't spend enough time in each state
174 % Unfortunately, the variance on durations (lengths of each side)
175 % is very large
176 end
177 bnet = mk_hhmm('Qsizes', Qsizes, 'Osize', Osize', 'discrete_obs', discrete_obs, ... % NOTE(review): Osize' has a stray transpose -- a no-op on a scalar
178 'Oargs', Oargs, 'Ops', Qnodes(2:3), 'selfprob', selfprob, ...
179 'startprob', startprob, 'transprob', transprob, 'termprob', termprob);
180