seed = 0;
rand('state', seed);
randn('state', seed);

nrows = 5;
ncols = 5;
npixels = nrows*ncols;

% we number pixels in transposed raster scan order (top to bottom, left to right)

% H(i,j) is the number of the hidden node at (i,j)
H = reshape(1:npixels, nrows, ncols);
% O(i,j) is the number of the observed node at (i,j)
O = reshape(1:npixels, nrows, ncols) + length(H(:));
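% With nrows = ncols = 5, the hidden nodes are numbered 1..25 in column-major
% order (H(1,1)=1, H(2,1)=2, ..., H(1,2)=6, ...) and the observed nodes are
% numbered 26..50, so O(i,j) = H(i,j) + npixels.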


% Make a Bayes net where each hidden pixel generates an observed pixel
% but there are no connections between the hidden pixels.
% We use this just to generate noisy versions of known images.
N = 2*npixels;
dag = zeros(N);
for i=1:nrows
  for j=1:ncols
    dag(H(i,j), O(i,j)) = 1;
  end
end
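% The DAG therefore contains npixels disconnected H->O pairs; as an optional
% sanity check one could verify, e.g.,
%   assert(isequal(find(dag(H(1,1),:)), O(1,1)))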


K = 2; % number of discrete values for the hidden vars
ns = ones(N,1);
ns(H(:)) = K;
ns(O(:)) = 1;
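% In BNT, ns holds the number of states for discrete nodes and the
% dimensionality for continuous ones: each hidden pixel is binary (K=2),
% each observed pixel is a scalar Gaussian.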


% make image with vertical stripes
I = zeros(nrows, ncols);
for j=1:2:ncols
  I(:,j) = 1;
end
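% For the 5x5 case every row of I is [1 0 1 0 1], i.e. columns 1, 3 and 5
% are "on" and columns 2 and 4 are "off".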

% each "hidden" node will be instantiated to the pixel in the known image
% each observed node has a conditional Gaussian distribution
eclass = ones(1,N);
%eclass(H(:)) = 1;
%eclass(O(:)) = 2;
eclass(H(:)) = 1:npixels;
eclass(O(:)) = npixels+1;
bnet = mk_bnet(dag, ns, 'discrete', H(:), 'equiv_class', eclass);
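% The equivalence classes tie parameters: every hidden node gets its own CPD
% (classes 1..npixels, so each pixel can be clamped to its own value), while
% all observed nodes share a single CPD (class npixels+1).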


%bnet.CPD{1} = tabular_CPD(bnet, H(1), 'CPT', normalise(ones(1,K)));
for i=1:nrows
  for j=1:ncols
    bnet.CPD{H(i,j)} = root_CPD(bnet, H(i,j), I(i,j) + 1);
  end
end
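% root_CPD clamps each hidden node to the known pixel value, shifted from
% {0,1} to BNT's 1-based states {1,2}; sampling this net therefore just adds
% observation noise to the stripe image.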

% If H(i,j)=1, O(i,j)=+1 plus noise
% If H(i,j)=2, O(i,j)=-1 plus noise
sigma = 0.5;
bnet.CPD{eclass(O(1,1))} = gaussian_CPD(bnet, O(1,1), 'mean', [1 -1], 'cov', reshape(sigma*ones(1,K), [1 1 K]));
ofactor = bnet.CPD{eclass(O(1,1))};
%ofactor = gaussian_CPD('self', 2, 'dps', 1, 'cps', [], 'sz', [K O], 'mean', [1 -1], 'cov', reshape(sigma*ones(1,K), [1 1 K]));
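% Note that 'cov' specifies variances, not standard deviations, so the
% observation noise has std sqrt(sigma) (about 0.71) around the class
% means +1 (H=1) and -1 (H=2).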


data = sample_bnet(bnet);
img = reshape(data(O(:)), nrows, ncols)
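% img holds the noisy observations (roughly +1 where I==0 and -1 where I==1).
% sample_bnet returns a cell array, so img is a 5x5 cell array here; to
% visualise it one could do something like
%   imagesc(reshape([img{:}], nrows, ncols)); colormap(gray)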




%%%%%%%%%%%%%%%%%%%%%%%%%%%

% Now create an MRF, represented as a factor graph, to try to recover the scene

% VEF(i,j) is the number of the factor for the vertical edge between H(i,j) - H(i+1,j)
VEF = reshape((1:(nrows-1)*ncols), nrows-1, ncols);
% HEF(i,j) is the number of the factor for the horizontal edge between H(i,j) - H(i,j+1)
HEF = reshape((1:nrows*(ncols-1)), nrows, ncols-1) + length(VEF(:));
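% For the 5x5 grid this gives (nrows-1)*ncols = 20 vertical-edge factors
% (numbered 1..20) and nrows*(ncols-1) = 20 horizontal-edge factors
% (numbered 21..40).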

nvars = npixels;
nfac = length(VEF(:)) + length(HEF(:));

G = zeros(nvars, nfac);
N = length(ns);
eclass = zeros(1, nfac); % eclass(i)=j means factor i gets its params from factors{j}
vfactor_ndx = 1; % all vertical edges get their params from factors{1}
hfactor_ndx = 2; % all horizontal edges get their params from factors{2}
for i=1:nrows
  for j=1:ncols
    if i < nrows
      G(H(i:i+1,j), VEF(i,j)) = 1;
      eclass(VEF(i,j)) = vfactor_ndx;
    end
    if j < ncols
      G(H(i,j:j+1), HEF(i,j)) = 1;
      eclass(HEF(i,j)) = hfactor_ndx;
    end
  end
end
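% G is the nvars-by-nfac variable/factor incidence matrix: G(v,f)=1 iff hidden
% pixel v participates in factor f, so each edge factor touches exactly two
% vertically or horizontally adjacent pixels.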


% "kitten raised in a cage" prior - more likely to see contiguous vertical lines
vfactor = tabular_kernel([K K], softeye(K, 0.9));
hfactor = tabular_kernel([K K], softeye(K, 0.5));
factors = cell(1,2);
factors{vfactor_ndx} = vfactor;
factors{hfactor_ndx} = hfactor;
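% softeye(K,p) (from KPMtools) puts p on the diagonal and spreads 1-p over the
% off-diagonal entries, so vertical neighbours agree with probability 0.9
% while the horizontal potential is uniform (0.5/0.5) and hence uninformative;
% this is what encodes the preference for vertical stripes.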

ev_eclass = ones(1,N); % every observation factor gets its params from ofactor
ns = K*ones(1,nvars);
%fg = mk_fgraph_given_ev(G, ns, factors, {ofactor}, num2cell(img), 'equiv_class', eclass, 'ev_equiv_class', ev_eclass);
fg = mk_fgraph_given_ev(G, ns, factors, {ofactor}, img, 'equiv_class', eclass, 'ev_equiv_class', ev_eclass);
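% mk_fgraph_given_ev absorbs the observed image into per-pixel evidence
% factors built from ofactor, so the resulting factor graph is over the
% npixels hidden variables only.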

bnet2 = fgraph_to_bnet(fg);
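% fgraph_to_bnet converts the factor graph into an equivalent Bayes net in
% which each factor becomes a dummy observed child of its variables; this
% lets us run junction tree inference on the same model for comparison.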

% inference


maximize = 1;
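% maximize=1 requests max-product (MAP) message passing instead of
% sum-product marginals.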

engine = {};
engine{1} = belprop_fg_inf_engine(fg, 'max_iter', npixels*2);
engine{2} = jtree_inf_engine(bnet2);
nengines = length(engine);

% on fg, we have already included the evidence
evidence = cell(1,npixels);
tic; [engine{1}, ll(1)] = enter_evidence(engine{1}, evidence, 'maximize', maximize); toc


% on bnet2, we must add evidence to the dummy nodes
V = fg.nvars;
dummy = V+1:V+fg.nfactors;
N = max(dummy);
evidence = cell(1, N);
evidence(dummy) = {1};
tic; [engine{2}, ll(2)] = enter_evidence(engine{2}, evidence); toc
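% Clamping every dummy factor node to state 1 injects the corresponding
% factor potentials as evidence in the converted Bayes net.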


Ihat = zeros(nrows, ncols, nengines);
for e=1:nengines
  for i=1:nrows
    for j=1:ncols
      m = marginal_nodes(engine{e}, H(i,j));
      Ihat(i,j,e) = argmax(m.T)-1;
    end
  end
end
Ihat
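% Ihat(:,:,1) and Ihat(:,:,2) hold the per-pixel estimates (argmax of each
% node's belief, mapped from states {1,2} back to pixel values {0,1}) from
% loopy BP and junction tree respectively; with this noise level and prior
% they should largely recover the original stripe image I.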