seed = 0;
rand('state', seed);
randn('state', seed);

nrows = 5;
ncols = 5;
npixels = nrows*ncols;

% we number pixels in transposed raster scan order (top to bottom, left to right)

% H(i,j) is the number of the hidden node at (i,j)
H = reshape(1:npixels, nrows, ncols);
% O(i,j) is the number of the observed node at (i,j)
O = reshape(1:npixels, nrows, ncols) + length(H(:));


% Make a Bayes net where each hidden pixel generates an observed pixel
% but there are no connections between the hidden pixels.
% We use this just to generate noisy versions of known images.
N = 2*npixels;
dag = zeros(N);
for i=1:nrows
  for j=1:ncols
    dag(H(i,j), O(i,j)) = 1;
  end
end


K = 2; % number of discrete values for the hidden vars
ns = ones(N,1);
ns(H(:)) = K;
ns(O(:)) = 1;


% make image with vertical stripes
I = zeros(nrows, ncols);
for j=1:2:ncols
  I(:,j) = 1;
end

% each "hidden" node will be instantiated to the pixel in the known image
% each observed node has a conditional Gaussian distribution
eclass = ones(1,N);
%eclass(H(:)) = 1;
%eclass(O(:)) = 2;
eclass(H(:)) = 1:npixels;
eclass(O(:)) = npixels+1;
bnet = mk_bnet(dag, ns, 'discrete', H(:), 'equiv_class', eclass);


%bnet.CPD{1} = tabular_CPD(bnet, H(1), 'CPT', normalise(ones(1,K)));
for i=1:nrows
  for j=1:ncols
    bnet.CPD{H(i,j)} = root_CPD(bnet, H(i,j), I(i,j) + 1);
  end
end

% If H(i,j)=1, O(i,j)=+1 plus noise
% If H(i,j)=2, O(i,j)=-1 plus noise
sigma = 0.5;
bnet.CPD{eclass(O(1,1))} = gaussian_CPD(bnet, O(1,1), 'mean', [1 -1], 'cov', reshape(sigma*ones(1,K), [1 1 K]));
ofactor = bnet.CPD{eclass(O(1,1))};
%ofactor = gaussian_CPD('self', 2, 'dps', 1, 'cps', [], 'sz', [K O], 'mean', [1 -1], 'cov', reshape(sigma*ones(1,K), [1 1 K]));


data = sample_bnet(bnet);
img = reshape(data(O(:)), nrows, ncols)
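% Optionally visualize the data: show the true stripe image next to the
% sampled noisy observation, using standard MATLAB graphics calls
% (figure, subplot, imagesc, title, colormap). We assume img is a cell
% array (as returned by sample_bnet), so it is converted with cell2mat;
% if it is already numeric, it is used as-is.
if iscell(img)
  noisy_img = cell2mat(img);
else
  noisy_img = img;
end
figure;
subplot(1,2,1); imagesc(I); title('true scene I');
subplot(1,2,2); imagesc(noisy_img); title('noisy observation');
colormap(gray);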


%%%%%%%%%%%%%%%%%%%%%%%%%%%

% Now create an MRF, represented as a factor graph, to try to recover the scene

% VEF(i,j) is the number of the factor for the vertical edge between H(i,j) - H(i+1,j)
VEF = reshape((1:(nrows-1)*ncols), nrows-1, ncols);
% HEF(i,j) is the number of the factor for the horizontal edge between H(i,j) - H(i,j+1)
HEF = reshape((1:nrows*(ncols-1)), nrows, ncols-1) + length(VEF(:));

nvars = npixels;
nfac = length(VEF(:)) + length(HEF(:));

G = zeros(nvars, nfac);
N = length(ns);
eclass = zeros(1, nfac); % eclass(i)=j means factor i gets its params from factors{j}
vfactor_ndx = 1; % all vertical edges get their params from factors{1}
hfactor_ndx = 2; % all horizontal edges get their params from factors{2}
for i=1:nrows
  for j=1:ncols
    if i < nrows
      G(H(i:i+1,j), VEF(i,j)) = 1;
      eclass(VEF(i,j)) = vfactor_ndx;
    end
    if j < ncols
      G(H(i,j:j+1), HEF(i,j)) = 1;
      eclass(HEF(i,j)) = hfactor_ndx;
    end
  end
end


% "kitten raised in cage" prior - more likely to see contiguous vertical lines
vfactor = tabular_kernel([K K], softeye(K, 0.9));
hfactor = tabular_kernel([K K], softeye(K, 0.5));
factors = cell(1,2);
factors{vfactor_ndx} = vfactor;
factors{hfactor_ndx} = hfactor;

ev_eclass = ones(1,N); % every observation factor gets its params from ofactor
ns = K*ones(1,nvars);
%fg = mk_fgraph_given_ev(G, ns, factors, {ofactor}, num2cell(img), 'equiv_class', eclass, 'ev_equiv_class', ev_eclass);
fg = mk_fgraph_given_ev(G, ns, factors, {ofactor}, img, 'equiv_class', eclass, 'ev_equiv_class', ev_eclass);

bnet2 = fgraph_to_bnet(fg);

% inference

maximize = 1;

engine = {};
engine{1} = belprop_fg_inf_engine(fg, 'max_iter', npixels*2);
engine{2} = jtree_inf_engine(bnet2);
nengines = length(engine);

% on fg, we have already included the evidence
evidence = cell(1,npixels);
tic; [engine{1}, ll(1)] = enter_evidence(engine{1}, evidence, 'maximize', maximize); toc


% on bnet2, we must add evidence to the dummy nodes
V = fg.nvars;
dummy = V+1:V+fg.nfactors;
N = max(dummy);
evidence = cell(1, N);
evidence(dummy) = {1};
tic; [engine{2}, ll(2)] = enter_evidence(engine{2}, evidence); toc


Ihat = zeros(nrows, ncols, nengines);
for e=1:nengines
  for i=1:nrows
    for j=1:ncols
      m = marginal_nodes(engine{e}, H(i,j));
      Ihat(i,j,e) = argmax(m.T)-1;
    end
  end
end
Ihat
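% Optionally compare each engine's reconstruction with the true stripe
% image I. Both are 0/1 matrices, so an element-wise mismatch count
% summarizes how well the MRF prior plus the noisy evidence recovered
% the scene (assuming both engines converged).
for e=1:nengines
  nerrors = sum(sum(Ihat(:,:,e) ~= I));
  fprintf('engine %d: %d of %d pixels mislabeled\n', e, nerrors, npixels);
end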