seed = 0;
rand('state', seed);
randn('state', seed);

nrows = 5;
ncols = 5;
npixels = nrows*ncols;

% we number pixels in transposed raster scan order (top to bottom, left to right)

% H(i,j) is the number of the hidden node at (i,j)
H = reshape(1:npixels, nrows, ncols);
% O(i,j) is the number of the observed node at (i,j)
O = reshape(1:npixels, nrows, ncols) + length(H(:));


% Make a Bayes net where each hidden pixel generates an observed pixel
% but there are no connections between the hidden pixels.
% We use this just to generate noisy versions of known images.
N = 2*npixels;
dag = zeros(N);
for i=1:nrows
  for j=1:ncols
    dag(H(i,j), O(i,j)) = 1;
  end
end


K = 2; % number of discrete values for the hidden vars
ns = ones(N,1);
ns(H(:)) = K;
ns(O(:)) = 1;


% make image with vertical stripes
I = zeros(nrows, ncols);
for j=1:2:ncols
  I(:,j) = 1;
end

% each "hidden" node will be instantiated to the pixel in the known image
% each observed node has a conditional Gaussian distribution
eclass = ones(1,N);
%eclass(H(:)) = 1;
%eclass(O(:)) = 2;
eclass(H(:)) = 1:npixels;
eclass(O(:)) = npixels+1;
bnet = mk_bnet(dag, ns, 'discrete', H(:), 'equiv_class', eclass);


% clamp each hidden node to the corresponding pixel of the known image
%bnet.CPD{1} = tabular_CPD(bnet, H(1), 'CPT', normalise(ones(1,K)));
for i=1:nrows
  for j=1:ncols
    bnet.CPD{H(i,j)} = root_CPD(bnet, H(i,j), I(i,j) + 1);
  end
end

% If H(i,j)=1, O(i,j)=+1 plus noise
% If H(i,j)=2, O(i,j)=-1 plus noise
sigma = 0.5;
bnet.CPD{eclass(O(1,1))} = gaussian_CPD(bnet, O(1,1), 'mean', [1 -1], 'cov', reshape(sigma*ones(1,K), [1 1 K]));
ofactor = bnet.CPD{eclass(O(1,1))};
%ofactor = gaussian_CPD('self', 2, 'dps', 1, 'cps', [], 'sz', [K O], 'mean', [1 -1], 'cov', reshape(sigma*ones(1,K), [1 1 K]));


% sample a noisy version of the known image
data = sample_bnet(bnet);
img = reshape(data(O(:)), nrows, ncols)




%%%%%%%%%%%%%%%%%%%%%%%%%%%

% Now create an MRF, represented as a factor graph, to try to recover the scene

% VEF(i,j) is the number of the factor for the vertical edge between H(i,j) - H(i+1,j)
VEF = reshape((1:(nrows-1)*ncols), nrows-1, ncols);
% HEF(i,j) is the number of the factor for the horizontal edge between H(i,j) - H(i,j+1)
HEF = reshape((1:nrows*(ncols-1)), nrows, ncols-1) + length(VEF(:));

nvars = npixels;
nfac = length(VEF(:)) + length(HEF(:));

G = zeros(nvars, nfac);
N = length(ns);
eclass = zeros(1, nfac); % eclass(i)=j means factor i gets its params from factors{j}
vfactor_ndx = 1; % all vertical edges get their params from factors{1}
hfactor_ndx = 2; % all horizontal edges get their params from factors{2}
for i=1:nrows
  for j=1:ncols
    if i < nrows
      G(H(i:i+1,j), VEF(i,j)) = 1;
      eclass(VEF(i,j)) = vfactor_ndx;
    end
    if j < ncols
      G(H(i,j:j+1), HEF(i,j)) = 1;
      eclass(HEF(i,j)) = hfactor_ndx;
    end
  end
end


% "kitten raised in cage" prior - more likely to see contiguous vertical lines
vfactor = tabular_kernel([K K], softeye(K, 0.9));
hfactor = tabular_kernel([K K], softeye(K, 0.5));
factors = cell(1,2);
factors{vfactor_ndx} = vfactor;
factors{hfactor_ndx} = hfactor;

ev_eclass = ones(1,N); % every observation factor gets its params from ofactor
ns = K*ones(1,nvars);
%fg = mk_fgraph_given_ev(G, ns, factors, {ofactor}, num2cell(img), 'equiv_class', eclass, 'ev_equiv_class', ev_eclass);
fg = mk_fgraph_given_ev(G, ns, factors, {ofactor}, img, 'equiv_class', eclass, 'ev_equiv_class', ev_eclass);

bnet2 = fgraph_to_bnet(fg);

% inference


maximize = 1;

engine = {};
engine{1} = belprop_fg_inf_engine(fg, 'max_iter', npixels*2);
engine{2} = jtree_inf_engine(bnet2);
nengines = length(engine);

% on fg, we have already included the evidence
evidence = cell(1,npixels);
tic; [engine{1}, ll(1)] = enter_evidence(engine{1}, evidence, 'maximize', maximize); toc


% on bnet2, we must add evidence to the dummy nodes
V = fg.nvars;
dummy = V+1:V+fg.nfactors;
N = max(dummy);
evidence = cell(1, N);
evidence(dummy) = {1};
tic; [engine{2}, ll(2)] = enter_evidence(engine{2}, evidence); toc


% recover the most probable state (0 or 1) of each hidden pixel under each engine
Ihat = zeros(nrows, ncols, nengines);
for e=1:nengines
  for i=1:nrows
    for j=1:ncols
      m = marginal_nodes(engine{e}, H(i,j));
      Ihat(i,j,e) = argmax(m.T)-1;
    end
  end
end
Ihat
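
% One possible check, not part of the original demo, using only variables
% defined above: compare each engine's reconstruction Ihat against the true
% stripe image I and count the misclassified pixels.
for e=1:nengines
  nerrors = sum(sum(Ihat(:,:,e) ~= I));
  fprintf('engine %d: %d of %d pixels misclassified\n', e, nerrors, npixels);
end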