function kl = cross_entropy(p, q, symmetric)
% CROSS_ENTROPY Compute the Kullback-Leibler divergence between two discrete prob. distributions
% kl = cross_entropy(p, q, symmetric)
%
% If symmetric = 1, the symmetrized divergence (KL(p||q) + KL(q||p))/2 is
% returned instead. Default: symmetric = 0.

if nargin < 3, symmetric = 0; end

% Additive regularizer so the logs stay finite when an entry of p or q is 0.
tiny = exp(-700);

% Flatten both distributions to column vectors, whatever their input shape.
p = p(:);
q = q(:);

% KL(p || q) is needed in both the plain and the symmetrized case.
kl_pq = sum(p .* log((p + tiny) ./ (q + tiny)));
if symmetric
  kl_qp = sum(q .* log((q + tiny) ./ (p + tiny)));
  kl = (kl_pq + kl_qp) / 2;
else
  kl = kl_pq;
end