/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*-  vi:set ts=8 sts=4 sw=4: */

/*
    QM DSP Library

    Centre for Digital Music, Queen Mary, University of London.
    This file copyright 2008 QMUL.
    All rights reserved.
*/

#ifndef KLDIVERGENCE_H
#define KLDIVERGENCE_H

#include <vector>

using std::vector;

/**
 * Helper methods for calculating Kullback-Leibler divergences.
 */
class KLDivergence
{
public:
    KLDivergence() { }
    ~KLDivergence() { }

    /**
     * Calculate the symmetrised Kullback-Leibler divergence between
     * two Gaussian models with diagonal covariance, given their mean
     * and variance vectors.  All four input vectors must be of equal
     * size.  (See the note following this declaration.)
     */
    double distanceGaussian(const vector<double> &means1,
                            const vector<double> &variances1,
                            const vector<double> &means2,
                            const vector<double> &variances2);
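
    /*
     * Editorial note, not part of the original API documentation: for
     * diagonal-covariance Gaussians with means m1, m2 and variances
     * v1, v2, one standard closed form for the symmetrised divergence
     * is
     *
     *   1/2 * sum_k [ v1[k]/v2[k] + v2[k]/v1[k]
     *                 + (m1[k] - m2[k])^2 * (1/v1[k] + 1/v2[k]) - 2 ]
     *
     * (the log-variance terms of the two directed divergences cancel
     * in the sum).  Whether KLDivergence.cpp uses exactly this form is
     * an assumption; a sketch follows the class definition.
     */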

    /**
     * Calculate the Kullback-Leibler divergence between two discrete
     * probability distributions.  Input vectors must be of equal
     * size.  If symmetrised is true, the result is the symmetrised
     * divergence, equal to KL(d1, d2) + KL(d2, d1).  (See the note
     * following this declaration.)
     */
    double distanceDistribution(const vector<double> &d1,
                                const vector<double> &d2,
                                bool symmetrised);
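
    /*
     * Editorial note: for discrete distributions the directed
     * divergence is KL(d1, d2) = sum_i d1[i] * log(d1[i] / d2[i]),
     * which is well-defined only where both probabilities are
     * non-zero; how zero-probability bins are handled here is
     * implementation-specific.  A hedged sketch of both computations
     * follows the class definition.
     */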
};
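
/*
 * The functions below are an editorial sketch, not part of the QM DSP
 * API: the real implementations live in KLDivergence.cpp, which is not
 * shown here.  The namespace and function names are invented for
 * illustration, and the zero-bin convention in the second function is
 * an assumption.
 */

#include <cmath>    // std::log
#include <cstddef>  // std::size_t

namespace kldivergence_sketch {

// Symmetrised KL divergence between two diagonal-covariance Gaussian
// models, using the closed form noted above.  Assumes all four vectors
// have the same size and all variances are strictly positive.
inline double distanceGaussianSketch(const vector<double> &means1,
                                     const vector<double> &variances1,
                                     const vector<double> &means2,
                                     const vector<double> &variances2)
{
    double d = 0.0;
    for (std::size_t k = 0; k < means1.size(); ++k) {
        double dm = means1[k] - means2[k];
        d += variances1[k] / variances2[k]
           + variances2[k] / variances1[k]
           + dm * dm * (1.0 / variances1[k] + 1.0 / variances2[k])
           - 2.0;
    }
    return d / 2.0;
}

// KL divergence between two discrete probability distributions,
// optionally symmetrised.  Bins where either probability is zero are
// skipped (one simple convention; not necessarily the library's).
inline double distanceDistributionSketch(const vector<double> &d1,
                                         const vector<double> &d2,
                                         bool symmetrised)
{
    double d = 0.0;
    for (std::size_t i = 0; i < d1.size(); ++i) {
        if (d1[i] > 0.0 && d2[i] > 0.0) {
            d += d1[i] * std::log(d1[i] / d2[i]);
            if (symmetrised) {
                d += d2[i] * std::log(d2[i] / d1[i]);
            }
        }
    }
    return d;
}

} // namespace kldivergence_sketch

// Hypothetical usage of the sketch:
//   double d = kldivergence_sketch::distanceDistributionSketch(h1, h2, true);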

#endif