annotate maths/KLDivergence.h @ 298:255e431ae3d4

* Key detector: when returning key strengths, use the peak value of the three underlying chromagram correlations (from 36-bin chromagram) corresponding to each key, instead of the mean. Rationale: This is the same method as used when returning the key value, and it's nice to have the same results in both returned value and plot. The peak performed better than the sum with a simple test set of triads, so it seems reasonable to change the plot to match the key output rather than the other way around. * FFT: kiss_fftr returns only the non-conjugate bins, synthesise the rest rather than leaving them (perhaps dangerously) undefined. Fixes an uninitialised data error in chromagram that could cause garbage results from key detector. * Constant Q: remove precalculated values again, I reckon they're not proving such a good tradeoff.
author Chris Cannam <c.cannam@qmul.ac.uk>
date Fri, 05 Jun 2009 15:12:39 +0000
parents f49be56d3c4e
children e5907ae6de17
rev   line source
c@256 1 /* -*- c-basic-offset: 4 indent-tabs-mode: nil -*- vi:set ts=8 sts=4 sw=4: */
c@256 2
c@256 3 /*
c@256 4 QM DSP Library
c@256 5
c@256 6 Centre for Digital Music, Queen Mary, University of London.
c@258 7 This file copyright 2008 QMUL.
c@256 8 All rights reserved.
c@256 9 */
c@256 10
c@256 11 #ifndef KLDIVERGENCE_H
c@256 12 #define KLDIVERGENCE_H
c@256 13
c@256 14 #include <vector>
c@256 15
c@256 16 using std::vector;
c@256 17
/**
 * Helper methods for calculating Kullback-Leibler divergences
 * between pairs of equal-length vectors.  The class is stateless;
 * both methods are declared here and defined in the corresponding
 * implementation file.
 */
class KLDivergence
{
public:
    KLDivergence() { }
    ~KLDivergence() { }

    /**
     * Calculate a symmetrised Kullback-Leibler divergence of Gaussian
     * models based on mean and variance vectors.  All input vectors
     * must be of equal size.
     *
     * @param means1     Per-dimension means of the first Gaussian model.
     * @param variances1 Per-dimension variances of the first model.
     * @param means2     Per-dimension means of the second Gaussian model.
     * @param variances2 Per-dimension variances of the second model.
     * @return The symmetrised divergence between the two models.
     */
    double distanceGaussian(const vector<double> &means1,
                            const vector<double> &variances1,
                            const vector<double> &means2,
                            const vector<double> &variances2);

    /**
     * Calculate a Kullback-Leibler divergence of two probability
     * distributions.  Input vectors must be of equal size.  If
     * symmetrised is true, the result will be the symmetrised
     * distance (equal to KL(d1, d2) + KL(d2, d1)).
     *
     * @param d1          First probability distribution.
     * @param d2          Second probability distribution.
     * @param symmetrised If true, return KL(d1, d2) + KL(d2, d1);
     *                    otherwise return the one-way KL(d1, d2).
     * @return The (possibly symmetrised) divergence.
     *
     * NOTE(review): behaviour for zero-probability bins is determined
     * by the out-of-view implementation — confirm before relying on it.
     */
    double distanceDistribution(const vector<double> &d1,
                                const vector<double> &d2,
                                bool symmetrised);
};
c@256 47
c@256 48 #endif
c@256 49