diff src/AudioEventMatcher.cpp @ 10:cbadb9d05d29
Using timestamps for the scrolling alignment time
author    Andrew N Robertson <andrew.robertson@eecs.qmul.ac.uk>
date      Sat, 04 Feb 2012 19:59:27 +0000
parents   bc62266af280
children  9a2b008c4706
--- a/src/AudioEventMatcher.cpp	Fri Feb 03 17:53:14 2012 +0000
+++ b/src/AudioEventMatcher.cpp	Sat Feb 04 19:59:27 2012 +0000
@@ -65,8 +65,18 @@
 }
 
 void AudioEventMatcher::updateBestAlignmentPosition(){
+    //THIS DEALS WITH WHERE WE ARE NOW! ON THE SCREEN
+    //DIFFERENT TO WHEN EVENTS COME IN AS THEY ARE TIMESTAMPED - SO EG A PITCH EVENT MAY ARRIVE 16 CHROMA FRAMES LATER - BIG DIFFERENCE
+
+    int newTime = ofGetElapsedTimeMillis() - startTime;
+//  double tmp = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);
+//  double timetmp = (newTime - lastAlignmentTime);
+//  double speedtmp = bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);
+
     currentAlignmentPosition = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);
-    currentAlignmentPosition += (ofGetElapsedTimeMillis() - lastAlignmentTime) * bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);
+    currentAlignmentPosition += (newTime - lastAlignmentTime) * bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);
+
+//  printf("ALIGN pos %f time diff %f (now %f, last %f) speed %f :: ALIGN BEST %f\n", tmp, timetmp, (double)ofGetElapsedTimeMillis(), lastAlignmentTime, speedtmp, currentAlignmentPosition);
 }
 
 void AudioEventMatcher::draw(){
@@ -153,11 +163,10 @@
     int priorStartIndex = recentPrior.getRealTermsAsIndex(screenStartTimeMillis);
     int priorEndIndex = recentPrior.getRealTermsAsIndex(screenEndTimeMillis);
 
-    ofSetColor(0,200,200);
+    ofSetColor(0,200,200);//recent prior
     recentPrior.drawConstrainedVector(priorStartIndex, priorEndIndex, 0, ofGetWidth(), bayesPositionWindow);
 
-//  bayesianStruct.prior.addTriangularShape(100, 20, 0.4);
-    ofSetColor(255,0,100);
+    ofSetColor(255,0,100);//purple prior
     bayesianStruct.prior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesLikelihoodWindow);
 }
 
@@ -166,11 +175,8 @@
     if (pitchIn > 0){
         liveInput.addPitchEvent(pitchIn, timeIn);
 
-        //tmp print stuff
-        printf("New pitch MAP post estimate now %i, ", bayesianStruct.posterior.MAPestimate);
-        double tmp = bayesianStruct.posterior.getMAPestimate();
-        printf(" getting it %f and offset %f == %f ms\n", tmp, bayesianStruct.posterior.offset, bayesianStruct.posterior.getIndexInRealTerms(tmp));
-
+        //printPosteriorMAPinfo();
+
         matchNewPitchEvent(channel, pitchIn, timeIn);//main pitch matching fn
 
         likelihoodVisualisation[1] = bayesianStruct.likelihood;
@@ -179,7 +185,6 @@
         recentTime = timeIn;
     }
-
 }
 
 void AudioEventMatcher::newKickEvent(const double& timeIn){
@@ -209,9 +214,8 @@
 //Needs just to set bounds for the matching process, not have TimeIn
 void AudioEventMatcher::matchNewOnsetEvent(const int& channel, const double& timeIn){
-
     bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
-
+
     //start at beginning but OPTIMISE later
 
     double onsetLikelihoodToNoise = 0.3;
@@ -224,13 +228,13 @@
     int numberOfMatchesFound = 0;
 
-    double startTime = bayesianStruct.likelihood.offset;
-    double endTime = bayesianStruct.likelihood.offset + matchWindowWidth;
+    double startMatchingTime = bayesianStruct.likelihood.offset;
+    double endMatchingTime = bayesianStruct.likelihood.offset + matchWindowWidth;
 
     if (channel <= recordedTracks.numberOfAudioTracks){
         for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
             double millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
-            if (millisTime >= startTime && millisTime <= endTime){
+            if (millisTime >= startMatchingTime && millisTime <= endMatchingTime){
                 bayesianStruct.likelihood.addGaussianShapeFromRealTime(millisTime, likelihoodWidth, quantity);
                 numberOfMatchesFound++;
//              printf("Adding Gaussian for onset at time %f offset %f\n", millisTime, bayesianStruct.likelihood.offset);
@@ -245,8 +249,8 @@
 
     bayesianStruct.calculatePosterior();
 
-    lastAlignmentTime = ofGetElapsedTimeMillis();
-    recentEventTime[channel] = ofGetElapsedTimeMillis() - startTime;
+    lastAlignmentTime = timeIn;//use TIMESTAMP
+    recentEventTime[channel] = timeIn;//ofGetElapsedTimeMillis() - startTime;
 }
 
@@ -254,8 +258,11 @@
 void AudioEventMatcher::matchNewPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){
     //start at beginning but OPTIMISE later
-
-
+    /*printf("TIME %i\n", ofGetElapsedTimeMillis());
+    //tmp debug
+    updateBestAlignmentPosition();
+    printf("current alignment best estimate %f\n", currentAlignmentPosition);
+    */
     bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
 
     //set the lielihoods by matching the pitched note
@@ -295,8 +302,8 @@
         bayesianStruct.calculatePosterior();
     }
 
-    lastAlignmentTime = ofGetElapsedTimeMillis();
-    recentEventTime[channel] = ofGetElapsedTimeMillis() - startTime;
+    lastAlignmentTime = timeIn;//has to use the STAMPED time
+    recentEventTime[channel] = timeIn;
 }
 
 double AudioEventMatcher::getPitchDistance(const double& pitchOne, const double& pitchTwo, const double& scale){
@@ -328,5 +335,13 @@
     bayesPositionWindow.resized(w,h);
 }
 
+/*
+
+void printPosteriorMAPinfo(){ //tmp print stuff
+    printf("New pitch MAP post estimate now %i, ", bayesianStruct.posterior.MAPestimate);
+    double tmp = bayesianStruct.posterior.getMAPestimate();
+    printf(" getting it %f and offset %f == %f ms\n", tmp, bayesianStruct.posterior.offset, bayesianStruct.posterior.getIndexInRealTerms(tmp));
+
+}
+*/
-
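
The substantive change in this changeset is easy to miss among the debug comments: lastAlignmentTime now stores the matched event's own timestamp rather than the wall-clock time at which the event was processed. Below is a hypothetical, self-contained sketch of the scrolling-alignment extrapolation showing why that matters. The function extrapolateAlignment is not part of the project's API, and all concrete numbers are assumed for illustration (a 150 ms detection latency stands in for the "16 chroma frames" mentioned in the comments).

// Hypothetical standalone sketch, not the project's API: why lastAlignmentTime
// should hold the event's TIMESTAMP rather than its wall-clock arrival time.
#include <cstdio>

// Mirrors the update in updateBestAlignmentPosition():
// position = MAP position at last update + elapsed time * relative speed.
double extrapolateAlignment(double mapPositionMillis,
                            double lastAlignmentTimeMillis,
                            double nowMillis,
                            double relativeSpeed) {
    return mapPositionMillis
         + (nowMillis - lastAlignmentTimeMillis) * relativeSpeed;
}

int main() {
    // Assumed, purely illustrative numbers: a pitch event occurs at
    // t = 1000 ms but is only delivered ~150 ms later by the detector.
    double eventTimestamp = 1000.0; // when the note was actually played
    double arrivalTime    = 1150.0; // when the detector reported it
    double mapPosition    = 5000.0; // MAP alignment estimate at the update
    double speed          = 1.0;    // relative-speed MAP estimate
    double now            = 1500.0; // screen-refresh time

    // Old behaviour: lastAlignmentTime = processing time. The detection
    // latency is silently subtracted from the elapsed interval, so the
    // scrolling position lags by latency * speed.
    printf("arrival-time basis: %.1f ms\n",
           extrapolateAlignment(mapPosition, arrivalTime, now, speed));

    // New behaviour: lastAlignmentTime = event timestamp, matching the
    // moment the posterior was actually computed for.
    printf("timestamp basis:    %.1f ms\n",
           extrapolateAlignment(mapPosition, eventTimestamp, now, speed));
    return 0;
}

With these numbers the arrival-time basis gives 5350 ms against 5500 ms for the timestamp basis: the on-screen position would trail the true alignment by exactly the detection latency, which is the drift this changeset removes.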
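For the onset-matching hunks, a similarly hedged sketch of the likelihood construction: every recorded onset that falls inside the match window contributes a Gaussian bump, which is what addGaussianShapeFromRealTime appears to do with likelihoodWidth and quantity in the diff. The window size, bin resolution, onset times, and Gaussian width below are all assumed values; the 0.3 value (onsetLikelihoodToNoise) comes from the source, though how it is applied is not shown in these hunks, so treating it as a uniform floor is an assumption.

// Hypothetical sketch of the likelihood built in matchNewOnsetEvent():
// Gaussian bumps at matching onsets on top of a constant noise floor.
#include <cmath>
#include <cstdio>
#include <vector>

int main() {
    const double windowStart = 4000.0; // likelihood.offset (assumed)
    const double windowWidth = 2000.0; // matchWindowWidth (assumed)
    const double binSizeMs   = 10.0;   // vector resolution (assumed)
    const double sigma       = 30.0;   // likelihoodWidth (assumed)
    const double noiseFloor  = 0.3;    // onsetLikelihoodToNoise from the diff

    // Recorded onset times (ms); only those inside the window contribute.
    std::vector<double> onsets = {3500.0, 4200.0, 4800.0, 6500.0};

    const int bins = static_cast<int>(windowWidth / binSizeMs);
    std::vector<double> likelihood(bins, noiseFloor);

    for (double onset : onsets) {
        if (onset < windowStart || onset > windowStart + windowWidth) continue;
        for (int b = 0; b < bins; ++b) {
            double t = windowStart + b * binSizeMs;
            double d = t - onset;
            likelihood[b] += std::exp(-0.5 * d * d / (sigma * sigma));
        }
    }

    // Peaks sit at the onsets inside the window (4200 ms and 4800 ms).
    for (int b = 0; b < bins; b += 20)
        printf("t = %.0f ms  L = %.3f\n", windowStart + b * binSizeMs, likelihood[b]);
    return 0;
}

The posterior then comes from combining this likelihood with the prior (bayesianStruct.calculatePosterior() in the source); the floor presumably keeps unmatched regions of the window from being zeroed out when an onset is spurious or missing.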