multitrack-audio-matcher
comparison src/AudioEventMatcher.cpp @ 45:d23685b9e766
Fixed the alignment error calculations and added histogram plotting
author | Andrew N Robertson <andrew.robertson@eecs.qmul.ac.uk> |
---|---|
date | Tue, 08 May 2012 21:53:11 +0100 |
parents | 6a7982661703 |
children | 5359e2c0b0fb |
44:73fbbc92fdfb | 45:d23685b9e766 |
---|---|
212 | 212 |
213 void AudioEventMatcher::updateBestAlignmentPosition(){ | 213 void AudioEventMatcher::updateBestAlignmentPosition(){ |
214 //THIS DEALS WITH WHERE WE ARE NOW! ON THE SCREEN | 214 //THIS DEALS WITH WHERE WE ARE NOW! ON THE SCREEN |
215 //DIFFERENT TO WHEN EVENTS COME IN AS THEY ARE TIMESTAMPED - SO EG A PITCH EVENT MAY ARRIVE 16 CHROMA FRAMES LATER - BIG DIFFERENCE | 215 //DIFFERENT TO WHEN EVENTS COME IN AS THEY ARE TIMESTAMPED - SO EG A PITCH EVENT MAY ARRIVE 16 CHROMA FRAMES LATER - BIG DIFFERENCE |
216 | 216 |
217 int newTime = ofGetElapsedTimeMillis() - startTime; | 217 int newTime = getTimeNow(); |
218 // double tmp = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);; | 218 // double tmp = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);; |
219 // double timetmp = (newTime - lastAlignmentTime); | 219 // double timetmp = (newTime - lastAlignmentTime); |
220 // double speedtmp = bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate); | 220 // double speedtmp = bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate); |
221 // currentAlignmentTime = newTime; | 221 // currentAlignmentTime = newTime; |
222 currentAlignmentPosition = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate); | 222 currentAlignmentPosition = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate); |
232 // printf("updateBestAlignment:: alignment %i:: %i\n", newTime, (int) currentAlignmentPosition); | 232 // printf("updateBestAlignment:: alignment %i:: %i\n", newTime, (int) currentAlignmentPosition); |
233 | 233 |
234 // printf("ALIGN pos %f time diff %f (now %f , last %f)speed %f :: ALIGN BEST %f\n", tmp, timetmp, (double)ofGetElapsedTimeMillis(), lastAlignmentTime, speedtmp, currentAlignmentPosition); | 234 // printf("ALIGN pos %f time diff %f (now %f , last %f)speed %f :: ALIGN BEST %f\n", tmp, timetmp, (double)ofGetElapsedTimeMillis(), lastAlignmentTime, speedtmp, currentAlignmentPosition); |
235 } | 235 } |
236 | 236 |
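updateBestAlignmentPosition() takes the MAP estimate of the posterior and converts it from a grid index back into milliseconds for display. A minimal sketch of that index-to-real-time mapping is below; the DistributionGrid type, its offset and resolution fields, and the mapEstimate helper are placeholders for illustration, not the project's actual Bayesian structures.

```cpp
#include <algorithm>
#include <vector>

// Hypothetical stand-in for the project's discretised posterior:
// values[i] covers the time  offset + i * resolution  (milliseconds).
struct DistributionGrid {
    std::vector<double> values;
    double offset = 0.0;      // real time of index 0, in ms
    double resolution = 1.0;  // ms per grid step

    // Index of the maximum a posteriori (MAP) bin.
    int mapEstimate() const {
        return (int)(std::max_element(values.begin(), values.end()) - values.begin());
    }

    // Rough equivalent of getIndexInRealTerms(): grid index -> milliseconds.
    double indexInRealTerms(int index) const {
        return offset + index * resolution;
    }
};

// Sketch of updateBestAlignmentPosition(): take the MAP bin and map it back
// to a position in milliseconds for drawing "where we are now".
double bestAlignmentMillis(const DistributionGrid& posterior) {
    return posterior.indexInRealTerms(posterior.mapEstimate());
}
```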
| 237 int AudioEventMatcher::getTimeNow(){ |
| 238 return ofGetElapsedTimeMillis() - startTime; |
| 239 } |
| 240 |
| 241 |
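The new getTimeNow() helper centralises the "milliseconds since playback started" calculation that was previously written inline as ofGetElapsedTimeMillis() - startTime, so the alignment update and the onset diagnostics further down share one clock. A standalone sketch of the same pattern using std::chrono instead of openFrameworks (class and member names are illustrative only):

```cpp
#include <chrono>

// Illustrative clock wrapper: record a start point once, then report elapsed
// milliseconds relative to it, mirroring ofGetElapsedTimeMillis() - startTime.
class ElapsedClock {
public:
    void start() { startTime = std::chrono::steady_clock::now(); }

    int getTimeNow() const {
        using namespace std::chrono;
        return (int)duration_cast<milliseconds>(steady_clock::now() - startTime).count();
    }

private:
    std::chrono::steady_clock::time_point startTime = std::chrono::steady_clock::now();
};
```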
237 void AudioEventMatcher::draw(){ | 242 void AudioEventMatcher::draw(){ |
238 | 243 |
239 //MAIN DRAW FUNCTION FOR ALL | 244 //MAIN DRAW FUNCTION FOR ALL |
240 | 245 |
241 //draw some outlines in blue | 246 //draw some outlines in blue |
258 | 263 |
259 temporal.drawTempoArray(bayesLikelihoodWindow); | 264 temporal.drawTempoArray(bayesLikelihoodWindow); |
260 | 265 |
261 drawRecordedTempo(); | 266 drawRecordedTempo(); |
262 drawPlayingTempo(); | 267 drawPlayingTempo(); |
| 268 |
| 269 drawAlignmentTimes(); |
263 | 270 |
264 | 271 |
265 } | 272 } |
266 | 273 |
267 void AudioEventMatcher::drawRecordedTempo(){ | 274 void AudioEventMatcher::drawRecordedTempo(){ |
390 tmpStr += " offsetis "+ofToString(bayesianStruct.posterior.offset); | 397 tmpStr += " offsetis "+ofToString(bayesianStruct.posterior.offset); |
391 tmpStr += " screenWidth = "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis)); | 398 tmpStr += " screenWidth = "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis)); |
392 ofDrawBitmapString(tmpStr, 20,140); | 399 ofDrawBitmapString(tmpStr, 20,140); |
393 tmpStr = "best est "+ofToString(bayesianStruct.bestEstimate); | 400 tmpStr = "best est "+ofToString(bayesianStruct.bestEstimate); |
394 ofDrawBitmapString(tmpStr, 20, 180); | 401 ofDrawBitmapString(tmpStr, 20, 180); |
395 ofDrawBitmapString("screenwidth "+ofToString(screenWidthMillis), 20, 800); | 402 //ofDrawBitmapString("screenwidth "+ofToString(screenWidthMillis), 20, 800); |
396 | 403 |
397 ofSetColor(255); | 404 ofSetColor(255); |
398 tmpStr = "pitch "+ofToString(recentPitch, 2); | 405 tmpStr = "pitch "+ofToString(recentPitch, 2); |
399 tmpStr += " Nearest "+ofToString(pitchOfNearestMatch,2); | 406 tmpStr += " Nearest "+ofToString(pitchOfNearestMatch,2); |
400 tmpStr += " dist "+ofToString(distanceOfNearestMatch, 2); | 407 tmpStr += " dist "+ofToString(distanceOfNearestMatch, 2); |
409 ofDrawBitmapString(alignString, 20, 50); | 416 ofDrawBitmapString(alignString, 20, 50); |
410 ofDrawBitmapString("pos "+ofToString(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.playPosition), 200,600); | 417 ofDrawBitmapString("pos "+ofToString(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.playPosition), 200,600); |
411 | 418 |
412 } | 419 } |
413 | 420 |
| 421 |
| 422 void AudioEventMatcher::drawAlignmentTimes(){ |
| 423 ofSetColor(255); |
| 424 std::string dataString = "Live time "+ofToString(synchroniser.recordedPositionTimeSent); |
| 425 dataString += ", Reh time "+ofToString(synchroniser.recordedPositionMillis); |
| 426 ofDrawBitmapString(dataString, 10, ofGetHeight() - 20); |
| 427 |
| 428 } |
| 429 |
| 430 |
414 void AudioEventMatcher::newPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){ | 431 void AudioEventMatcher::newPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){ |
415 if (pitchIn > 0){ | 432 if (pitchIn > 0){ |
416 liveInput.addPitchEvent(pitchIn, timeIn); | 433 liveInput.addPitchEvent(pitchIn, timeIn); |
417 | 434 |
418 //printPosteriorMAPinfo(); | 435 //printPosteriorMAPinfo(); |
478 bayesianStruct.likelihood.offset = bayesianStruct.prior.offset; | 495 bayesianStruct.likelihood.offset = bayesianStruct.prior.offset; |
479 bayesianStruct.likelihood.zero();//set to zero | 496 bayesianStruct.likelihood.zero();//set to zero |
480 //double quantity = 1;// | 497 //double quantity = 1;// |
481 double quantity = 1*onsetLikelihoodToNoise;//BETTER CHANGE THIS BACK TOO..see below//likelihoodToNoiseRatio / numberOfMatches; | 498 double quantity = 1*onsetLikelihoodToNoise;//BETTER CHANGE THIS BACK TOO..see below//likelihoodToNoiseRatio / numberOfMatches; |
482 int numberOfMatchesFound = 0; | 499 int numberOfMatchesFound = 0; |
483 | 500 double nearestOnsetDistance = 1000; |
484 double startMatchingTime = bayesianStruct.likelihood.offset; | 501 double startMatchingTime = bayesianStruct.likelihood.offset; |
485 double endMatchingTime = bayesianStruct.likelihood.offset + matchWindowWidth; | 502 double endMatchingTime = bayesianStruct.likelihood.offset + matchWindowWidth; |
486 double millisTime = -1*INFINITY;//or 0 is fine | 503 double millisTime = -1*INFINITY;//or 0 is fine |
487 int checkIndex = 0; | 504 int checkIndex = 0; |
488 if (channel <= recordedTracks.numberOfAudioTracks && checkIndex < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size()){ | 505 if (channel <= recordedTracks.numberOfAudioTracks && checkIndex < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size()){ |
494 millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime; | 511 millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime; |
495 if (millisTime >= startMatchingTime && millisTime <= endMatchingTime){ | 512 if (millisTime >= startMatchingTime && millisTime <= endMatchingTime){ |
496 bayesianStruct.likelihood.addGaussianShapeFromRealTime(millisTime, onsetLikelihoodWidth, quantity); | 513 bayesianStruct.likelihood.addGaussianShapeFromRealTime(millisTime, onsetLikelihoodWidth, quantity); |
497 numberOfMatchesFound++; | 514 numberOfMatchesFound++; |
498 // printf("Adding Gaussian for onset at time %f offset %f\n", millisTime, bayesianStruct.likelihood.offset); | 515 // printf("Adding Gaussian for onset at time %f offset %f\n", millisTime, bayesianStruct.likelihood.offset); |
499 | 516 if (fabs(currentAlignmentPosition - millisTime) < nearestOnsetDistance) |
| 517 nearestOnsetDistance = currentAlignmentPosition - millisTime; |
500 }//end if within limits (changed so it now is 4 sure) | 518 }//end if within limits (changed so it now is 4 sure) |
501 } | 519 } |
502 } | 520 } |
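For context, this block builds the onset likelihood by zeroing the window, then adding a Gaussian bump (width onsetLikelihoodWidth, height scaled by onsetLikelihoodToNoise) at each recorded onset falling inside [offset, offset + matchWindowWidth]. A minimal sketch of what addGaussianShapeFromRealTime() might do over a discretised window is below; the grid layout and all names are assumptions, not the project's actual implementation.

```cpp
#include <algorithm>
#include <cmath>
#include <cstddef>
#include <vector>

// Hypothetical discretised likelihood window: bin i covers
// offset + i * resolution milliseconds.
struct LikelihoodWindow {
    std::vector<double> bins;
    double offset = 0.0;      // ms at bin 0
    double resolution = 1.0;  // ms per bin

    void zero() { std::fill(bins.begin(), bins.end(), 0.0); }

    // Rough analogue of addGaussianShapeFromRealTime(centre, width, quantity):
    // add a Gaussian bump centred on a real-time position.
    void addGaussianShape(double centreMillis, double widthMillis, double quantity) {
        for (std::size_t i = 0; i < bins.size(); ++i) {
            double t = offset + i * resolution;
            double z = (t - centreMillis) / widthMillis;
            bins[i] += quantity * std::exp(-0.5 * z * z);
        }
    }
};

// Sketch of the matching loop: one bump per recorded onset inside the window.
void buildOnsetLikelihood(LikelihoodWindow& lik, const std::vector<double>& onsetTimesMillis,
                          double matchWindowWidth, double onsetWidth, double quantity) {
    lik.zero();
    double start = lik.offset, end = lik.offset + matchWindowWidth;
    for (double t : onsetTimesMillis)
        if (t >= start && t <= end)
            lik.addGaussianShape(t, onsetWidth, quantity);
}
```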
503 | 521 |
504 if (numberOfMatchesFound > 0){ | 522 if (numberOfMatchesFound > 0){ |
515 projectedPrior = bayesianStruct.prior; | 533 projectedPrior = bayesianStruct.prior; |
516 | 534 |
517 | 535 |
518 temporal.updateTempo(channel, timeIn); | 536 temporal.updateTempo(channel, timeIn); |
519 } | 537 } |
520 | 538 int timeNow = getTimeNow(); |
| 539 |
| 540 printf("Nearest onset is %.1f time is %i and alignment %i time now %i\n", nearestOnsetDistance, (int) timeIn, (int)currentAlignmentPosition, timeNow); |
521 } | 541 } |
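The nearestOnsetDistance bookkeeping added in this revision records how far the current alignment position sits from the closest matching recorded onset, which is the alignment error the commit message refers to; the value is then printed alongside getTimeNow(). A compact sketch of that calculation, keeping the signed offset while always comparing by magnitude (the function and variable names are placeholders, not the project's API):

```cpp
#include <cmath>
#include <vector>

// Signed alignment error: positive when the alignment position is ahead of the
// nearest onset, negative when it lags behind. Returns a large sentinel
// (matching the diff's initial value of 1000) when no onsets matched.
double nearestOnsetError(double alignmentPositionMillis,
                         const std::vector<double>& matchedOnsetsMillis) {
    double best = 1000.0;                       // sentinel, as in the diff
    double bestMagnitude = std::fabs(best);
    for (double onset : matchedOnsetsMillis) {
        double signedError = alignmentPositionMillis - onset;
        if (std::fabs(signedError) < bestMagnitude) {  // compare magnitudes...
            best = signedError;                        // ...but keep the sign for reporting
            bestMagnitude = std::fabs(signedError);
        }
    }
    return best;
}
```

Logged per matched event together with the event timestamp and the current alignment position, this gives a running record of how far the tracker's alignment is from the nearest recorded onset, which is what the added printf reports.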
522 | 542 |
523 | 543 |
524 | 544 |
525 void AudioEventMatcher::matchNewPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){ | 545 void AudioEventMatcher::matchNewPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){ |