multitrack-audio-matcher: diff src/AudioEventMatcher.cpp @ 11:9a2b008c4706
Priors for each channel and a scrolling projection for the new prior are added
author    Andrew N Robertson <andrew.robertson@eecs.qmul.ac.uk>
date      Sun, 05 Feb 2012 00:00:58 +0000
parents   cbadb9d05d29
children  66783ace7506
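
Editor's note for orientation: the "scrolling projection" in this changeset shifts the current prior over score position forward by the time elapsed since the last alignment, scaled by the MAP relative-speed estimate, and draws the result as projectedPrior. Below is a minimal sketch of that idea using a hypothetical PositionDistribution type; the repository's real distribution class (with getRealTermsAsIndex, projectDistribution, etc.) is only visible from its call sites in the diff, so none of these names are its actual API.

    #include <algorithm>
    #include <cstddef>
    #include <vector>

    // Hypothetical stand-in for the matcher's distribution type: a 1-D
    // probability vector sampled over score position in milliseconds.
    struct PositionDistribution {
        std::vector<double> bins;    // probability mass per bin
        double millisPerBin = 10.0;  // resolution of the position axis

        // "Scroll" the distribution forward by elapsedMillis * relativeSpeed,
        // so the projected prior sits where playback is now expected to be.
        // Assumes forward motion (relativeSpeed >= 0).
        PositionDistribution projectForward(double elapsedMillis,
                                            double relativeSpeed) const {
            PositionDistribution out = *this;
            std::fill(out.bins.begin(), out.bins.end(), 0.0);
            const std::size_t shift = static_cast<std::size_t>(
                (elapsedMillis * relativeSpeed) / millisPerBin);
            for (std::size_t i = 0; i + shift < bins.size(); ++i)
                out.bins[i + shift] = bins[i]; // plain shift; mass past the end is dropped
            return out;
        }
    };
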
--- a/src/AudioEventMatcher.cpp	Sat Feb 04 19:59:27 2012 +0000
+++ b/src/AudioEventMatcher.cpp	Sun Feb 05 00:00:58 2012 +0000
@@ -51,6 +51,8 @@
 	bayesianStruct.setStartPlaying();
 	currentAlignmentPosition = 0;
 	startTime = ofGetElapsedTimeMillis();
+
+	projectedPrior = bayesianStruct.prior;
 	//bayesianStruct.posterior.printArray();
 }
@@ -72,10 +74,12 @@
 //	double tmp = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);;
 //	double timetmp = (newTime - lastAlignmentTime);
 //	double speedtmp = bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);
-
+//	currentAlignmentTime = newTime;
 	currentAlignmentPosition = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);
 	currentAlignmentPosition += (newTime - lastAlignmentTime) * bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);
+	bayesianStruct.projectDistribution(newTime, currentAlignmentPosition, projectedPrior);//prior gets updated to where we are now
+
 //	printf("ALIGN pos %f time diff %f (now %f , last %f)speed %f :: ALIGN BEST %f\n", tmp, timetmp, (double)ofGetElapsedTimeMillis(), lastAlignmentTime, speedtmp, currentAlignmentPosition);
 }
@@ -88,19 +92,15 @@
 	//draw the scrolling audio tracks
 	recordedTracks.drawTracks();
-
-
 	ofSetColor(255);
 //	bayesianStruct.relativeSpeedPrior.drawVector(0, 200, bayesTempoWindow);
 	setScreenDisplayTimes();
 	drawBayesianDistributions();
-
-//	bayesianStruct.posterior.drawVector(0, bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
-
+	//bayesianStruct.posterior.drawVector(0, bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
 	//bayesianStruct.posterior.drawVector(bayesianStruct.posterior.getRealTermsAsIndex(0), bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
-
-//	bayesianStruct.relativeSpeedPosterior.drawVector(0, bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(2), bayesTempoWindow);
+	//bayesianStruct.relativeSpeedPosterior.drawVector(0, bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(2), bayesTempoWindow);
 
 	ofDrawBitmapString("pitch "+ofToString(recentPitch, 2)+", Time "+ofToString(recentTime, 0), 20, 20);
@@ -157,6 +157,11 @@
 	for (int i = 0; i <recordedTracks.numberOfAudioTracks;i++){
 		ofSetColor(200,255,50);
 		likelihoodVisualisation[i].drawConstrainedVector(likelihoodVisualisation[i].getRealTermsAsIndex(screenStartTimeMillis), likelihoodVisualisation[i].getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window);
+
+		ofSetColor(0,255,150);
+		recentPriors[i].drawConstrainedVector(recentPriors[i].getRealTermsAsIndex(screenStartTimeMillis), recentPriors[i].getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window);
+
+		ofSetColor(255);
 		ofDrawBitmapString("recent event "+ofToString(recentEventTime[i]), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.x + 20, recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.y + recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.height - 10);
 	}
@@ -167,7 +172,10 @@
 	recentPrior.drawConstrainedVector(priorStartIndex, priorEndIndex, 0, ofGetWidth(), bayesPositionWindow);
 
 	ofSetColor(255,0,100);//purple prior
-	bayesianStruct.prior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesLikelihoodWindow);
+	bayesianStruct.prior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);
+
+	ofSetColor(255,0,0);
+	projectedPrior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesLikelihoodWindow);
 }
@@ -176,7 +184,7 @@
 	liveInput.addPitchEvent(pitchIn, timeIn);
 	//printPosteriorMAPinfo();
-
+
 	matchNewPitchEvent(channel, pitchIn, timeIn);//main pitch matching fn
 
 	likelihoodVisualisation[1] = bayesianStruct.likelihood;
@@ -243,14 +251,20 @@
 		}
 	}
 
+	if (numberOfMatchesFound > 0){
 //	bayesianStruct.likelihood.addConstant((1-likelihoodToNoiseRatio)/bayesianStruct.likelihood.length);
 	bayesianStruct.likelihood.addConstant(numberOfMatchesFound*(1-onsetLikelihoodToNoise)/(onsetLikelihoodToNoise*bayesianStruct.likelihood.length));
 	bayesianStruct.likelihood.renormalise();
 	bayesianStruct.calculatePosterior();
-
 	lastAlignmentTime = timeIn;//use TIMESTAMP
 	recentEventTime[channel] = timeIn;//ofGetElapsedTimeMillis() - startTime;
+
+	recentPriors[channel] = bayesianStruct.prior;
+
+	}
+
+
 }
@@ -300,10 +314,13 @@
 		//bayesianStruct.likelihood.addConstant(1);
 		bayesianStruct.calculatePosterior();
+		lastAlignmentTime = timeIn;//has to use the STAMPED time
+		recentEventTime[channel] = timeIn;
+
+		recentPriors[channel] = bayesianStruct.prior;
 	}
-	lastAlignmentTime = timeIn;//has to use the STAMPED time
-	recentEventTime[channel] = timeIn;
+
 }
 
 double AudioEventMatcher::getPitchDistance(const double& pitchOne, const double& pitchTwo, const double& scale){
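
Editor's note: the other half of the change is per-channel bookkeeping. After each posterior update, bayesianStruct.prior is copied into recentPriors[channel], so each track's window can draw the prior that was in force at its most recent event; both match paths also appear to update lastAlignmentTime and recentEventTime only when an event actually matched. A rough sketch of that pattern, with hypothetical names (Distribution, MatcherState, recordEvent) standing in for the real classes:

    #include <vector>

    // Hypothetical sketch of the bookkeeping added in this changeset;
    // Distribution stands in for the matcher's real distribution class.
    struct Distribution { std::vector<double> bins; };

    struct MatcherState {
        Distribution prior;                      // shared Bayesian prior
        std::vector<Distribution> recentPriors;  // one snapshot per input channel
        std::vector<double> recentEventTime;     // stamped time of each channel's last event

        // Called once per matched event, after the posterior is recalculated:
        void recordEvent(int channel, double timeIn) {
            recentEventTime[channel] = timeIn; // the event's timestamp, not wall-clock time
            recentPriors[channel] = prior;     // snapshot the prior this event saw
        }
    };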