annotate src/AudioEventMatcher.cpp @ 23:bc113cd60a2f

tmp mistake
author Andrew N Robertson <andrew.robertson@eecs.qmul.ac.uk>
date Mon, 13 Feb 2012 14:58:53 +0000
parents 24c413f0f2c5
children c47ea39b830d
rev   line source
andrew@0 1 /*
andrew@0 2 * AudioEventMatcher.cpp
andrew@0 3 * MultipleAudioMatcher
andrew@0 4 *
andrew@0 5 * Created by Andrew on 31/01/2012.
andrew@0 6 * Copyright 2012 QMUL. All rights reserved.
andrew@0 7 *
andrew@0 8 */
andrew@0 9
andrew@0 10 #include "AudioEventMatcher.h"
andrew@0 11
andrew@0 12
const int matchWindowWidth = 6000;//width in ms of the Bayesian position-matching window (used by setArraySizes and matchNewOnsetEvent)
andrew@0 14
//default-construct the matcher: likelihood parameters, Bayesian array sizes
//and playback/alignment state flags
AudioEventMatcher::AudioEventMatcher(){

	//mixing weights between matched-event likelihood and uniform noise
	pitchLikelihoodToNoise = 0.6;//more noise

	onsetLikelihoodToNoise = 0.4;
	onsetLikelihoodWidth = 10;//in ms

	setArraySizes();

	usingRealTime = false;
	bayesianStruct.realTimeMode = &usingRealTime;//bayesianStruct observes this flag by pointer
	recentPitch = 0;
	currentAlignmentPosition = 0;

	followingLiveInput = true;
	startedPlaying = false;
	recordedTempoIndex = 0;
	// temporal.setUpEventTimeMatrix();
	// recordedTempoData.setUpEventTimeMatrix();
}
andrew@0 38
andrew@14 39
andrew@19 40
andrew@19 41
andrew@7 42 void AudioEventMatcher::setWindowDimensions(){
andrew@7 43 double startHeight = recordedTracks.numberOfAudioTracks * recordedTracks.trackScreenHeight;
andrew@7 44 double heightAvailable = 1 - startHeight;
andrew@19 45 heightAvailable /= NUMBER_OF_CHANNELS;
andrew@7 46
andrew@7 47 bayesPositionWindow.setToRelativeSize(0, startHeight, 1, heightAvailable);
andrew@7 48 bayesLikelihoodWindow.setToRelativeSize(0, startHeight + 1*heightAvailable, 1, heightAvailable);
andrew@7 49 bayesTempoWindow.setToRelativeSize(0, startHeight + 2*heightAvailable, 1, heightAvailable);
andrew@7 50
andrew@7 51
andrew@7 52 }
andrew@0 53
//configure the sizes/resolutions of the Bayesian distributions
void AudioEventMatcher::setArraySizes(){
	//relative speed: 200 bins at 0.01 per bin, prior centred on 1.0 (normal speed)
	bayesianStruct.resetSpeedSize(200);
	bayesianStruct.setRelativeSpeedScalar(0.01);
	bayesianStruct.setSpeedPrior(1.0);
	bayesianStruct.relativeSpeedPrior.getMaximum();//presumably refreshes the cached maximum — TODO confirm

	//position distribution spans the match window at 1 ms per bin
	bayesianStruct.resetSize(matchWindowWidth);
	bayesianStruct.setPositionDistributionScalar(1);
}
andrew@0 64
//load the recorded test audio and run a two-pass tempo analysis over it
void AudioEventMatcher::loadAudioFiles(){
	recordedTracks.loadTestAudio();
	//synchroniser needs the file length (taken from track 0) to compute position ratios
	synchroniser.fileLengthSamples = recordedTracks.loadedAudioFiles[0].fileLoader.totalNumberOfSamples;
	printf("synchroniser has %f samples\n", synchroniser.fileLengthSamples);

	//first pass gives a rough global tempo, which then seeds the prior
	//for a second, refined pass
	calculateRecordedTempoData();
	printf("\n\nFIRST PASS: FINAL recorded tempo is %f\n", recordedTempoData.playingTempo);
	setTempoPrior(recordedTempoData.playingTempo);
	calculateRecordedTempoData();//now calculate again using better prior

	printf("\n\nSECOND PASS: FINAL recorded tempo is %f\n", recordedTempoData.playingTempo);
	printf("GLOBAL TEMPO of RECORDED FILES\n");
	recordedTempoData.printTempoTimes();
}
andrew@20 79
//reset recorded-tempo tracking and seed its posterior with a narrow
//gaussian (width 3, height 1) centred on the given tempo
void AudioEventMatcher::setTempoPrior(double tempo){
	recordedTempoData.zero();
	recordedTempoData.tempoPosterior.zero();
	recordedTempoData.tempoPosterior.addGaussianShapeFromRealTime(tempo, 3, 1);
}
andrew@20 86
//merge the kick (channel 0) and snare (channel 2) onset lists of the recorded
//files in time order, feeding each onset to recordedTempoData.updateTempo
//NOTE(review): indexForOnsets[1] is initialised but never advanced — channel 1
//(presumably the pitched track) is excluded from tempo estimation; confirm intentional
void AudioEventMatcher::calculateRecordedTempoData(){
	int indexForOnsets[3];
	indexForOnsets[0] = 0;
	indexForOnsets[1] = 0;
	indexForOnsets[2] = 0;
	int kickTime, snareTime;//set to INT_MAX sentinel by setNextOnsetTime once a channel is exhausted
	while (indexForOnsets[0] < recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.chromaOnsets.size() ||
	indexForOnsets[2] < recordedTracks.loadedAudioFiles[2].fileLoader.onsetDetect.chromaOnsets.size()) {

	//setNextOnsetTime indexes the array by channel internally, so the base
	//pointer &indexForOnsets[0] is correct for both calls
	setNextOnsetTime(0, kickTime, &indexForOnsets[0]);
	setNextOnsetTime(2, snareTime, &indexForOnsets[0]);

	//process whichever onset comes first; ties go to the snare branch
	if (kickTime < snareTime){
	printf("update kick at %i\n", kickTime);
	recordedTempoData.updateTempo(0, kickTime);
	printf("recorded tempo is %f\n", recordedTempoData.playingTempo);
	indexForOnsets[0]++;
	}else {
	printf("update snare at %i\n", snareTime);
	recordedTempoData.updateTempo(2, snareTime);
	printf("recorded tempo is %f\n", recordedTempoData.playingTempo);
	indexForOnsets[2]++;
	}
	}//end while

}
andrew@20 114
andrew@20 115 void AudioEventMatcher::setNextOnsetTime(const int& channel, int& time, int* indexForOnsets){
andrew@20 116 if (indexForOnsets[channel] < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size()){
andrew@20 117 time = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[indexForOnsets[channel]].millisTime;
andrew@20 118 }
andrew@20 119 else {
andrew@20 120 time = 2147483647;//infinity
andrew@20 121 }
andrew@16 122 }
andrew@16 123
//reset all alignment, synchroniser and tempo state ready for a live performance
void AudioEventMatcher::startPlaying(){
	bayesianStruct.setStartPlaying();
	currentAlignmentPosition = 0;
	startTime = ofGetElapsedTimeMillis();//zero point for the live clock

	projectedPrior = bayesianStruct.prior;
	startedPlaying = true;
	synchroniser.reset();
	temporal.reset();

	//start from the first recorded tempo measurement
	recordedTempoIndex = 0;
	recordedTempo = recordedTempoData.globalTempo[recordedTempoIndex];

	currentSpeedRatio = 1;

	//seed the live tempo posterior around the recorded tempo (width 10)
	temporal.tempoPosterior.zero();
	temporal.tempoPosterior.addGaussianShapeFromRealTime(recordedTempo, 10, 1);

	//SET TEMPO PRIOR for Speed Ratio
	//then updated as the playing tempo evolves (see updateRecordedTempo)
	setSpeedRatioDistribution(currentSpeedRatio);
	//bayesianStruct.posterior.printArray();
}
andrew@3 147
andrew@9 148
//reset the relative-speed posterior: a unit spike at speedRatio plus a broad
//gaussian so other speeds keep some probability mass
void AudioEventMatcher::setSpeedRatioDistribution(const double& speedRatio){
	bayesianStruct.relativeSpeedPosterior.zero();
	bayesianStruct.relativeSpeedPosterior.addToIndex(bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(speedRatio), 1);
	//NOTE(review): the gaussian is centred on 1 (normal speed), not on speedRatio — confirm intentional
	bayesianStruct.relativeSpeedPosterior.addGaussianShapeFromRealTime(1, 0.06, 0.8);
}
andrew@20 154
//end of live input: stop the alignment updates and dump the gathered event times
void AudioEventMatcher::stopPlaying(){
	startedPlaying = false;
	temporal.printEventTimes();
}
andrew@15 159
andrew@22 160 void AudioEventMatcher::rescue(){
andrew@22 161 bayesianStruct.posterior.zero();
andrew@22 162 bayesianStruct.posterior.addConstant(1);
andrew@22 163 bayesianStruct.prior.zero();
andrew@22 164 bayesianStruct.prior.addConstant(1);
andrew@22 165 }
andrew@22 166
//per-frame update: move the track display, refresh the alignment estimate and
//keep the tempo distributions adapting
void AudioEventMatcher::updatePosition(){

	if (startedPlaying){
	if (!followingLiveInput)
	recordedTracks.updatePosition();//free-running playback
	else
	recordedTracks.updatePositionToMillis(currentAlignmentPosition);//follow the matcher's estimate

	updateBestAlignmentPosition();
	}

	updateRecordedTempo();

	//diffuse the tempo posterior around its MAP estimate so it can keep adapting
	temporal.tempoPosterior.addGaussianShape(temporal.tempoPosterior.MAPestimate, temporal.tempoArraySize / 4, 0.5 );
}
andrew@9 182
andrew@20 183 void AudioEventMatcher::updateRecordedTempo(){
andrew@20 184 //tempo of equivalent recorded position is updated
andrew@20 185 while(currentAlignmentPosition > recordedTempoData.globalTempoTimes[recordedTempoIndex]){
andrew@20 186 recordedTempoIndex++;
andrew@20 187 }
andrew@20 188 recordedTempo = recordedTempoData.globalTempo[recordedTempoIndex];
andrew@20 189 double tmpRatio = currentSpeedRatio;
andrew@20 190 currentSpeedRatio = temporal.playingTempo / recordedTempo;
andrew@20 191 if (currentSpeedRatio != tmpRatio)
andrew@20 192 setSpeedRatioDistribution(currentSpeedRatio);
andrew@20 193 }
andrew@20 194
//refresh currentAlignmentPosition from the posterior MAP estimate,
//extrapolated forward by elapsed time at the MAP relative speed
void AudioEventMatcher::updateBestAlignmentPosition(){
	//THIS DEALS WITH WHERE WE ARE NOW! ON THE SCREEN
	//DIFFERENT TO WHEN EVENTS COME IN AS THEY ARE TIMESTAMPED - SO EG A PITCH EVENT MAY ARRIVE 16 CHROMA FRAMES LATER - BIG DIFFERENCE

	int newTime = ofGetElapsedTimeMillis() - startTime;//ms since startPlaying
	// double tmp = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);;
	// double timetmp = (newTime - lastAlignmentTime);
	// double speedtmp = bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);
	// currentAlignmentTime = newTime;
	//MAP position plus (time since last matched event) * (MAP speed)
	currentAlignmentPosition = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);
	currentAlignmentPosition += (newTime - lastAlignmentTime) * bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);


	synchroniser.updateRecordedPosition(currentAlignmentPosition, newTime);

	synchroniser.updateOutputSpeed();

	bayesianStruct.projectDistribution(newTime, currentAlignmentPosition, projectedPrior);//prior gets updated to where we are now

	// printf("ALIGN pos %f time diff %f (now %f , last %f)speed %f :: ALIGN BEST %f\n", tmp, timetmp, (double)ofGetElapsedTimeMillis(), lastAlignmentTime, speedtmp, currentAlignmentPosition);
}
andrew@8 216
//main display: track waveforms, Bayesian distributions and debug text
void AudioEventMatcher::draw(){
	//draw some outlines in blue
	ofSetColor(20,200,200);
	bayesPositionWindow.drawOutline();
	bayesTempoWindow.drawOutline();

	//draw the scrolling audio tracks
	recordedTracks.drawTracks();

	ofSetColor(255);
	// bayesianStruct.relativeSpeedPrior.drawVector(0, 200, bayesTempoWindow);

	setScreenDisplayTimes();//work out which millisecond range is on screen
	drawBayesianDistributions();

	//bayesianStruct.posterior.drawVector(0, bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
	//bayesianStruct.posterior.drawVector(bayesianStruct.posterior.getRealTermsAsIndex(0), bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
	//bayesianStruct.relativeSpeedPosterior.drawVector(0, bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(2), bayesTempoWindow);
	//recent pitch event debug line
	string tmpStr = "pitch "+ofToString(recentPitch, 2);
	tmpStr += " Nearest "+ofToString(pitchOfNearestMatch,2);
	tmpStr += " dist "+ofToString(distanceOfNearestMatch, 2);
	tmpStr += ", Time "+ofToString(recentTime, 0);
	ofDrawBitmapString(tmpStr, 20, 20);


	//alignment / synchroniser debug line
	string alignString = " align "+ofToString(currentAlignmentPosition, 2);
	alignString += " playing "+ofToString(synchroniser.playingPositionRatio, 5);
	alignString += " pos "+ofToString(synchroniser.playingPositionMillis,0)+" ms";
	alignString += " rec pos "+ofToString(synchroniser.recordedPositionMillis,0)+" ms";
	ofDrawBitmapString(alignString, 20, 50);

	ofDrawBitmapString("pos "+ofToString(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.playPosition), 200,600);

	temporal.drawTempoArray(bayesLikelihoodWindow);

	drawRecordedTempo();
	drawPlayingTempo();

}
andrew@20 258
andrew@20 259 void AudioEventMatcher::drawRecordedTempo(){
andrew@6 260
andrew@21 261 int xTempoIndex = ofGetWidth() * (double)(recordedTempo - recordedTempoData.minimumTempoInterval)/(double)(recordedTempoData.maximumTempoInterval - recordedTempoData.minimumTempoInterval);
andrew@20 262 ofSetColor(0, 200, 0);
andrew@20 263 ofLine(xTempoIndex, bayesLikelihoodWindow.y, xTempoIndex, bayesLikelihoodWindow.y + bayesLikelihoodWindow.height);
andrew@20 264 ofDrawBitmapString(ofToString(recordedTempo), xTempoIndex, bayesLikelihoodWindow.y + 10);
andrew@20 265 }
andrew@20 266
//draw the live tempo estimate and the playing/recorded speed ratio
void AudioEventMatcher::drawPlayingTempo(){
	//purple line for MAP estimate of new intervals
	int xTempoIndex = (double)(ofGetWidth() * (temporal.playingTempo - temporal.minimumTempoInterval))/(double)(temporal.maximumTempoInterval - temporal.minimumTempoInterval);
	ofSetColor(200, 0, 200);
	ofLine(xTempoIndex, bayesLikelihoodWindow.y, xTempoIndex, bayesLikelihoodWindow.y + bayesLikelihoodWindow.height);
	ofDrawBitmapString(ofToString(temporal.playingTempo), xTempoIndex, bayesLikelihoodWindow.y + 10);

	//red line where the ratio is between playing tempo and recorded one
	int xSpeedRatioIndex = (double)(temporal.tempoPosterior.getIndexInRealTerms(currentSpeedRatio)*ofGetWidth())/(double)temporal.tempoPosterior.arraySize;
	ofSetColor(200,0,0);
	ofLine(xSpeedRatioIndex, bayesTempoWindow.y, xSpeedRatioIndex, bayesTempoWindow.y + bayesTempoWindow.height);
	//grey summary text: playing tempo, recorded tempo and their ratio
	string tmpString = "playing "+ofToString(temporal.playingTempo);
	tmpString += ", recorded "+ofToString(recordedTempo);
	tmpString += " ratio "+ofToString(currentSpeedRatio);
	ofSetColor(155,155,155);
	ofDrawBitmapString(tmpString, 20, bayesTempoWindow.y+10);

}
andrew@20 285
andrew@20 286
//convert the visible frame range of track 0 into milliseconds so all the
//distributions can be drawn against the same on-screen time axis
void AudioEventMatcher::setScreenDisplayTimes(){
	screenWidthMillis = recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.framesToMillis(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.amplitudeNumber);
	// if (!followingLiveInput){

	screenStartTimeMillis = recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.framesToMillis(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.drawParams.windowStartFrame);
	screenEndTimeMillis = screenStartTimeMillis + screenWidthMillis;

	//need PRECISION in this alignment


	/*}else{

	screenStartTimeMillis = (int)(currentAlignmentPosition/screenWidthMillis) * screenWidthMillis;
	screenEndTimeMillis = screenStartTimeMillis + screenWidthMillis;
	}*/
}
andrew@9 303
//draw the position posterior, speed posterior, per-channel likelihoods and
//priors, all mapped to the on-screen millisecond range
void AudioEventMatcher::drawBayesianDistributions(){


	int startIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenStartTimeMillis);
	int endIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenEndTimeMillis);

	bayesianStruct.posterior.drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), bayesPositionWindow);

	string tmpString = "start "+ofToString(screenStartTimeMillis)+" (index "+ofToString(startIndex)+"), end "+ofToString(screenEndTimeMillis);
	ofDrawBitmapString(tmpString, bayesPositionWindow.x+20, bayesPositionWindow.y+20);

	// bayesianStruct.likelihood.drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), bayesLikelihoodWindow);

	//speed posterior drawn over its whole array
	bayesianStruct.relativeSpeedPosterior.drawConstrainedVector(0, bayesianStruct.relativeSpeedPosterior.arraySize, 0, ofGetWidth(), bayesTempoWindow);

	string tmpStr = "zero is "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(0));
	tmpStr += " offsetis "+ofToString(bayesianStruct.posterior.offset);
	tmpStr += " screenWidth = "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis));
	ofDrawBitmapString(tmpStr, 20,140);
	tmpStr = "best est "+ofToString(bayesianStruct.bestEstimate);
	ofDrawBitmapString(tmpStr, 20, 180);

	ofDrawBitmapString("screenwidth "+ofToString(screenWidthMillis), 20, 800);

	//green line at current best estimate
	ofSetColor(0,255,0);//green scrolling line best position
	double currentEstimateIndex = (currentAlignmentPosition - screenStartTimeMillis)*ofGetWidth()/screenWidthMillis;
	ofLine(currentEstimateIndex, bayesPositionWindow.y, currentEstimateIndex, bayesPositionWindow.y + bayesPositionWindow.height);


	ofSetColor(0,255,255);//synchroniser position
	currentEstimateIndex = (synchroniser.playingPositionMillis - screenStartTimeMillis)*ofGetWidth()/screenWidthMillis;
	ofLine(currentEstimateIndex, bayesLikelihoodWindow.y, currentEstimateIndex, bayesLikelihoodWindow.y + bayesPositionWindow.height);



	//draw track by track likelihoods
	for (int i = 0; i <recordedTracks.numberOfAudioTracks;i++){
	ofSetColor(200,255,50);//channel likelihoods in yellow
	likelihoodVisualisation[i].drawConstrainedVector(likelihoodVisualisation[i].getRealTermsAsIndex(screenStartTimeMillis), likelihoodVisualisation[i].getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window);

	ofSetColor(0,255,150);//channel priors
	recentPriors[i].drawConstrainedVector(recentPriors[i].getRealTermsAsIndex(screenStartTimeMillis), recentPriors[i].getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window);


	ofSetColor(255);
	ofDrawBitmapString("recent event "+ofToString(recentEventTime[i]), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.x + 20, recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.y + recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.height - 10);
	}

	int priorStartIndex = bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis);
	int priorEndIndex = bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis);
	// ofSetColor(0,200,200);//recent prior
	// recentPrior.drawConstrainedVector(priorStartIndex, priorEndIndex, 0, ofGetWidth(), bayesPositionWindow);

	ofSetColor(255,0,100);//purple prior
	bayesianStruct.prior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);

	ofSetColor(255,0,0);//red: projected prior (extrapolated to now)
	projectedPrior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);



}
andrew@1 367
//live pitched note received (pitch in Hz, timeIn = stamped event time in ms);
//non-positive pitches from the detector are ignored
void AudioEventMatcher::newPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){
	if (pitchIn > 0){
	liveInput.addPitchEvent(pitchIn, timeIn);

	//printPosteriorMAPinfo();

	matchNewPitchEvent(channel, pitchIn, timeIn);//main pitch matching fn

	//NOTE(review): visualisation slot 1 is hard-coded regardless of channel — confirm
	likelihoodVisualisation[1] = bayesianStruct.likelihood;

	recentPitch = pitchIn;//for drawing
	recentTime = timeIn;
	}

}
andrew@2 383
andrew@6 384 void AudioEventMatcher::newKickEvent(const double& timeIn){
andrew@6 385 // liveInput.addKickEvent(timeIn);
andrew@2 386 matchNewOnsetEvent(0, timeIn);
andrew@7 387 likelihoodVisualisation[0] = bayesianStruct.likelihood;
andrew@2 388 }
andrew@2 389
andrew@6 390 void AudioEventMatcher::newKickEvent(const int& channel, const double& timeIn){
andrew@6 391 // liveInput.addKickEvent(timeIn);
andrew@6 392 matchNewOnsetEvent(channel, timeIn);
andrew@7 393 likelihoodVisualisation[0] = bayesianStruct.likelihood;
andrew@6 394 }
andrew@6 395
andrew@2 396
andrew@2 397 void AudioEventMatcher::newSnareEvent(const double& timeIn){
andrew@6 398 matchNewOnsetEvent(2, timeIn);
andrew@7 399 likelihoodVisualisation[2] = bayesianStruct.likelihood;
andrew@7 400 }
andrew@7 401
andrew@7 402
andrew@7 403 void AudioEventMatcher::newSnareEvent(const int& channel, const double& timeIn){
andrew@7 404 matchNewOnsetEvent(channel, timeIn);
andrew@7 405 likelihoodVisualisation[2] = bayesianStruct.likelihood;
andrew@2 406 }
andrew@2 407
andrew@2 408 //Needs just to set bounds for the matching process, not have TimeIn
andrew@2 409 void AudioEventMatcher::matchNewOnsetEvent(const int& channel, const double& timeIn){
andrew@3 410
andrew@6 411 bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
andrew@10 412
andrew@2 413 //start at beginning but OPTIMISE later
andrew@15 414
andrew@2 415
andrew@2 416 bayesianStruct.likelihood.offset = bayesianStruct.prior.offset;
andrew@2 417 bayesianStruct.likelihood.zero();//set to zero
andrew@2 418
andrew@2 419 double quantity = 1;//likelihoodToNoiseRatio / numberOfMatches;
andrew@2 420 int numberOfMatchesFound = 0;
andrew@2 421
andrew@2 422
andrew@10 423 double startMatchingTime = bayesianStruct.likelihood.offset;
andrew@10 424 double endMatchingTime = bayesianStruct.likelihood.offset + matchWindowWidth;
andrew@2 425
andrew@2 426 if (channel <= recordedTracks.numberOfAudioTracks){
andrew@2 427 for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
andrew@2 428 double millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
andrew@10 429 if (millisTime >= startMatchingTime && millisTime <= endMatchingTime){
andrew@14 430 bayesianStruct.likelihood.addGaussianShapeFromRealTime(millisTime, onsetLikelihoodWidth, quantity);
andrew@2 431 numberOfMatchesFound++;
andrew@6 432 // printf("Adding Gaussian for onset at time %f offset %f\n", millisTime, bayesianStruct.likelihood.offset);
andrew@2 433
andrew@2 434 }
andrew@2 435 }
andrew@2 436 }
andrew@2 437
andrew@11 438 if (numberOfMatchesFound > 0){
andrew@3 439 // bayesianStruct.likelihood.addConstant((1-likelihoodToNoiseRatio)/bayesianStruct.likelihood.length);
andrew@3 440 bayesianStruct.likelihood.addConstant(numberOfMatchesFound*(1-onsetLikelihoodToNoise)/(onsetLikelihoodToNoise*bayesianStruct.likelihood.length));
andrew@2 441 bayesianStruct.likelihood.renormalise();
andrew@2 442
andrew@8 443 bayesianStruct.calculatePosterior();
andrew@10 444 lastAlignmentTime = timeIn;//use TIMESTAMP
andrew@10 445 recentEventTime[channel] = timeIn;//ofGetElapsedTimeMillis() - startTime;
andrew@11 446
andrew@11 447 recentPriors[channel] = bayesianStruct.prior;
andrew@13 448 projectedPrior = bayesianStruct.prior;
andrew@19 449
andrew@19 450
andrew@19 451 temporal.updateTempo(channel, timeIn);
andrew@11 452 }
andrew@11 453
andrew@11 454
andrew@6 455
andrew@3 456 }
andrew@3 457
andrew@3 458
andrew@3 459
andrew@3 460 void AudioEventMatcher::matchNewPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){
andrew@3 461 //start at beginning but OPTIMISE later
andrew@10 462 /*printf("TIME %i\n", ofGetElapsedTimeMillis());
andrew@10 463 //tmp debug
andrew@10 464 updateBestAlignmentPosition();
andrew@10 465 printf("current alignment best estimate %f\n", currentAlignmentPosition);
andrew@10 466 */
andrew@6 467 bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
andrew@8 468
andrew@7 469 //set the lielihoods by matching the pitched note
andrew@7 470
andrew@15 471
andrew@3 472 int numberOfMatches = 0;
andrew@3 473 bayesianStruct.likelihood.zero();//set to zero
andrew@18 474 double newOnsetTime;
andrew@18 475 double closestDistance = INFINITY;
andrew@3 476
andrew@3 477 double quantity = 0;
andrew@3 478 if (channel <= recordedTracks.numberOfAudioTracks){
andrew@3 479 for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
andrew@3 480
andrew@3 481 if (checkMatch(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn)) {
andrew@18 482 quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, 8);
andrew@18 483
andrew@3 484 bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, 30, quantity);
andrew@3 485 recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = true;
andrew@3 486 numberOfMatches++;
andrew@3 487 }
andrew@3 488 else{
andrew@3 489 recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = false;
andrew@3 490 }
andrew@18 491 //checking nearest pitch
andrew@18 492 newOnsetTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
andrew@18 493 if (abs(newOnsetTime - currentAlignmentPosition) < closestDistance){
andrew@18 494 closestDistance = abs(newOnsetTime - currentAlignmentPosition);
andrew@18 495 pitchOfNearestMatch = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch;
andrew@18 496 distanceOfNearestMatch = quantity;
andrew@18 497 }
andrew@3 498
andrew@3 499 }
andrew@3 500 }
andrew@6 501
andrew@8 502
andrew@8 503
andrew@6 504 if (numberOfMatches > 0){//no point updating unless there is a match
andrew@7 505
andrew@6 506 bayesianStruct.likelihood.addConstant(numberOfMatches*(1-pitchLikelihoodToNoise)/(pitchLikelihoodToNoise*bayesianStruct.likelihood.length));
andrew@4 507
andrew@4 508 //tmp set likelihood constant and calculate using that
andrew@6 509 //bayesianStruct.likelihood.zero();
andrew@6 510 //bayesianStruct.likelihood.addConstant(1);
andrew@7 511
andrew@6 512 bayesianStruct.calculatePosterior();
andrew@11 513 lastAlignmentTime = timeIn;//has to use the STAMPED time
andrew@11 514 recentEventTime[channel] = timeIn;
andrew@11 515
andrew@11 516 recentPriors[channel] = bayesianStruct.prior;
andrew@13 517 projectedPrior = bayesianStruct.prior;
andrew@19 518
andrew@19 519 temporal.eventTimes[channel].push_back(timeIn);
andrew@6 520 }
andrew@4 521
andrew@11 522
andrew@1 523 }
andrew@1 524
andrew@3 525 double AudioEventMatcher::getPitchDistance(const double& pitchOne, const double& pitchTwo, const double& scale){
andrew@3 526
andrew@18 527 double scaleFactor = scale * pitchOne / 110.0;
andrew@16 528
andrew@18 529 int multiplicationFactor = 1;
andrew@18 530 if (pitchTwo > 0){
andrew@18 531 int multiplicationFactor = round(pitchOne/pitchTwo);
andrew@18 532 }
andrew@16 533
andrew@18 534 double distance = abs(pitchOne - pitchTwo*multiplicationFactor);
andrew@16 535 if (distance < scaleFactor)
andrew@16 536 distance = 1 - (distance/scaleFactor);
andrew@3 537 else
andrew@3 538 distance = 0;
andrew@3 539
andrew@3 540 // printf("[pitch distance %f vs %f = %f\n", pitchOne, pitchTwo, distance);
andrew@3 541 return distance;
andrew@3 542
andrew@3 543 }
andrew@3 544
andrew@3 545
andrew@3 546 bool AudioEventMatcher::checkMatch(const double& recordedPitch, const double& livePitch){
andrew@18 547
andrew@18 548 if (livePitch > 0){
andrew@18 549 int multiplicationFactor = (int)(round(recordedPitch/livePitch));
andrew@18 550
andrew@18 551 if (abs(recordedPitch - livePitch * multiplicationFactor) < 16)
andrew@3 552 return true;
andrew@3 553 else
andrew@3 554 return false;
andrew@18 555 }else {
andrew@18 556 return false;
andrew@18 557 }
andrew@18 558
andrew@3 559 }
andrew@3 560
andrew@3 561
andrew@1 562
andrew@1 563 void AudioEventMatcher::windowResized(const int& w, const int& h){
andrew@1 564 recordedTracks.windowResized(w,h);
andrew@3 565 bayesTempoWindow.resized(w,h);
andrew@3 566 bayesPositionWindow.resized(w,h);
andrew@3 567 }
andrew@3 568
andrew@10 569 /*
andrew@10 570
andrew@10 571 void printPosteriorMAPinfo(){ //tmp print stuff
andrew@10 572 printf("New pitch MAP post estimate now %i, ", bayesianStruct.posterior.MAPestimate);
andrew@10 573 double tmp = bayesianStruct.posterior.getMAPestimate();
andrew@10 574 printf(" getting it %f and offset %f == %f ms\n", tmp, bayesianStruct.posterior.offset, bayesianStruct.posterior.getIndexInRealTerms(tmp));
andrew@10 575
andrew@10 576 }
andrew@10 577 */
andrew@3 578