annotate src/AudioEventMatcher.cpp @ 46:ba36a1721538

Added abs median calculation, match forwards and backwards paths
author Andrew N Robertson <andrew.robertson@eecs.qmul.ac.uk>
date Tue, 08 May 2012 23:16:00 +0100
parents d23685b9e766
children 5359e2c0b0fb
rev   line source
andrew@0 1 /*
andrew@0 2 * AudioEventMatcher.cpp
andrew@0 3 * MultipleAudioMathcher
andrew@0 4 *
andrew@0 5 * Created by Andrew on 31/01/2012.
andrew@0 6 * Copyright 2012 QMUL. All rights reserved.
andrew@0 7 *
andrew@0 8 */
andrew@0 9
andrew@0 10 #include "AudioEventMatcher.h"
andrew@0 11
andrew@0 12
//width of the temporal match window: onsets within this many ms of the prior
//offset are considered when building the likelihood
const int matchWindowWidth = 8000;//ms in which to match

//maximum pitch difference (Hz, after subharmonic folding) for a live pitch to
//be considered a match against a recorded onset's pitch
const float pitchCutOff = 16;//within which pitches are even considered
andrew@0 16
/// Construct the matcher: set the likelihood/noise weights for each event
/// type, size the Bayesian distributions, and reset playback/alignment state.
AudioEventMatcher::AudioEventMatcher(){

    useChromaDotProduct = false;

    printingData = false;

    //weights trading event evidence against a uniform noise floor
    pitchLikelihoodToNoise = 0.6;//more noise
    chromaLikelihoodToNoise = 0.5;//lower => more noise, higher more weight for events
    chromaLikelihoodWidth = 50;//ms round onset event

    onsetLikelihoodToNoise = 0.1;
    onsetLikelihoodWidth = 10;//in ms

    setArraySizes();

    usingRealTime = false;
    bayesianStruct.realTimeMode = &usingRealTime;//bayes struct reads this flag by pointer
    recentPitch = 0;
    currentAlignmentPosition = 0;

    followingLiveInput = true;
    startedPlaying = false;
    recordedTempoIndex = 0;

    bayesianStruct.startingWindowWidth = 100;//matchWindowWidth / 8;
    bayesianStruct.matchWindowWidth = matchWindowWidth;
    // temporal.setUpEventTimeMatrix();
    // recordedTempoData.setUpEventTimeMatrix();
}
andrew@0 46
andrew@14 47
andrew@19 48
andrew@19 49
andrew@7 50 void AudioEventMatcher::setWindowDimensions(){
andrew@7 51 double startHeight = recordedTracks.numberOfAudioTracks * recordedTracks.trackScreenHeight;
andrew@7 52 double heightAvailable = 1 - startHeight;
andrew@32 53 heightAvailable /= numberOfChannels;
andrew@7 54
andrew@7 55 bayesPositionWindow.setToRelativeSize(0, startHeight, 1, heightAvailable);
andrew@7 56 bayesLikelihoodWindow.setToRelativeSize(0, startHeight + 1*heightAvailable, 1, heightAvailable);
andrew@7 57 bayesTempoWindow.setToRelativeSize(0, startHeight + 2*heightAvailable, 1, heightAvailable);
andrew@7 58
andrew@7 59
andrew@7 60 }
andrew@0 61
andrew@0 62 void AudioEventMatcher::setArraySizes(){
andrew@0 63 bayesianStruct.resetSpeedSize(200);
andrew@0 64 bayesianStruct.setRelativeSpeedScalar(0.01);
andrew@0 65 bayesianStruct.setSpeedPrior(1.0);
andrew@0 66 bayesianStruct.relativeSpeedPrior.getMaximum();
andrew@0 67
andrew@36 68 float scalarForBayesianDistribution = 2;
andrew@36 69
andrew@36 70 bayesianStruct.resetSize(matchWindowWidth / scalarForBayesianDistribution);
andrew@36 71 bayesianStruct.setPositionDistributionScalar(2);
andrew@0 72
andrew@0 73 }
andrew@0 74
/// Load the rehearsal audio, size the synchroniser to the first file, then
/// run two tempo-estimation passes over the recorded onsets — the second pass
/// uses the first pass's global tempo as its prior for a better estimate.
void AudioEventMatcher::loadAudioFiles(){
    recordedTracks.loadTestAudio();
    synchroniser.fileLengthSamples = recordedTracks.loadedAudioFiles[0].fileLoader.totalNumberOfSamples;
    printf("synchroniser has %f samples\n", synchroniser.fileLengthSamples);

    calculateRecordedTempoData();
    printf("\n\nFIRST PASS: FINAL recorded tempo is %f\n", recordedTempoData.playingTempo);
    setTempoPrior(recordedTempoData.playingTempo);
    calculateRecordedTempoData();//now calculate again using better prior

    printf("\n\nSECOND PASS: FINAL recorded tempo is %f\n", recordedTempoData.playingTempo);
    printf("GLOBAL TEMPO of RECORDED FILES\n");
    recordedTempoData.printTempoTimes();
}
andrew@20 89
/// Reset the recorded tempo data and seed its posterior with a narrow
/// Gaussian (width 3) centred on the given tempo.
void AudioEventMatcher::setTempoPrior(double tempo){
    recordedTempoData.zero();
    recordedTempoData.tempoPosterior.zero();
    recordedTempoData.tempoPosterior.addGaussianShapeFromRealTime(tempo, 3, 1);
}
andrew@20 96
/// Estimate the recorded files' tempo by merging the kick (channel 0) and
/// snare (channel 2) onset streams in time order and feeding each onset to
/// recordedTempoData.updateTempo().
/// NOTE(review): indexForOnsets[1] is allocated but never consumed — only
/// channels 0 and 2 take part; confirm channel 1 is intentionally excluded.
void AudioEventMatcher::calculateRecordedTempoData(){
    int indexForOnsets[3];
    indexForOnsets[0] = 0;
    indexForOnsets[1] = 0;
    indexForOnsets[2] = 0;
    int kickTime, snareTime;
    //two-way merge: loop while either channel still has unconsumed onsets
    while (indexForOnsets[0] < recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.chromaOnsets.size() ||
           indexForOnsets[2] < recordedTracks.loadedAudioFiles[2].fileLoader.onsetDetect.chromaOnsets.size()) {

        //&indexForOnsets[0] is the array base; setNextOnsetTime indexes it by channel.
        //An exhausted channel gets INT_MAX so the other channel always wins the compare.
        setNextOnsetTime(0, kickTime, &indexForOnsets[0]);
        setNextOnsetTime(2, snareTime, &indexForOnsets[0]);

        if (kickTime < snareTime){
            printf("update kick at %i\n", kickTime);
            recordedTempoData.updateTempo(0, kickTime);
            printf("recorded tempo is %f\n", recordedTempoData.playingTempo);
            indexForOnsets[0]++;
        }else {
            printf("update snare at %i\n", snareTime);
            recordedTempoData.updateTempo(2, snareTime);
            printf("recorded tempo is %f\n", recordedTempoData.playingTempo);
            indexForOnsets[2]++;
        }
    }//end while
}
andrew@20 124
/// Fetch the time (ms) of the next unconsumed onset on 'channel' into 'time'.
/// When the channel's onsets are exhausted, 'time' is set to INT_MAX so the
/// caller's time-ordered merge always picks the other channel.
void AudioEventMatcher::setNextOnsetTime(const int& channel, int& time, int* indexForOnsets){
    if (indexForOnsets[channel] < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size()){
        time = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[indexForOnsets[channel]].millisTime;
    }
    else {
        time = 2147483647;//infinity (INT_MAX sentinel)
    }
}
andrew@16 133
/// Begin a live take: reset the Bayesian struct, synchroniser and temporal
/// model, seed the playing-tempo posterior from the recorded tempo, and set
/// the speed-ratio distribution to its starting value of 1.
void AudioEventMatcher::startPlaying(){
    bayesianStruct.setStartPlaying();
    currentAlignmentPosition = 0;
    startTime = ofGetElapsedTimeMillis();//reference for getTimeNow()

    projectedPrior = bayesianStruct.prior;
    startedPlaying = true;
    synchroniser.reset();
    temporal.reset();

    recordedTempoIndex = 0;
    recordedTempo = recordedTempoData.globalTempo[recordedTempoIndex];

    currentSpeedRatio = 1;

    //wide Gaussian (2000) so the live tempo can settle anywhere near the recorded one
    temporal.tempoPosterior.zero();
    temporal.tempoPosterior.addGaussianShapeFromRealTime(recordedTempo, 2000, 1);

    //SET TEMPO PRIOR for Speed Ratio
    //the update this
    setSpeedRatioDistribution(currentSpeedRatio);

    euclideanMaximumDistance = 0;

    //bayesianStruct.posterior.printArray();
}
andrew@3 160
andrew@9 161
/// Rebuild the relative-speed posterior as a mixture of two Gaussians
/// centred on ratio 1 (one broad, one narrow).
/// NOTE(review): the speedRatio parameter is currently unused — both
/// Gaussians are hard-coded at 1 (see the commented-out line that did use
/// it); confirm whether centring on speedRatio is still intended.
void AudioEventMatcher::setSpeedRatioDistribution(const double& speedRatio){
    //here is the speed combo actually used
    bayesianStruct.relativeSpeedPosterior.zero();
    // bayesianStruct.relativeSpeedPosterior.addToIndex(bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(speedRatio), 1);
    bayesianStruct.relativeSpeedPosterior.addGaussianShapeFromRealTime(1, 0.1, 3);
    bayesianStruct.relativeSpeedPosterior.addGaussianShapeFromRealTime(1, 0.02, 2);
}
andrew@20 169
/// Stop the live take; alignment updates cease until startPlaying() again.
void AudioEventMatcher::stopPlaying(){
    startedPlaying = false;
    //temporal.printEventTimes();
}
andrew@15 174
andrew@22 175 void AudioEventMatcher::rescue(){
andrew@22 176 bayesianStruct.posterior.zero();
andrew@22 177 bayesianStruct.posterior.addConstant(1);
andrew@22 178 bayesianStruct.prior.zero();
andrew@22 179 bayesianStruct.prior.addConstant(1);
andrew@22 180 }
andrew@22 181
/// Per-frame update: move the track display to the current alignment,
/// refresh the alignment estimate, track the recorded tempo, and re-widen
/// the tempo posterior around its MAP estimate so it keeps adapting.
void AudioEventMatcher::updatePosition(){

    if (startedPlaying){
        if (!followingLiveInput)
            recordedTracks.updatePosition();
        else
            recordedTracks.updatePositionToMillis(currentAlignmentPosition);

        updateBestAlignmentPosition();
    }

    updateRecordedTempo();

    //smooth/re-widen the tempo posterior around its current MAP estimate
    temporal.tempoPosterior.addGaussianShape(temporal.tempoPosterior.MAPestimate, temporal.tempoArraySize / 4, 0.5 );
}
andrew@9 197
andrew@20 198 void AudioEventMatcher::updateRecordedTempo(){
andrew@20 199 //tempo of equivalent recorded position is updated
andrew@37 200 if (recordedTempoIndex < recordedTempoData.globalTempoTimes.size()){//if for debug
andrew@20 201 while(currentAlignmentPosition > recordedTempoData.globalTempoTimes[recordedTempoIndex]){
andrew@20 202 recordedTempoIndex++;
andrew@20 203 }
andrew@20 204 recordedTempo = recordedTempoData.globalTempo[recordedTempoIndex];
andrew@20 205 double tmpRatio = currentSpeedRatio;
andrew@20 206 currentSpeedRatio = temporal.playingTempo / recordedTempo;
andrew@20 207 if (currentSpeedRatio != tmpRatio)
andrew@20 208 setSpeedRatioDistribution(currentSpeedRatio);
andrew@37 209
andrew@37 210 }//end if to prevent debug crash
andrew@20 211 }
andrew@20 212
/// Refresh currentAlignmentPosition for screen time "now": take the MAP
/// estimate of the position posterior and extrapolate it forward by the
/// elapsed time scaled by the MAP relative speed, then feed the result to
/// the synchroniser and project the prior to the present.
void AudioEventMatcher::updateBestAlignmentPosition(){
    //THIS DEALS WITH WHERE WE ARE NOW! ON THE SCREEN
    //DIFFERENT TO WHEN EVENTS COME IN AS THEY ARE TIMESTAMPED - SO EG A PITCH EVENT MAY ARRIVE 16 CHROMA FRAMES LATER - BIG DIFFERENCE

    int newTime = getTimeNow();
    // double tmp = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);;
    // double timetmp = (newTime - lastAlignmentTime);
    // double speedtmp = bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);
    // currentAlignmentTime = newTime;
    //MAP position + elapsed-time extrapolation at the MAP speed ratio
    currentAlignmentPosition = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);
    currentAlignmentPosition += (newTime - lastAlignmentTime) * bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);

    synchroniser.updateRecordedPosition(currentAlignmentPosition, newTime);

    synchroniser.updateOutputSpeed();

    bayesianStruct.projectDistribution(newTime, currentAlignmentPosition, projectedPrior);//prior gets updated to where we are now

    // printf("updateBestAlignment:: alignment %i:: %i\n", newTime, (int) currentAlignmentPosition);

    // printf("ALIGN pos %f time diff %f (now %f , last %f)speed %f :: ALIGN BEST %f\n", tmp, timetmp, (double)ofGetElapsedTimeMillis(), lastAlignmentTime, speedtmp, currentAlignmentPosition);
}
andrew@8 236
/// Milliseconds elapsed since startPlaying() set startTime.
int AudioEventMatcher::getTimeNow(){
    return ofGetElapsedTimeMillis() - startTime;
}
andrew@45 240
andrew@45 241
/// Main draw: scrolling audio tracks, Bayesian distributions, tempo arrays
/// and the alignment time readout.
void AudioEventMatcher::draw(){

    //MAIN DRAW FUNCTION FOR ALL

    //draw some outlines in blue
    ofSetColor(20,200,200);
    // bayesPositionWindow.drawOutline();
    // bayesTempoWindow.drawOutline();

    //draw the scrolling audio tracks
    recordedTracks.drawTracks();

    ofSetColor(255);
    // bayesianStruct.relativeSpeedPrior.drawVector(0, 200, bayesTempoWindow);

    //compute the millis range currently visible, then draw distributions over it
    setScreenDisplayTimes();
    drawBayesianDistributions();

    //bayesianStruct.posterior.drawVector(0, bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
    //bayesianStruct.posterior.drawVector(bayesianStruct.posterior.getRealTermsAsIndex(0), bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
    //bayesianStruct.relativeSpeedPosterior.drawVector(0, bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(2), bayesTempoWindow);

    temporal.drawTempoArray(bayesLikelihoodWindow);

    drawRecordedTempo();
    drawPlayingTempo();

    drawAlignmentTimes();

}
andrew@20 273
andrew@20 274 void AudioEventMatcher::drawRecordedTempo(){
andrew@6 275
andrew@21 276 int xTempoIndex = ofGetWidth() * (double)(recordedTempo - recordedTempoData.minimumTempoInterval)/(double)(recordedTempoData.maximumTempoInterval - recordedTempoData.minimumTempoInterval);
andrew@20 277 ofSetColor(0, 200, 0);
andrew@20 278 ofLine(xTempoIndex, bayesLikelihoodWindow.y, xTempoIndex, bayesLikelihoodWindow.y + bayesLikelihoodWindow.height);
andrew@20 279 ofDrawBitmapString(ofToString(recordedTempo), xTempoIndex, bayesLikelihoodWindow.y + 10);
andrew@20 280 }
andrew@20 281
/// Purple line for the live playing tempo (MAP of new inter-onset
/// intervals), red line for the playing/recorded speed ratio, plus a grey
/// text summary of both tempi and the ratio.
void AudioEventMatcher::drawPlayingTempo(){
    //purple line for MAP estimate of new intervals
    int xTempoIndex = (double)(ofGetWidth() * (temporal.playingTempo - temporal.minimumTempoInterval))/(double)(temporal.maximumTempoInterval - temporal.minimumTempoInterval);
    ofSetColor(200, 0, 200);
    ofLine(xTempoIndex, bayesLikelihoodWindow.y, xTempoIndex, bayesLikelihoodWindow.y + bayesLikelihoodWindow.height);
    ofDrawBitmapString(ofToString(temporal.playingTempo), xTempoIndex, bayesLikelihoodWindow.y + 10);

    //red line where the ratio is between playing tempo and recorded one
    int xSpeedRatioIndex = (double)(temporal.tempoPosterior.getIndexInRealTerms(currentSpeedRatio)*ofGetWidth())/(double)temporal.tempoPosterior.arraySize;
    ofSetColor(200,0,0);
    ofLine(xSpeedRatioIndex, bayesTempoWindow.y, xSpeedRatioIndex, bayesTempoWindow.y + bayesTempoWindow.height);
    string tmpString = "playing "+ofToString(temporal.playingTempo);
    tmpString += ", recorded "+ofToString(recordedTempo);
    tmpString += " ratio "+ofToString(currentSpeedRatio);
    ofSetColor(155,155,155);
    ofDrawBitmapString(tmpString, 20, bayesTempoWindow.y+10);

}
andrew@20 300
andrew@20 301
/// Compute the millis range currently visible on screen from the first
/// track's frame counters: total width, start and end times.
void AudioEventMatcher::setScreenDisplayTimes(){
    screenWidthMillis = recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.framesToMillis(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.amplitudeNumber);
    // if (!followingLiveInput){

    screenStartTimeMillis = recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.framesToMillis(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.drawParams.windowStartFrame);
    screenEndTimeMillis = screenStartTimeMillis + screenWidthMillis;

    //need PRECISION in this alignment


    /*}else{

    screenStartTimeMillis = (int)(currentAlignmentPosition/screenWidthMillis) * screenWidthMillis;
    screenEndTimeMillis = screenStartTimeMillis + screenWidthMillis;
    }*/
}
andrew@9 318
/// Draw the position window, the relative-speed posterior in the tempo
/// window, the per-track likelihoods/priors, and the debug info overlay.
void AudioEventMatcher::drawBayesianDistributions(){

    drawPositionWindow();

    // bayesianStruct.likelihood.drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), bayesLikelihoodWindow);

    //relative speed posterior across the full tempo window
    bayesianStruct.relativeSpeedPosterior.drawConstrainedVector(0, bayesianStruct.relativeSpeedPosterior.arraySize, 0, ofGetWidth(), bayesTempoWindow);

    drawTrackLikelihoods();

    // int priorStartIndex = bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis);
    // int priorEndIndex = bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis);
    // ofSetColor(0,200,200);//recent prior
    // recentPrior.drawConstrainedVector(priorStartIndex, priorEndIndex, 0, ofGetWidth(), bayesPositionWindow);

    drawInfo();

}
andrew@32 340
/// Draw the position window: the posterior (magenta), the current alignment
/// estimate (green line), the synchroniser's playing position (cyan line),
/// the prior (purple) and the projected prior (red), all mapped to the
/// visible screen time range.
void AudioEventMatcher::drawPositionWindow(){
    int startIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenStartTimeMillis);
    int endIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenEndTimeMillis);
    string tmpString = "start "+ofToString(screenStartTimeMillis)+" (index "+ofToString(startIndex)+"), end "+ofToString(screenEndTimeMillis);
    ofDrawBitmapString(tmpString, bayesPositionWindow.x+20, bayesPositionWindow.y+20);

    //draw posterior in the bayes position window
    ofSetColor(255,0,255);
    bayesianStruct.posterior.drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), bayesPositionWindow);

    //green line at current best estimate
    ofSetColor(0,255,0);//green scrolling line best position
    double currentEstimateIndex = (currentAlignmentPosition - screenStartTimeMillis)*ofGetWidth()/screenWidthMillis;
    ofLine(currentEstimateIndex, bayesPositionWindow.y, currentEstimateIndex, bayesPositionWindow.y + bayesPositionWindow.height);

    ofSetColor(0,255,255);//synchroniser position
    //NOTE(review): this line uses bayesLikelihoodWindow.y with
    //bayesPositionWindow.height — looks like a window mix-up; confirm intended
    currentEstimateIndex = (synchroniser.playingPositionMillis - screenStartTimeMillis)*ofGetWidth()/screenWidthMillis;
    ofLine(currentEstimateIndex, bayesLikelihoodWindow.y, currentEstimateIndex, bayesLikelihoodWindow.y + bayesPositionWindow.height);

    ofSetColor(255,0,100);//purple prior
    bayesianStruct.prior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);

    ofSetColor(255,0,0);//projected prior in red
    projectedPrior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);

    //draw pitch
    ofSetColor(0,100,255);
    //NOTE(review): 'index' is computed but never used — apparently a
    //placeholder for drawing the recent pitch event; confirm before removing
    int index = getScreenWidthIndexOfEventTime(recentPitchEventTime);
    //this window would be used (recordedTracks.loadedAudioFiles[1].fileLoader.onsetDetect.window);

}
andrew@32 374
andrew@37 375 int AudioEventMatcher::getScreenWidthIndexOfEventTime(const double& time){
andrew@37 376 return (time - screenStartTimeMillis)*ofGetWidth()/screenWidthMillis;
andrew@37 377 }
andrew@37 378
/// For each audio track, draw its most recent likelihood (yellow) and prior
/// (green) inside the track's own window, plus the time of its last event.
void AudioEventMatcher::drawTrackLikelihoods(){
    //draw track by track likelihoods
    for (int i = 0; i <recordedTracks.numberOfAudioTracks;i++){
        ofSetColor(200,255,50);//channel likelihoods in yellow
        likelihoodVisualisation[i].drawConstrainedVector(likelihoodVisualisation[i].getRealTermsAsIndex(screenStartTimeMillis), likelihoodVisualisation[i].getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window);

        ofSetColor(0,255,150);//channel priors
        recentPriors[i].drawConstrainedVector(recentPriors[i].getRealTermsAsIndex(screenStartTimeMillis), recentPriors[i].getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window);

        ofSetColor(255);
        ofDrawBitmapString("recent event "+ofToString(recentEventTime[i]), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.x + 20, recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.y + recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.height - 10);
    }
}
andrew@8 393
andrew@8 394
andrew@32 395 void AudioEventMatcher::drawInfo(){
andrew@32 396 string tmpStr = "zero is "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(0));
andrew@32 397 tmpStr += " offsetis "+ofToString(bayesianStruct.posterior.offset);
andrew@32 398 tmpStr += " screenWidth = "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis));
andrew@32 399 ofDrawBitmapString(tmpStr, 20,140);
andrew@32 400 tmpStr = "best est "+ofToString(bayesianStruct.bestEstimate);
andrew@32 401 ofDrawBitmapString(tmpStr, 20, 180);
andrew@45 402 //ofDrawBitmapString("screenwidth "+ofToString(screenWidthMillis), 20, 800);
andrew@11 403
andrew@32 404 ofSetColor(255);
andrew@32 405 tmpStr = "pitch "+ofToString(recentPitch, 2);
andrew@32 406 tmpStr += " Nearest "+ofToString(pitchOfNearestMatch,2);
andrew@32 407 tmpStr += " dist "+ofToString(distanceOfNearestMatch, 2);
andrew@37 408 tmpStr += ", Time "+ofToString(recentPitchEventTime, 0);
andrew@32 409 ofDrawBitmapString(tmpStr, 20, 20);
andrew@7 410
andrew@39 411 string alignString = "align "+ofToString(currentAlignmentPosition, 2);//same as synchroniser-recordedposition
andrew@32 412 alignString += " playing "+ofToString(synchroniser.playingPositionRatio, 5);
andrew@39 413 alignString += " pos "+ofToString(synchroniser.playingPositionMillis,0)+" ms";//playing position in file - causal correction
andrew@39 414 alignString += " rec pos "+ofToString(synchroniser.recordedPositionMillis,0)+" ms";//currentAlignmentPosition in rehearsal
andrew@39 415 alignString += "playing time "+ofToString(synchroniser.recordedPositionTimeSent, 0)+" ms";//playing time since begining of live take
andrew@32 416 ofDrawBitmapString(alignString, 20, 50);
andrew@32 417 ofDrawBitmapString("pos "+ofToString(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.playPosition), 200,600);
andrew@19 418
andrew@1 419 }
andrew@1 420
andrew@45 421
andrew@45 422 void AudioEventMatcher::drawAlignmentTimes(){
andrew@45 423 ofSetColor(255);
andrew@45 424 std::string dataString = "Live time "+ofToString(synchroniser.recordedPositionTimeSent);
andrew@45 425 dataString += ", Reh time "+ofToString(synchroniser.recordedPositionMillis);
andrew@45 426 ofDrawBitmapString(dataString, 10, ofGetHeight() - 20);
andrew@45 427
andrew@45 428 }
andrew@45 429
andrew@45 430
andrew@6 431 void AudioEventMatcher::newPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){
andrew@7 432 if (pitchIn > 0){
andrew@1 433 liveInput.addPitchEvent(pitchIn, timeIn);
andrew@4 434
andrew@10 435 //printPosteriorMAPinfo();
andrew@11 436
andrew@7 437 matchNewPitchEvent(channel, pitchIn, timeIn);//main pitch matching fn
andrew@7 438
andrew@7 439 likelihoodVisualisation[1] = bayesianStruct.likelihood;
andrew@7 440
andrew@7 441 recentPitch = pitchIn;//for drawing
andrew@37 442 recentPitchEventTime = timeIn;
andrew@7 443 }
andrew@32 444 }
andrew@32 445
andrew@32 446
/// Handle an incoming live chroma frame: optionally print it, run the
/// chroma matcher, and keep the resulting likelihood for display.
void AudioEventMatcher::newChromaEvent(const int& channel, float* chromaIn, const double& timeIn){

    // could add event to the liveInput list? as in pitch event
    if (printingData){
        printf("match chroma channel %i\n", channel);
        for (int i = 0;i < 12;i++){//12 chroma bins
            printf("chroma in[%i] = %f\n", i, chromaIn[i]);
        }
    }

    matchNewChromaEvent(channel, chromaIn, timeIn);//main pitch matching fn

    likelihoodVisualisation[channel] = bayesianStruct.likelihood;

}
andrew@2 463
andrew@32 464
andrew@6 465 void AudioEventMatcher::newKickEvent(const double& timeIn){
andrew@6 466 // liveInput.addKickEvent(timeIn);
andrew@2 467 matchNewOnsetEvent(0, timeIn);
andrew@7 468 likelihoodVisualisation[0] = bayesianStruct.likelihood;
andrew@2 469 }
andrew@2 470
/// Match a kick onset on the given channel and keep its likelihood for display.
/// NOTE(review): the visualisation slot is hard-coded to [0] even though the
/// channel is a parameter — confirm callers always pass channel 0 here.
void AudioEventMatcher::newKickEvent(const int& channel, const double& timeIn){
    // liveInput.addKickEvent(timeIn);
    matchNewOnsetEvent(channel, timeIn);
    likelihoodVisualisation[0] = bayesianStruct.likelihood;
}
andrew@6 476
andrew@2 477
andrew@2 478 void AudioEventMatcher::newSnareEvent(const double& timeIn){
andrew@6 479 matchNewOnsetEvent(2, timeIn);
andrew@7 480 likelihoodVisualisation[2] = bayesianStruct.likelihood;
andrew@7 481 }
andrew@7 482
andrew@7 483
/// Match a snare onset on the given channel and keep its likelihood for display.
/// NOTE(review): the visualisation slot is hard-coded to [2] even though the
/// channel is a parameter — confirm callers always pass channel 2 here.
void AudioEventMatcher::newSnareEvent(const int& channel, const double& timeIn){
    matchNewOnsetEvent(channel, timeIn);
    likelihoodVisualisation[2] = bayesianStruct.likelihood;
}
andrew@2 488
andrew@2 489 //Needs just to set bounds for the matching process, not have TimeIn
andrew@2 490 void AudioEventMatcher::matchNewOnsetEvent(const int& channel, const double& timeIn){
andrew@3 491
andrew@6 492 bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
andrew@10 493
andrew@2 494 //start at beginning but OPTIMISE later
andrew@2 495 bayesianStruct.likelihood.offset = bayesianStruct.prior.offset;
andrew@2 496 bayesianStruct.likelihood.zero();//set to zero
andrew@36 497 //double quantity = 1;//
andrew@36 498 double quantity = 1*onsetLikelihoodToNoise;//BETTER CHANGE THIS BACK TOO..see below//likelihoodToNoiseRatio / numberOfMatches;
andrew@2 499 int numberOfMatchesFound = 0;
andrew@45 500 double nearestOnsetDistance = 1000;
andrew@10 501 double startMatchingTime = bayesianStruct.likelihood.offset;
andrew@10 502 double endMatchingTime = bayesianStruct.likelihood.offset + matchWindowWidth;
andrew@32 503 double millisTime = -1*INFINITY;//or 0 is fine
andrew@32 504 int checkIndex = 0;
andrew@36 505 if (channel <= recordedTracks.numberOfAudioTracks && checkIndex < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size()){
andrew@32 506 while (millisTime < startMatchingTime) {
andrew@32 507 millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[checkIndex].millisTime;
andrew@32 508 checkIndex++;
andrew@32 509 }
andrew@32 510 for (int i = checkIndex;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size() && millisTime <= endMatchingTime;i++){
andrew@32 511 millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
andrew@10 512 if (millisTime >= startMatchingTime && millisTime <= endMatchingTime){
andrew@14 513 bayesianStruct.likelihood.addGaussianShapeFromRealTime(millisTime, onsetLikelihoodWidth, quantity);
andrew@2 514 numberOfMatchesFound++;
andrew@6 515 // printf("Adding Gaussian for onset at time %f offset %f\n", millisTime, bayesianStruct.likelihood.offset);
andrew@45 516 if (fabs(currentAlignmentPosition - millisTime) < nearestOnsetDistance)
andrew@45 517 nearestOnsetDistance = currentAlignmentPosition - millisTime;
andrew@32 518 }//end if within limits (changed so it now is 4 sure)
andrew@2 519 }
andrew@2 520 }
andrew@2 521
andrew@11 522 if (numberOfMatchesFound > 0){
andrew@3 523 // bayesianStruct.likelihood.addConstant((1-likelihoodToNoiseRatio)/bayesianStruct.likelihood.length);
andrew@36 524 // bayesianStruct.likelihood.addConstant(numberOfMatchesFound*(1-onsetLikelihoodToNoise)/(onsetLikelihoodToNoise*bayesianStruct.likelihood.length));
andrew@36 525 bayesianStruct.likelihood.addConstant(numberOfMatchesFound*(1-onsetLikelihoodToNoise)/(bayesianStruct.likelihood.length));//BETTER CHANGE THIS BACK...
andrew@2 526 bayesianStruct.likelihood.renormalise();
andrew@2 527
andrew@8 528 bayesianStruct.calculatePosterior();
andrew@10 529 lastAlignmentTime = timeIn;//use TIMESTAMP
andrew@10 530 recentEventTime[channel] = timeIn;//ofGetElapsedTimeMillis() - startTime;
andrew@11 531
andrew@11 532 recentPriors[channel] = bayesianStruct.prior;
andrew@13 533 projectedPrior = bayesianStruct.prior;
andrew@19 534
andrew@19 535
andrew@19 536 temporal.updateTempo(channel, timeIn);
andrew@11 537 }
andrew@45 538 int timeNow = getTimeNow();
andrew@11 539
andrew@45 540 printf("Nearest onset is %.1f time is %i and alignemnt %i time now %i\n", nearestOnsetDistance, (int) timeIn, (int)currentAlignmentPosition, timeNow);
andrew@3 541 }
andrew@3 542
andrew@3 543
andrew@3 544
andrew@3 545 void AudioEventMatcher::matchNewPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){
andrew@3 546 //start at beginning but OPTIMISE later
andrew@10 547 /*printf("TIME %i\n", ofGetElapsedTimeMillis());
andrew@10 548 //tmp debug
andrew@10 549 updateBestAlignmentPosition();
andrew@10 550 printf("current alignment best estimate %f\n", currentAlignmentPosition);
andrew@10 551 */
andrew@6 552 bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
andrew@8 553
andrew@7 554 //set the lielihoods by matching the pitched note
andrew@7 555
andrew@15 556
andrew@3 557 int numberOfMatches = 0;
andrew@3 558 bayesianStruct.likelihood.zero();//set to zero
andrew@18 559 double newOnsetTime;
andrew@18 560 double closestDistance = INFINITY;
andrew@3 561
andrew@3 562 double quantity = 0;
andrew@32 563 double totalLikelihoodAdded = 0;
andrew@3 564 if (channel <= recordedTracks.numberOfAudioTracks){
andrew@3 565 for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
andrew@3 566
andrew@3 567 if (checkMatch(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn)) {
andrew@32 568 quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, 12);
andrew@18 569
andrew@3 570 bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, 30, quantity);
andrew@3 571 recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = true;
andrew@3 572 numberOfMatches++;
andrew@32 573 totalLikelihoodAdded += quantity;
andrew@3 574 }
andrew@3 575 else{
andrew@3 576 recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = false;
andrew@3 577 }
andrew@18 578 //checking nearest pitch
andrew@18 579 newOnsetTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
andrew@18 580 if (abs(newOnsetTime - currentAlignmentPosition) < closestDistance){
andrew@18 581 closestDistance = abs(newOnsetTime - currentAlignmentPosition);
andrew@18 582 pitchOfNearestMatch = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch;
andrew@18 583 distanceOfNearestMatch = quantity;
andrew@18 584 }
andrew@3 585
andrew@3 586 }
andrew@3 587 }
andrew@6 588
andrew@8 589
andrew@8 590
andrew@37 591 if (numberOfMatches > 0 && totalLikelihoodAdded > 0){//no point updating unless there is a match
andrew@32 592 //replacing numberOfMatches with totalLike below...
andrew@37 593 //bug here was that if totaladded = 0, we add then zero likelihood
andrew@37 594 bayesianStruct.likelihood.addConstant(totalLikelihoodAdded*(1-pitchLikelihoodToNoise)/(bayesianStruct.likelihood.length));
andrew@37 595 // bayesianStruct.likelihood.addConstant(totalLikelihoodAdded*(1-pitchLikelihoodToNoise)/(pitchLikelihoodToNoise*bayesianStruct.likelihood.length));
andrew@4 596
andrew@4 597 //tmp set likelihood constant and calculate using that
andrew@6 598 //bayesianStruct.likelihood.zero();
andrew@6 599 //bayesianStruct.likelihood.addConstant(1);
andrew@7 600
andrew@6 601 bayesianStruct.calculatePosterior();
andrew@11 602 lastAlignmentTime = timeIn;//has to use the STAMPED time
andrew@11 603 recentEventTime[channel] = timeIn;
andrew@11 604
andrew@11 605 recentPriors[channel] = bayesianStruct.prior;
andrew@13 606 projectedPrior = bayesianStruct.prior;
andrew@19 607
andrew@19 608 temporal.eventTimes[channel].push_back(timeIn);
andrew@6 609 }
andrew@4 610
andrew@11 611
andrew@1 612 }
andrew@1 613
andrew@3 614 double AudioEventMatcher::getPitchDistance(const double& pitchOne, const double& pitchTwo, const double& scale){
andrew@3 615
andrew@18 616 double scaleFactor = scale * pitchOne / 110.0;
andrew@16 617
andrew@18 618 int multiplicationFactor = 1;
andrew@18 619 if (pitchTwo > 0){
andrew@32 620 multiplicationFactor = round(pitchOne/pitchTwo);
andrew@18 621 }
andrew@16 622
andrew@18 623 double distance = abs(pitchOne - pitchTwo*multiplicationFactor);
andrew@16 624 if (distance < scaleFactor)
andrew@16 625 distance = 1 - (distance/scaleFactor);
andrew@3 626 else
andrew@3 627 distance = 0;
andrew@3 628
andrew@32 629 //printf("[pitch distance %f vs %f, factor %i = %f\n", pitchOne, pitchTwo, multiplicationFactor, distance);
andrew@3 630 return distance;
andrew@3 631
andrew@3 632 }
andrew@3 633
andrew@3 634
andrew@3 635 bool AudioEventMatcher::checkMatch(const double& recordedPitch, const double& livePitch){
andrew@18 636
andrew@18 637 if (livePitch > 0){
andrew@18 638 int multiplicationFactor = (int)(round(recordedPitch/livePitch));
andrew@18 639
andrew@32 640 if (abs(recordedPitch - livePitch * multiplicationFactor) < pitchCutOff)
andrew@3 641 return true;
andrew@3 642 else
andrew@3 643 return false;
andrew@18 644 }else {
andrew@18 645 return false;
andrew@18 646 }
andrew@18 647
andrew@3 648 }
andrew@3 649
andrew@3 650
andrew@32 651 void AudioEventMatcher::matchNewChromaEvent(const int& channel, float* chromaIn, const double& timeIn){
andrew@32 652 //start at beginning but OPTIMISE later
andrew@32 653
andrew@32 654 bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
andrew@32 655
andrew@32 656 //set the likelihoods by matching the pitched note
andrew@32 657
andrew@32 658 int numberOfMatches = 0;
andrew@32 659 bayesianStruct.likelihood.zero();//set to zero
andrew@32 660 double newOnsetTime;
andrew@32 661 double closestDistance = INFINITY;
andrew@32 662
andrew@32 663 double quantity = 1;
andrew@32 664 double totalLikelihoodAdded = 0;
andrew@32 665
andrew@32 666 double startMatchingTime = bayesianStruct.likelihood.offset;
andrew@32 667 double endMatchingTime = bayesianStruct.likelihood.offset + matchWindowWidth;
andrew@32 668 double millisTime = -1*INFINITY;//or 0 is fine
andrew@32 669
andrew@32 670 int checkIndex = 0;
andrew@37 671 if (channel <= recordedTracks.numberOfAudioTracks && checkIndex < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size()){
andrew@37 672
andrew@32 673 while (millisTime < startMatchingTime) {
andrew@32 674 millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[checkIndex].millisTime;
andrew@32 675 checkIndex++;
andrew@32 676 }//go up to where we need to check from fast
andrew@32 677
andrew@32 678 for (int i = checkIndex;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size() && millisTime <= endMatchingTime;i++){
andrew@32 679 millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
andrew@32 680
andrew@32 681 if (millisTime >= startMatchingTime && millisTime <= endMatchingTime){
andrew@35 682
andrew@35 683 if (useChromaDotProduct)
andrew@35 684 quantity = getChromaDotProductDistance(chromaIn, &recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].chromaValues[0]);
andrew@35 685 else
andrew@35 686 quantity = getChromaEuclideanDistance(chromaIn, &recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].chromaValues[0]);
andrew@35 687
andrew@35 688
andrew@32 689 bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, chromaLikelihoodWidth, quantity);
andrew@32 690
andrew@32 691 // bayesianStruct.likelihood.addGaussianShapeFromRealTime(millisTime, onsetLikelihoodWidth, quantity);
andrew@32 692 numberOfMatches++;
andrew@32 693 totalLikelihoodAdded += quantity;
andrew@37 694
andrew@37 695 //printf("Adding CHROMA Gaussian for onset at time %.1f dist %.3f\n", millisTime, quantity);
andrew@32 696
andrew@32 697 }//end if within limits (changed so it now is 4 sure)
andrew@32 698 }
andrew@32 699 }
andrew@32 700
andrew@32 701
andrew@37 702 if (numberOfMatches > 0 && totalLikelihoodAdded > 0){//no point updating unless there is a match
andrew@32 703 //replacing numberOfMatches with totalLike below...
andrew@32 704
andrew@32 705 printf("CHROMA HAS %i MATCHES\n", numberOfMatches);
andrew@32 706
andrew@37 707 bayesianStruct.likelihood.addConstant(totalLikelihoodAdded*(1-chromaLikelihoodToNoise)/(bayesianStruct.likelihood.length));
andrew@37 708 //previous way
andrew@37 709 // bayesianStruct.likelihood.addConstant(totalLikelihoodAdded*(1-chromaLikelihoodToNoise)/(chromaLikelihoodToNoise*bayesianStruct.likelihood.length));
andrew@32 710
andrew@32 711 bayesianStruct.calculatePosterior();
andrew@32 712 lastAlignmentTime = timeIn;//has to use the STAMPED time
andrew@32 713 recentEventTime[channel] = timeIn;
andrew@32 714
andrew@32 715 recentPriors[channel] = bayesianStruct.prior;
andrew@32 716 projectedPrior = bayesianStruct.prior;
andrew@32 717
andrew@32 718 temporal.eventTimes[channel].push_back(timeIn);
andrew@32 719 }
andrew@32 720
andrew@32 721 }
andrew@32 722
andrew@32 723
andrew@35 724 double AudioEventMatcher::getChromaDotProductDistance(float* chromaOne, float* chromaTwo){
andrew@32 725 double distance = 0;
andrew@32 726 double total = 0;
andrew@32 727 for (int i = 0;i < 12;i++){
andrew@32 728 distance += chromaOne[i]*chromaTwo[i];
andrew@32 729 total += chromaOne[i]*chromaOne[i] + (chromaTwo[i]*chromaTwo[i]);
andrew@32 730 }
andrew@32 731
andrew@35 732 if (total > 0)
andrew@35 733 distance /= sqrt(total);
andrew@35 734
andrew@35 735 return distance;
andrew@35 736 }
andrew@35 737
andrew@35 738 double AudioEventMatcher::getChromaEuclideanDistance(float* chromaOne, float* chromaTwo){
andrew@35 739 double distance = 0;
andrew@35 740 double total = 0;
andrew@37 741
andrew@35 742 // printf("\n");
andrew@35 743 for (int i = 0;i < 12;i++){
andrew@35 744 total += (chromaOne[i] - chromaTwo[i])*(chromaOne[i] - chromaTwo[i]);
andrew@35 745 // printf("chroma1: %.2f; chroma2: %.2f\n", chromaOne[i], chromaTwo[i]);
andrew@35 746 // total += chromaOne[i]*chromaOne[i] + (chromaTwo[i]*chromaTwo[i]);
andrew@35 747 }
andrew@35 748
andrew@37 749 if (total > euclideanMaximumDistance)
andrew@37 750 euclideanMaximumDistance = total;
andrew@37 751
andrew@37 752 distance = ((euclideanMaximumDistance - total)/ euclideanMaximumDistance);//i.e. 1 is
andrew@37 753
andrew@37 754 // if (total > 0)
andrew@37 755
andrew@37 756
andrew@37 757 // distance = 1.0/sqrt(total);
andrew@35 758 // printf("DISTANCE : %.3f\n", distance);
andrew@32 759 return distance;
andrew@32 760 }
andrew@1 761
void AudioEventMatcher::windowResized(const int& w, const int& h){
	//Propagate a window resize to the recorded-track display and to both
	//Bayesian display windows (tempo and position).
	recordedTracks.windowResized(w,h);
	bayesTempoWindow.resized(w,h);
	bayesPositionWindow.resized(w,h);
}
andrew@3 767
andrew@10 768 /*
andrew@10 769
andrew@10 770 void printPosteriorMAPinfo(){ //tmp print stuff
andrew@10 771 printf("New pitch MAP post estimate now %i, ", bayesianStruct.posterior.MAPestimate);
andrew@10 772 double tmp = bayesianStruct.posterior.getMAPestimate();
andrew@10 773 printf(" getting it %f and offset %f == %f ms\n", tmp, bayesianStruct.posterior.offset, bayesianStruct.posterior.getIndexInRealTerms(tmp));
andrew@10 774
andrew@10 775 }
andrew@10 776 */
andrew@3 777