comparison src/AudioEventMatcher.cpp @ 56:4394c9490716 tip

minor changes
author Andrew N Robertson <andrew.robertson@eecs.qmul.ac.uk>
date Mon, 24 Dec 2012 18:58:39 +0000
parents 2eca10a31ae2
children
comparison
55:2eca10a31ae2 (parent) → 56:4394c9490716 (this changeset)
  13   13
  14   14
  15   15     const int matchWindowWidth = 8000;//ms in which to match
  16   16
  17   17     const float pitchCutOff = 16;//within which pitches are even considered
       18  +  const double pitchWidth = 12;
  18   19
  19   20     bool printInfo = false;
  20   21
  21   22     AudioEventMatcher::AudioEventMatcher(){
  22   23
  23   24     ofBackground(0);
  24   25     useChromaDotProduct = false;//false for most tests
  25        -
  26   26     printingData = false;
       27  +  updateTempoMethodOn = false;
  27   28
  28   29     pitchLikelihoodToNoise = 0.6;//more noise
       30  +  pitchLikelihoodWidth = 30;
       31  +
  29   32     chromaLikelihoodToNoise = 0.5;//lower => more noise, higher more weight for events
  30   33     chromaLikelihoodWidth = 50;//ms round onset event
  31   34
  32        -  onsetLikelihoodToNoise = 0.2;//0.1 and 10 as to 9/5/12
       35  +  //onsetLikelihoodToNoise = 0.2;//0.1 and 10 as to 9/5/12
  33   36     kickLikelihoodToNoise = 0.3;
  34   37     snareLikelihoodToNoise = 0.1;
  35   38
  36   39
  37   40     onsetLikelihoodWidth = 6;//in ms
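
The new constants above, pitchWidth and pitchLikelihoodWidth, replace the literals 12 and 30 previously hard-coded into the calls to getPitchDistance and addGaussianShapeFromRealTime further down the file (see the last hunk below). A minimal sketch of the technique they parameterise, using hypothetical helpers (pitchWeight, addGaussianBump) and a one-bin-per-millisecond likelihood array rather than the project's actual Bayesian structures:

    #include <cmath>
    #include <cstddef>
    #include <vector>

    // Hypothetical illustration, not repository code: weight a recorded onset by
    // how close its pitch is to the incoming pitch (pitchWidth sets the Gaussian
    // width in pitch units), then add a bump of that weight around the onset time
    // in a likelihood array over alignment positions (pitchLikelihoodWidth, in ms).
    double pitchWeight(double recordedPitch, double pitchIn, double pitchWidth){
        double d = recordedPitch - pitchIn;
        return std::exp(-(d * d) / (2.0 * pitchWidth * pitchWidth));
    }

    void addGaussianBump(std::vector<double>& likelihood, double centreMillis,
                         double widthMillis, double weight){
        for (std::size_t i = 0; i < likelihood.size(); ++i){
            double d = static_cast<double>(i) - centreMillis; // one bin per millisecond
            likelihood[i] += weight * std::exp(-(d * d) / (2.0 * widthMillis * widthMillis));
        }
    }
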
 221  224     recordedTracks.updatePosition();
 222  225     markerPlaybackPosition = recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.framesToMillis(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.playPositionFrames);
 223  226     currentAlignmentPosition = markerPlaybackPosition;
 224  227     }
 225  228
 226        -  updateRecordedTempo();
 227        -  temporal.tempoPosterior.addGaussianShape(temporal.tempoPosterior.MAPestimate, temporal.tempoArraySize / 4, 0.5 );
      229  +  if (updateTempoMethodOn){
      230  +  updateRecordedTempo();
      231  +  }
 228  232     }
 229  233
 230  234     void AudioEventMatcher::updateRecordedTempo(){
 231  235     //tempo of equivalent recorded position is updated
 232        -  recordedTempo = getRecordedTempoAtMillis(currentAlignmentPosition);
 233        -
 234        -  double tmpRatio = currentSpeedRatio;
 235        -  currentSpeedRatio = temporal.playingTempo / recordedTempo;
 236        -  if (currentSpeedRatio != tmpRatio)
 237        -  setSpeedRatioDistribution(currentSpeedRatio);
      236  +
      237  +  if (recordedTempoIndex < recordedTempoData.globalTempoTimes.size()){//if for debug
      238  +  recordedTempo = getRecordedTempoAtMillis(currentAlignmentPosition);
      239  +
      240  +  double tmpRatio = currentSpeedRatio;
      241  +
      242  +  currentSpeedRatio = temporal.playingTempo / recordedTempo;
      243  +  if (currentSpeedRatio != tmpRatio)
      244  +  setSpeedRatioDistribution(currentSpeedRatio);
      245  +
 238  246     }//end if to prevent debug crash
 239        -  }
 240        -
      247  +
      248  +  temporal.tempoPosterior.addGaussianShape(temporal.tempoPosterior.MAPestimate, temporal.tempoArraySize / 4, 0.5 );
      249  +
      250  +  }
      251  +
 241  252     double AudioEventMatcher::getRecordedTempoAtMillis(const double& millisPosition){
 242        -  if (recordedTempoIndex < recordedTempoData.globalTempoTimes.size()){//if for debug
      253  +
 243  254     while(currentAlignmentPosition < recordedTempoData.globalTempoTimes[recordedTempoIndex] && recordedTempoIndex > 0){
 244        -  //this loop never used as sequential, so we expewct the laignment time to be ahead of the last recorded tempo point
      255  +  //this loop never used as sequential, so we expect the alignment time to be ahead of the last recorded tempo point
 245  256     //but just in case
 246  257     recordedTempoIndex--;
 247  258     }
 248  259
 249  260     while(currentAlignmentPosition > recordedTempoData.globalTempoTimes[recordedTempoIndex]){
 250  261     recordedTempoIndex++;
 251  262     }
 252        -
 253  263
 254  264     return recordedTempoData.globalTempo[recordedTempoIndex];
 255  265     }
 256  266
 257  267     void AudioEventMatcher::updateBestAlignmentPosition(){
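
For orientation, a minimal sketch (hypothetical names, not the project's code) of the monotonic tempo-lookup pattern used by getRecordedTempoAtMillis above, plus the speed-ratio computation that updateRecordedTempo now performs behind the size() guard; tempoTimes, tempoValues and tempoIndex stand in for recordedTempoData.globalTempoTimes, recordedTempoData.globalTempo and recordedTempoIndex:

    #include <cstddef>
    #include <vector>

    // tempoTimes is sorted, so the index only needs to creep forward as the
    // alignment position advances; the bounds tests echo the guard the changeset
    // adds to avoid indexing past the end of the arrays.
    double tempoAtMillis(const std::vector<double>& tempoTimes,
                         const std::vector<double>& tempoValues,
                         double millisPosition, std::size_t& tempoIndex){
        while (tempoIndex > 0 && millisPosition < tempoTimes[tempoIndex])
            tempoIndex--;          // only needed if the alignment position jumps backwards
        while (tempoIndex + 1 < tempoTimes.size() && millisPosition > tempoTimes[tempoIndex])
            tempoIndex++;          // advance to the tempo point covering millisPosition
        return tempoValues[tempoIndex];
    }

    // Ratio of the live ("playing") tempo to the recorded tempo at the current
    // alignment position; when it changes, the changeset re-seeds the speed-ratio
    // distribution via setSpeedRatioDistribution(currentSpeedRatio).
    double speedRatio(double playingTempo, double recordedTempo){
        return playingTempo / recordedTempo;
    }
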
 776  786     double totalLikelihoodAdded = 0;
 777  787     if (channel <= recordedTracks.numberOfAudioTracks){
 778  788     for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
 779  789
 780  790     if (checkMatch(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn)) {
 781        -  quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, 12);
      791  +  quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, pitchWidth);
 782  792
 783        -  bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, 30, quantity);
      793  +  bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, pitchLikelihoodWidth, quantity);
 784  794     recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = true;
 785  795     numberOfMatches++;
 786  796     totalLikelihoodAdded += quantity;
 787  797     }
 788  798     else{