Mercurial > hg > multitrack-audio-matcher
comparison src/AudioEventMatcher.cpp @ 39:f5de07b4d733
helped the tempo prior to have wider shape
author | Andrew N Robertson <andrew.robertson@eecs.qmul.ac.uk> |
---|---|
date | Tue, 24 Apr 2012 23:57:10 +0100 |
parents | 9806a4f22fd0 |
children | 6a7982661703 |
comparison
equal
deleted
inserted
replaced
38:9c803369b229 | 39:f5de07b4d733 |
---|---|
8 */ | 8 */ |
9 | 9 |
10 #include "AudioEventMatcher.h" | 10 #include "AudioEventMatcher.h" |
11 | 11 |
12 | 12 |
13 const int matchWindowWidth = 8000; | 13 const int matchWindowWidth = 8000;//ms in which to match |
14 | |
14 const float pitchCutOff = 16;//within which pitches are even considered | 15 const float pitchCutOff = 16;//within which pitches are even considered |
15 | 16 |
16 AudioEventMatcher::AudioEventMatcher(){ | 17 AudioEventMatcher::AudioEventMatcher(){ |
17 | 18 |
18 useChromaDotProduct = false; | 19 useChromaDotProduct = false; |
34 currentAlignmentPosition = 0; | 35 currentAlignmentPosition = 0; |
35 | 36 |
36 followingLiveInput = true; | 37 followingLiveInput = true; |
37 startedPlaying = false; | 38 startedPlaying = false; |
38 recordedTempoIndex = 0; | 39 recordedTempoIndex = 0; |
40 | |
41 bayesianStruct.startingWindowWidth = matchWindowWidth / 4; | |
39 // temporal.setUpEventTimeMatrix(); | 42 // temporal.setUpEventTimeMatrix(); |
40 // recordedTempoData.setUpEventTimeMatrix(); | 43 // recordedTempoData.setUpEventTimeMatrix(); |
41 } | 44 } |
42 | 45 |
43 | 46 |
154 //bayesianStruct.posterior.printArray(); | 157 //bayesianStruct.posterior.printArray(); |
155 } | 158 } |
156 | 159 |
157 | 160 |
158 void AudioEventMatcher::setSpeedRatioDistribution(const double& speedRatio){ | 161 void AudioEventMatcher::setSpeedRatioDistribution(const double& speedRatio){ |
162 //here is the speed combo actually used | |
159 bayesianStruct.relativeSpeedPosterior.zero(); | 163 bayesianStruct.relativeSpeedPosterior.zero(); |
160 bayesianStruct.relativeSpeedPosterior.addToIndex(bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(speedRatio), 1); | 164 // bayesianStruct.relativeSpeedPosterior.addToIndex(bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(speedRatio), 1); |
161 bayesianStruct.relativeSpeedPosterior.addGaussianShapeFromRealTime(1, 0.06, 0.8); | 165 bayesianStruct.relativeSpeedPosterior.addGaussianShapeFromRealTime(1, 0.1, 3); |
166 bayesianStruct.relativeSpeedPosterior.addGaussianShapeFromRealTime(1, 0.02, 2); | |
162 } | 167 } |
163 | 168 |
164 void AudioEventMatcher::stopPlaying(){ | 169 void AudioEventMatcher::stopPlaying(){ |
165 startedPlaying = false; | 170 startedPlaying = false; |
166 //temporal.printEventTimes(); | 171 //temporal.printEventTimes(); |
232 | 237 |
233 //MAIN DRAW FUNCTION FOR ALL | 238 //MAIN DRAW FUNCTION FOR ALL |
234 | 239 |
235 //draw some outlines in blue | 240 //draw some outlines in blue |
236 ofSetColor(20,200,200); | 241 ofSetColor(20,200,200); |
237 bayesPositionWindow.drawOutline(); | 242 // bayesPositionWindow.drawOutline(); |
238 bayesTempoWindow.drawOutline(); | 243 // bayesTempoWindow.drawOutline(); |
239 | 244 |
240 //draw the scrolling audio tracks | 245 //draw the scrolling audio tracks |
241 recordedTracks.drawTracks(); | 246 recordedTracks.drawTracks(); |
242 | 247 |
243 ofSetColor(255); | 248 ofSetColor(255); |
393 tmpStr += " Nearest "+ofToString(pitchOfNearestMatch,2); | 398 tmpStr += " Nearest "+ofToString(pitchOfNearestMatch,2); |
394 tmpStr += " dist "+ofToString(distanceOfNearestMatch, 2); | 399 tmpStr += " dist "+ofToString(distanceOfNearestMatch, 2); |
395 tmpStr += ", Time "+ofToString(recentPitchEventTime, 0); | 400 tmpStr += ", Time "+ofToString(recentPitchEventTime, 0); |
396 ofDrawBitmapString(tmpStr, 20, 20); | 401 ofDrawBitmapString(tmpStr, 20, 20); |
397 | 402 |
398 string alignString = " align "+ofToString(currentAlignmentPosition, 2); | 403 string alignString = "align "+ofToString(currentAlignmentPosition, 2);//same as synchroniser-recordedposition |
399 alignString += " playing "+ofToString(synchroniser.playingPositionRatio, 5); | 404 alignString += " playing "+ofToString(synchroniser.playingPositionRatio, 5); |
400 alignString += " pos "+ofToString(synchroniser.playingPositionMillis,0)+" ms"; | 405 alignString += " pos "+ofToString(synchroniser.playingPositionMillis,0)+" ms";//playing position in file - causal correction |
401 alignString += " rec pos "+ofToString(synchroniser.recordedPositionMillis,0)+" ms"; | 406 alignString += " rec pos "+ofToString(synchroniser.recordedPositionMillis,0)+" ms";//currentAlignmentPosition in rehearsal |
407 alignString += "playing time "+ofToString(synchroniser.recordedPositionTimeSent, 0)+" ms";//playing time since beginning of live take |
402 ofDrawBitmapString(alignString, 20, 50); | 408 ofDrawBitmapString(alignString, 20, 50); |
403 | |
404 ofDrawBitmapString("pos "+ofToString(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.playPosition), 200,600); | 409 ofDrawBitmapString("pos "+ofToString(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.playPosition), 200,600); |
405 | 410 |
406 } | 411 } |
407 | 412 |
408 void AudioEventMatcher::newPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){ | 413 void AudioEventMatcher::newPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){ |