andrew@0
|
1 /*
|
andrew@0
|
2 * AudioEventMatcher.cpp
|
andrew@0
|
3 * MultipleAudioMatcher
|
andrew@0
|
4 *
|
andrew@0
|
5 * Created by Andrew on 31/01/2012.
|
andrew@0
|
6 * Copyright 2012 QMUL. All rights reserved.
|
andrew@0
|
7 *
|
andrew@0
|
8 */
|
andrew@0
|
9
|
andrew@0
|
10 #include "AudioEventMatcher.h"
|
andrew@0
|
11
|
andrew@0
|
12
|
andrew@2
|
13 const int matchWindowWidth = 6000;
|
andrew@0
|
14
|
andrew@0
|
15 AudioEventMatcher::AudioEventMatcher(){
|
andrew@7
|
16
|
andrew@15
|
17
|
andrew@17
|
18 pitchLikelihoodToNoise = 0.7;//more noise
|
andrew@16
|
19
|
andrew@17
|
20 onsetLikelihoodToNoise = 0.5;
|
andrew@17
|
21 onsetLikelihoodWidth = 10;//in ms
|
andrew@15
|
22
|
andrew@0
|
23 setArraySizes();
|
andrew@3
|
24
|
andrew@3
|
25 usingRealTime = false;
|
andrew@3
|
26 bayesianStruct.realTimeMode = &usingRealTime;
|
andrew@7
|
27 recentPitch = 0;
|
andrew@8
|
28 currentAlignmentPosition = 0;
|
andrew@14
|
29
|
andrew@15
|
30
|
andrew@9
|
31
|
andrew@9
|
32 followingLiveInput = true;
|
andrew@15
|
33 startedPlaying = false;
|
andrew@19
|
34
|
andrew@19
|
35 temporal.setUpEventTimeMatrix();
|
andrew@0
|
36 }
|
andrew@0
|
37
|
andrew@14
|
38
|
andrew@19
|
39
|
andrew@19
|
40
|
andrew@7
|
41 void AudioEventMatcher::setWindowDimensions(){
|
andrew@7
|
42 double startHeight = recordedTracks.numberOfAudioTracks * recordedTracks.trackScreenHeight;
|
andrew@7
|
43 double heightAvailable = 1 - startHeight;
|
andrew@19
|
44 heightAvailable /= NUMBER_OF_CHANNELS;
|
andrew@7
|
45
|
andrew@7
|
46 bayesPositionWindow.setToRelativeSize(0, startHeight, 1, heightAvailable);
|
andrew@7
|
47 bayesLikelihoodWindow.setToRelativeSize(0, startHeight + 1*heightAvailable, 1, heightAvailable);
|
andrew@7
|
48 bayesTempoWindow.setToRelativeSize(0, startHeight + 2*heightAvailable, 1, heightAvailable);
|
andrew@7
|
49
|
andrew@7
|
50
|
andrew@7
|
51 }
|
andrew@0
|
52
|
andrew@0
|
void AudioEventMatcher::setArraySizes(){
    //relative-speed distribution: 200 bins, scalar 0.01 per bin
    //(presumably covering speeds 0..2x — TODO confirm against BayesianStruct),
    //prior centred on 1.0 (original speed)
    bayesianStruct.resetSpeedSize(200);
    bayesianStruct.setRelativeSpeedScalar(0.01);
    bayesianStruct.setSpeedPrior(1.0);
    bayesianStruct.relativeSpeedPrior.getMaximum();

    //position distribution spans the whole match window; scalar 1 suggests
    //one bin per millisecond — verify against getRealTermsAsIndex usage
    bayesianStruct.resetSize(matchWindowWidth);
    bayesianStruct.setPositionDistributionScalar(1);
}
|
andrew@0
|
63
|
andrew@16
|
void AudioEventMatcher::loadAudioFiles(){
    //load the test audio set and tell the synchroniser the length (in samples)
    //of the first file, which acts as the reference track
    recordedTracks.loadTestAudio();
    synchroniser.fileLengthSamples = recordedTracks.loadedAudioFiles[0].fileLoader.totalNumberOfSamples;
    printf("synchroniser has %f samples\n", synchroniser.fileLengthSamples);
}
|
andrew@16
|
69
|
andrew@9
|
70 void AudioEventMatcher::startPlaying(){
|
andrew@3
|
71 bayesianStruct.setStartPlaying();
|
andrew@8
|
72 currentAlignmentPosition = 0;
|
andrew@8
|
73 startTime = ofGetElapsedTimeMillis();
|
andrew@11
|
74
|
andrew@11
|
75 projectedPrior = bayesianStruct.prior;
|
andrew@15
|
76 startedPlaying = true;
|
andrew@17
|
77 synchroniser.reset();
|
andrew@19
|
78 temporal.reset();
|
andrew@17
|
79
|
andrew@3
|
80 //bayesianStruct.posterior.printArray();
|
andrew@3
|
81 }
|
andrew@3
|
82
|
andrew@9
|
83
|
andrew@15
|
void AudioEventMatcher::stopPlaying(){
    //stop matching and dump the collected event times for offline inspection
    startedPlaying = false;
    temporal.printEventTimes();
}
|
andrew@15
|
88
|
andrew@9
|
89 void AudioEventMatcher::updatePosition(){
|
andrew@19
|
90
|
andrew@19
|
91 if (startedPlaying){
|
andrew@9
|
92 if (!followingLiveInput)
|
andrew@9
|
93 recordedTracks.updatePosition();
|
andrew@19
|
94 else
|
andrew@9
|
95 recordedTracks.updatePositionToMillis(currentAlignmentPosition);
|
andrew@9
|
96
|
andrew@9
|
97 updateBestAlignmentPosition();
|
andrew@19
|
98 }
|
andrew@19
|
99
|
andrew@19
|
100 temporal.tempoPosterior.addGaussianShape(temporal.tempoPosterior.MAPestimate, temporal.tempoArraySize / 4, 0.5 );
|
andrew@9
|
101 }
|
andrew@9
|
102
|
andrew@8
|
void AudioEventMatcher::updateBestAlignmentPosition(){
    //THIS DEALS WITH WHERE WE ARE NOW! ON THE SCREEN
    //DIFFERENT TO WHEN EVENTS COME IN AS THEY ARE TIMESTAMPED - SO EG A PITCH EVENT MAY ARRIVE 16 CHROMA FRAMES LATER - BIG DIFFERENCE

    //elapsed ms since startPlaying()
    int newTime = ofGetElapsedTimeMillis() - startTime;
    // double tmp = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);;
    // double timetmp = (newTime - lastAlignmentTime);
    // double speedtmp = bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);
    // currentAlignmentTime = newTime;

    //take the MAP position estimate, then extrapolate forward from the last
    //event-alignment timestamp at the MAP relative speed
    currentAlignmentPosition = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);
    currentAlignmentPosition += (newTime - lastAlignmentTime) * bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);

    //feed the synchroniser the new recorded-position estimate and let it
    //adjust the playback speed
    synchroniser.updateRecordedPosition(currentAlignmentPosition, newTime);

    synchroniser.updateOutputSpeed();

    bayesianStruct.projectDistribution(newTime, currentAlignmentPosition, projectedPrior);//prior gets updated to where we are now

    // printf("ALIGN pos %f time diff %f (now %f , last %f)speed %f :: ALIGN BEST %f\n", tmp, timetmp, (double)ofGetElapsedTimeMillis(), lastAlignmentTime, speedtmp, currentAlignmentPosition);
}
|
andrew@8
|
124
|
andrew@0
|
void AudioEventMatcher::draw(){
    //draw some outlines in blue
    ofSetColor(20,200,200);
    bayesPositionWindow.drawOutline();
    bayesTempoWindow.drawOutline();

    //draw the scrolling audio tracks
    recordedTracks.drawTracks();

    ofSetColor(255);
    // bayesianStruct.relativeSpeedPrior.drawVector(0, 200, bayesTempoWindow);

    //compute the visible time window, then render all distributions into it
    setScreenDisplayTimes();
    drawBayesianDistributions();

    //bayesianStruct.posterior.drawVector(0, bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
    //bayesianStruct.posterior.drawVector(bayesianStruct.posterior.getRealTermsAsIndex(0), bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
    //bayesianStruct.relativeSpeedPosterior.drawVector(0, bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(2), bayesTempoWindow);

    //status line: most recent live pitch, the recorded onset nearest the
    //current alignment and the last event timestamp
    string tmpStr = "pitch "+ofToString(recentPitch, 2);
    tmpStr += " Nearest "+ofToString(pitchOfNearestMatch,2);
    tmpStr += " dist "+ofToString(distanceOfNearestMatch, 2);
    tmpStr += ", Time "+ofToString(recentTime, 0);
    ofDrawBitmapString(tmpStr, 20, 20);

    //status line: alignment estimate and synchroniser playback state
    string alignString = " align "+ofToString(currentAlignmentPosition, 2);
    alignString += " playing "+ofToString(synchroniser.playingPositionRatio, 5);
    alignString += " pos "+ofToString(synchroniser.playingPositionMillis,0)+" ms";
    alignString += " rec pos "+ofToString(synchroniser.recordedPositionMillis,0)+" ms";
    ofDrawBitmapString(alignString, 20, 50);

    ofDrawBitmapString("pos "+ofToString(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.playPosition), 200,600);
}
|
andrew@6
|
159
|
andrew@9
|
void AudioEventMatcher::setScreenDisplayTimes(){
    //width of the visible window in ms, derived from the first track's
    //amplitude frame count
    screenWidthMillis = recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.framesToMillis(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.amplitudeNumber);
    // if (!followingLiveInput){

    //the window start follows the track display's scroll position
    screenStartTimeMillis = recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.framesToMillis(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.drawParams.windowStartFrame);
    screenEndTimeMillis = screenStartTimeMillis + screenWidthMillis;

    //need PRECISION in this alignment

    /*}else{

    screenStartTimeMillis = (int)(currentAlignmentPosition/screenWidthMillis) * screenWidthMillis;
    screenEndTimeMillis = screenStartTimeMillis + screenWidthMillis;
    }*/
}
|
andrew@9
|
176
|
andrew@6
|
void AudioEventMatcher::drawBayesianDistributions(){

    //map the visible ms window onto posterior array indices
    int startIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenStartTimeMillis);
    int endIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenEndTimeMillis);

    bayesianStruct.posterior.drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), bayesPositionWindow);

    string tmpString = "start "+ofToString(screenStartTimeMillis)+" (index "+ofToString(startIndex)+"), end "+ofToString(screenEndTimeMillis);
    ofDrawBitmapString(tmpString, bayesPositionWindow.x+20, bayesPositionWindow.y+20);

    // bayesianStruct.likelihood.drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), bayesLikelihoodWindow);

    //the relative-speed posterior fills the tempo window in full
    bayesianStruct.relativeSpeedPosterior.drawConstrainedVector(0, bayesianStruct.relativeSpeedPosterior.arraySize, 0, ofGetWidth(), bayesTempoWindow);

    //debug read-outs for the position distribution
    string tmpStr = "zero is "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(0));
    tmpStr += " offsetis "+ofToString(bayesianStruct.posterior.offset);
    tmpStr += " screenWidth = "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis));
    ofDrawBitmapString(tmpStr, 20,140);
    tmpStr = "best est "+ofToString(bayesianStruct.bestEstimate);
    ofDrawBitmapString(tmpStr, 20, 180);

    ofDrawBitmapString("screenwidth "+ofToString(screenWidthMillis), 20, 800);

    //green line at current best estimate
    ofSetColor(0,255,0);//green scrolling line best position
    double currentEstimateIndex = (currentAlignmentPosition - screenStartTimeMillis)*ofGetWidth()/screenWidthMillis;
    ofLine(currentEstimateIndex, bayesPositionWindow.y, currentEstimateIndex, bayesPositionWindow.y + bayesPositionWindow.height);

    ofSetColor(0,255,255);//synchroniser position
    currentEstimateIndex = (synchroniser.playingPositionMillis - screenStartTimeMillis)*ofGetWidth()/screenWidthMillis;
    //NOTE(review): this mixes bayesLikelihoodWindow.y with bayesPositionWindow.height —
    //harmless if all rows share a height (see setWindowDimensions) but confirm intended
    ofLine(currentEstimateIndex, bayesLikelihoodWindow.y, currentEstimateIndex, bayesLikelihoodWindow.y + bayesPositionWindow.height);

    //draw track by track likelihoods
    for (int i = 0; i <recordedTracks.numberOfAudioTracks;i++){
        ofSetColor(200,255,50);//channel likelihoods in yellow
        likelihoodVisualisation[i].drawConstrainedVector(likelihoodVisualisation[i].getRealTermsAsIndex(screenStartTimeMillis), likelihoodVisualisation[i].getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window);

        ofSetColor(0,255,150);//channel priors
        recentPriors[i].drawConstrainedVector(recentPriors[i].getRealTermsAsIndex(screenStartTimeMillis), recentPriors[i].getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window);

        ofSetColor(255);
        ofDrawBitmapString("recent event "+ofToString(recentEventTime[i]), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.x + 20, recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.y + recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.height - 10);
    }

    //indices kept for the commented-out recent-prior drawing below
    int priorStartIndex = bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis);
    int priorEndIndex = bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis);
    // ofSetColor(0,200,200);//recent prior
    // recentPrior.drawConstrainedVector(priorStartIndex, priorEndIndex, 0, ofGetWidth(), bayesPositionWindow);

    ofSetColor(255,0,100);//purple prior
    bayesianStruct.prior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);

    ofSetColor(255,0,0);//projected prior in red
    projectedPrior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);

    temporal.drawTempoArray(bayesLikelihoodWindow);
}
|
andrew@1
|
240
|
andrew@6
|
241 void AudioEventMatcher::newPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){
|
andrew@7
|
242 if (pitchIn > 0){
|
andrew@1
|
243 liveInput.addPitchEvent(pitchIn, timeIn);
|
andrew@4
|
244
|
andrew@10
|
245 //printPosteriorMAPinfo();
|
andrew@11
|
246
|
andrew@7
|
247 matchNewPitchEvent(channel, pitchIn, timeIn);//main pitch matching fn
|
andrew@7
|
248
|
andrew@7
|
249 likelihoodVisualisation[1] = bayesianStruct.likelihood;
|
andrew@7
|
250
|
andrew@7
|
251 recentPitch = pitchIn;//for drawing
|
andrew@7
|
252 recentTime = timeIn;
|
andrew@7
|
253 }
|
andrew@8
|
254
|
andrew@2
|
255 }
|
andrew@2
|
256
|
andrew@6
|
//kick events default to channel 0
void AudioEventMatcher::newKickEvent(const double& timeIn){
    // liveInput.addKickEvent(timeIn);
    matchNewOnsetEvent(0, timeIn);
    likelihoodVisualisation[0] = bayesianStruct.likelihood;//for drawing
}
|
andrew@2
|
262
|
andrew@6
|
void AudioEventMatcher::newKickEvent(const int& channel, const double& timeIn){
    // liveInput.addKickEvent(timeIn);
    matchNewOnsetEvent(channel, timeIn);
    //NOTE(review): visualisation slot is hard-coded to 0 even when 'channel'
    //differs — confirm intended
    likelihoodVisualisation[0] = bayesianStruct.likelihood;
}
|
andrew@6
|
268
|
andrew@2
|
269
|
andrew@2
|
//snare events default to channel 2
void AudioEventMatcher::newSnareEvent(const double& timeIn){
    matchNewOnsetEvent(2, timeIn);
    likelihoodVisualisation[2] = bayesianStruct.likelihood;//for drawing
}
|
andrew@7
|
274
|
andrew@7
|
275
|
andrew@7
|
void AudioEventMatcher::newSnareEvent(const int& channel, const double& timeIn){
    matchNewOnsetEvent(channel, timeIn);
    //NOTE(review): visualisation slot is hard-coded to 2 even when 'channel'
    //differs — confirm intended
    likelihoodVisualisation[2] = bayesianStruct.likelihood;
}
|
andrew@2
|
280
|
andrew@2
|
281 //Needs just to set bounds for the matching process, not have TimeIn
|
andrew@2
|
282 void AudioEventMatcher::matchNewOnsetEvent(const int& channel, const double& timeIn){
|
andrew@3
|
283
|
andrew@6
|
284 bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
|
andrew@10
|
285
|
andrew@2
|
286 //start at beginning but OPTIMISE later
|
andrew@15
|
287
|
andrew@2
|
288
|
andrew@2
|
289 bayesianStruct.likelihood.offset = bayesianStruct.prior.offset;
|
andrew@2
|
290 bayesianStruct.likelihood.zero();//set to zero
|
andrew@2
|
291
|
andrew@2
|
292 double quantity = 1;//likelihoodToNoiseRatio / numberOfMatches;
|
andrew@2
|
293 int numberOfMatchesFound = 0;
|
andrew@2
|
294
|
andrew@2
|
295
|
andrew@10
|
296 double startMatchingTime = bayesianStruct.likelihood.offset;
|
andrew@10
|
297 double endMatchingTime = bayesianStruct.likelihood.offset + matchWindowWidth;
|
andrew@2
|
298
|
andrew@2
|
299 if (channel <= recordedTracks.numberOfAudioTracks){
|
andrew@2
|
300 for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
|
andrew@2
|
301 double millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
|
andrew@10
|
302 if (millisTime >= startMatchingTime && millisTime <= endMatchingTime){
|
andrew@14
|
303 bayesianStruct.likelihood.addGaussianShapeFromRealTime(millisTime, onsetLikelihoodWidth, quantity);
|
andrew@2
|
304 numberOfMatchesFound++;
|
andrew@6
|
305 // printf("Adding Gaussian for onset at time %f offset %f\n", millisTime, bayesianStruct.likelihood.offset);
|
andrew@2
|
306
|
andrew@2
|
307 }
|
andrew@2
|
308 }
|
andrew@2
|
309 }
|
andrew@2
|
310
|
andrew@11
|
311 if (numberOfMatchesFound > 0){
|
andrew@3
|
312 // bayesianStruct.likelihood.addConstant((1-likelihoodToNoiseRatio)/bayesianStruct.likelihood.length);
|
andrew@3
|
313 bayesianStruct.likelihood.addConstant(numberOfMatchesFound*(1-onsetLikelihoodToNoise)/(onsetLikelihoodToNoise*bayesianStruct.likelihood.length));
|
andrew@2
|
314 bayesianStruct.likelihood.renormalise();
|
andrew@2
|
315
|
andrew@8
|
316 bayesianStruct.calculatePosterior();
|
andrew@10
|
317 lastAlignmentTime = timeIn;//use TIMESTAMP
|
andrew@10
|
318 recentEventTime[channel] = timeIn;//ofGetElapsedTimeMillis() - startTime;
|
andrew@11
|
319
|
andrew@11
|
320 recentPriors[channel] = bayesianStruct.prior;
|
andrew@13
|
321 projectedPrior = bayesianStruct.prior;
|
andrew@19
|
322
|
andrew@19
|
323
|
andrew@19
|
324 temporal.updateTempo(channel, timeIn);
|
andrew@11
|
325 }
|
andrew@11
|
326
|
andrew@11
|
327
|
andrew@6
|
328
|
andrew@3
|
329 }
|
andrew@3
|
330
|
andrew@3
|
331
|
andrew@3
|
332
|
andrew@3
|
333 void AudioEventMatcher::matchNewPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){
|
andrew@3
|
334 //start at beginning but OPTIMISE later
|
andrew@10
|
335 /*printf("TIME %i\n", ofGetElapsedTimeMillis());
|
andrew@10
|
336 //tmp debug
|
andrew@10
|
337 updateBestAlignmentPosition();
|
andrew@10
|
338 printf("current alignment best estimate %f\n", currentAlignmentPosition);
|
andrew@10
|
339 */
|
andrew@6
|
340 bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
|
andrew@8
|
341
|
andrew@7
|
342 //set the lielihoods by matching the pitched note
|
andrew@7
|
343
|
andrew@15
|
344
|
andrew@3
|
345 int numberOfMatches = 0;
|
andrew@3
|
346 bayesianStruct.likelihood.zero();//set to zero
|
andrew@18
|
347 double newOnsetTime;
|
andrew@18
|
348 double closestDistance = INFINITY;
|
andrew@3
|
349
|
andrew@3
|
350 double quantity = 0;
|
andrew@3
|
351 if (channel <= recordedTracks.numberOfAudioTracks){
|
andrew@3
|
352 for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
|
andrew@3
|
353
|
andrew@3
|
354 if (checkMatch(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn)) {
|
andrew@18
|
355 quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, 8);
|
andrew@18
|
356
|
andrew@3
|
357 bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, 30, quantity);
|
andrew@3
|
358 recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = true;
|
andrew@3
|
359 numberOfMatches++;
|
andrew@3
|
360 }
|
andrew@3
|
361 else{
|
andrew@3
|
362 recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = false;
|
andrew@3
|
363 }
|
andrew@18
|
364 //checking nearest pitch
|
andrew@18
|
365 newOnsetTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
|
andrew@18
|
366 if (abs(newOnsetTime - currentAlignmentPosition) < closestDistance){
|
andrew@18
|
367 closestDistance = abs(newOnsetTime - currentAlignmentPosition);
|
andrew@18
|
368 pitchOfNearestMatch = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch;
|
andrew@18
|
369 distanceOfNearestMatch = quantity;
|
andrew@18
|
370 }
|
andrew@3
|
371
|
andrew@3
|
372 }
|
andrew@3
|
373 }
|
andrew@6
|
374
|
andrew@8
|
375
|
andrew@8
|
376
|
andrew@6
|
377 if (numberOfMatches > 0){//no point updating unless there is a match
|
andrew@7
|
378
|
andrew@6
|
379 bayesianStruct.likelihood.addConstant(numberOfMatches*(1-pitchLikelihoodToNoise)/(pitchLikelihoodToNoise*bayesianStruct.likelihood.length));
|
andrew@4
|
380
|
andrew@4
|
381 //tmp set likelihood constant and calculate using that
|
andrew@6
|
382 //bayesianStruct.likelihood.zero();
|
andrew@6
|
383 //bayesianStruct.likelihood.addConstant(1);
|
andrew@7
|
384
|
andrew@6
|
385 bayesianStruct.calculatePosterior();
|
andrew@11
|
386 lastAlignmentTime = timeIn;//has to use the STAMPED time
|
andrew@11
|
387 recentEventTime[channel] = timeIn;
|
andrew@11
|
388
|
andrew@11
|
389 recentPriors[channel] = bayesianStruct.prior;
|
andrew@13
|
390 projectedPrior = bayesianStruct.prior;
|
andrew@19
|
391
|
andrew@19
|
392 temporal.eventTimes[channel].push_back(timeIn);
|
andrew@6
|
393 }
|
andrew@4
|
394
|
andrew@11
|
395
|
andrew@1
|
396 }
|
andrew@1
|
397
|
andrew@3
|
398 double AudioEventMatcher::getPitchDistance(const double& pitchOne, const double& pitchTwo, const double& scale){
|
andrew@3
|
399
|
andrew@18
|
400 double scaleFactor = scale * pitchOne / 110.0;
|
andrew@16
|
401
|
andrew@18
|
402 int multiplicationFactor = 1;
|
andrew@18
|
403 if (pitchTwo > 0){
|
andrew@18
|
404 int multiplicationFactor = round(pitchOne/pitchTwo);
|
andrew@18
|
405 }
|
andrew@16
|
406
|
andrew@18
|
407 double distance = abs(pitchOne - pitchTwo*multiplicationFactor);
|
andrew@16
|
408 if (distance < scaleFactor)
|
andrew@16
|
409 distance = 1 - (distance/scaleFactor);
|
andrew@3
|
410 else
|
andrew@3
|
411 distance = 0;
|
andrew@3
|
412
|
andrew@3
|
413 // printf("[pitch distance %f vs %f = %f\n", pitchOne, pitchTwo, distance);
|
andrew@3
|
414 return distance;
|
andrew@3
|
415
|
andrew@3
|
416 }
|
andrew@3
|
417
|
andrew@3
|
418
|
andrew@3
|
419 bool AudioEventMatcher::checkMatch(const double& recordedPitch, const double& livePitch){
|
andrew@18
|
420
|
andrew@18
|
421 if (livePitch > 0){
|
andrew@18
|
422 int multiplicationFactor = (int)(round(recordedPitch/livePitch));
|
andrew@18
|
423
|
andrew@18
|
424 if (abs(recordedPitch - livePitch * multiplicationFactor) < 16)
|
andrew@3
|
425 return true;
|
andrew@3
|
426 else
|
andrew@3
|
427 return false;
|
andrew@18
|
428 }else {
|
andrew@18
|
429 return false;
|
andrew@18
|
430 }
|
andrew@18
|
431
|
andrew@3
|
432 }
|
andrew@3
|
433
|
andrew@3
|
434
|
andrew@1
|
435
|
andrew@1
|
436 void AudioEventMatcher::windowResized(const int& w, const int& h){
|
andrew@1
|
437 recordedTracks.windowResized(w,h);
|
andrew@3
|
438 bayesTempoWindow.resized(w,h);
|
andrew@3
|
439 bayesPositionWindow.resized(w,h);
|
andrew@3
|
440 }
|
andrew@3
|
441
|
andrew@10
|
442 /*
|
andrew@10
|
443
|
andrew@10
|
444 void printPosteriorMAPinfo(){ //tmp print stuff
|
andrew@10
|
445 printf("New pitch MAP post estimate now %i, ", bayesianStruct.posterior.MAPestimate);
|
andrew@10
|
446 double tmp = bayesianStruct.posterior.getMAPestimate();
|
andrew@10
|
447 printf(" getting it %f and offset %f == %f ms\n", tmp, bayesianStruct.posterior.offset, bayesianStruct.posterior.getIndexInRealTerms(tmp));
|
andrew@10
|
448
|
andrew@10
|
449 }
|
andrew@10
|
450 */
|
andrew@3
|
451
|