andrew@0
|
1 /*
|
andrew@0
|
2 * AudioEventMatcher.cpp
|
andrew@0
|
3 * MultipleAudioMatcher
|
andrew@0
|
4 *
|
andrew@0
|
5 * Created by Andrew on 31/01/2012.
|
andrew@0
|
6 * Copyright 2012 QMUL. All rights reserved.
|
andrew@0
|
7 *
|
andrew@0
|
8 */
|
andrew@0
|
9
|
andrew@0
|
#include "AudioEventMatcher.h"

#include <cmath>
|
andrew@0
|
11
|
andrew@0
|
12
|
andrew@2
|
//width, in ms, of the Bayesian position matching window: sets the
//position array size (setArraySizes) and bounds the onset search
//range (matchNewOnsetEvent)
const int matchWindowWidth = 6000;
|
andrew@0
|
14
|
andrew@0
|
15 AudioEventMatcher::AudioEventMatcher(){
|
andrew@7
|
16
|
andrew@15
|
17
|
andrew@15
|
18 pitchLikelihoodToNoise = 0.4;//more noise
|
andrew@15
|
19 onsetLikelihoodToNoise = 0.3;
|
andrew@15
|
20 onsetLikelihoodWidth = 20;//in ms
|
andrew@15
|
21
|
andrew@0
|
22 setArraySizes();
|
andrew@3
|
23
|
andrew@3
|
24 usingRealTime = false;
|
andrew@3
|
25 bayesianStruct.realTimeMode = &usingRealTime;
|
andrew@7
|
26 recentPitch = 0;
|
andrew@8
|
27 currentAlignmentPosition = 0;
|
andrew@14
|
28
|
andrew@15
|
29
|
andrew@9
|
30
|
andrew@9
|
31 followingLiveInput = true;
|
andrew@15
|
32 startedPlaying = false;
|
andrew@0
|
33 }
|
andrew@0
|
34
|
andrew@14
|
35
|
andrew@7
|
36 void AudioEventMatcher::setWindowDimensions(){
|
andrew@7
|
37 double startHeight = recordedTracks.numberOfAudioTracks * recordedTracks.trackScreenHeight;
|
andrew@7
|
38 double heightAvailable = 1 - startHeight;
|
andrew@7
|
39 heightAvailable /= 3.0;
|
andrew@7
|
40
|
andrew@7
|
41 bayesPositionWindow.setToRelativeSize(0, startHeight, 1, heightAvailable);
|
andrew@7
|
42 bayesLikelihoodWindow.setToRelativeSize(0, startHeight + 1*heightAvailable, 1, heightAvailable);
|
andrew@7
|
43 bayesTempoWindow.setToRelativeSize(0, startHeight + 2*heightAvailable, 1, heightAvailable);
|
andrew@7
|
44
|
andrew@7
|
45
|
andrew@7
|
46 }
|
andrew@0
|
47
|
andrew@0
|
//Initialise the sizes and scalars of the Bayesian distributions.
void AudioEventMatcher::setArraySizes(){
    //relative-speed (tempo) distribution: 200 bins, 0.01 per bin,
    //prior centred on speed 1.0
    bayesianStruct.resetSpeedSize(200);
    bayesianStruct.setRelativeSpeedScalar(0.01);
    bayesianStruct.setSpeedPrior(1.0);
    bayesianStruct.relativeSpeedPrior.getMaximum();

    //position distribution spans the match window, scalar 1 (per ms)
    bayesianStruct.resetSize(matchWindowWidth);
    bayesianStruct.setPositionDistributionScalar(1);
}
|
andrew@0
|
58
|
andrew@9
|
//Reset the alignment state and begin following input.
void AudioEventMatcher::startPlaying(){
    bayesianStruct.setStartPlaying();
    currentAlignmentPosition = 0;
    startTime = ofGetElapsedTimeMillis();//reference point for elapsed-time calculations

    projectedPrior = bayesianStruct.prior;//projected copy used for drawing
    startedPlaying = true;
    //bayesianStruct.posterior.printArray();
}
|
andrew@3
|
68
|
andrew@9
|
69
|
andrew@15
|
//Stop following input: updatePosition() will no longer move the tracks
//to the live alignment estimate.
void AudioEventMatcher::stopPlaying(){
    startedPlaying = false;
}
|
andrew@15
|
73
|
andrew@9
|
74 void AudioEventMatcher::updatePosition(){
|
andrew@9
|
75 if (!followingLiveInput)
|
andrew@9
|
76 recordedTracks.updatePosition();
|
andrew@15
|
77 else if (startedPlaying)
|
andrew@9
|
78 recordedTracks.updatePositionToMillis(currentAlignmentPosition);
|
andrew@9
|
79
|
andrew@9
|
80 updateBestAlignmentPosition();
|
andrew@9
|
81 }
|
andrew@9
|
82
|
andrew@8
|
83 void AudioEventMatcher::updateBestAlignmentPosition(){
|
andrew@10
|
84 //THIS DEALS WITH WHERE WE ARE NOW! ON THE SCREEN
|
andrew@10
|
85 //DIFFERENT TO WHEN EVENTS COME IN AS THEY ARE TIMESTAMPED - SO EG A PITCH EVENT MAY ARRIVE 16 CHROMA FRAMES LATER - BIG DIFFERENCE
|
andrew@10
|
86
|
andrew@10
|
87 int newTime = ofGetElapsedTimeMillis() - startTime;
|
andrew@10
|
88 // double tmp = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);;
|
andrew@10
|
89 // double timetmp = (newTime - lastAlignmentTime);
|
andrew@10
|
90 // double speedtmp = bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);
|
andrew@11
|
91 // currentAlignmentTime = newTime;
|
andrew@9
|
92 currentAlignmentPosition = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);
|
andrew@10
|
93 currentAlignmentPosition += (newTime - lastAlignmentTime) * bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);
|
andrew@10
|
94
|
andrew@11
|
95 bayesianStruct.projectDistribution(newTime, currentAlignmentPosition, projectedPrior);//prior gets updated to where we are now
|
andrew@11
|
96
|
andrew@10
|
97 // printf("ALIGN pos %f time diff %f (now %f , last %f)speed %f :: ALIGN BEST %f\n", tmp, timetmp, (double)ofGetElapsedTimeMillis(), lastAlignmentTime, speedtmp, currentAlignmentPosition);
|
andrew@8
|
98 }
|
andrew@8
|
99
|
andrew@0
|
//Main draw routine: window outlines, scrolling tracks, the Bayesian
//distributions and some debug text.
void AudioEventMatcher::draw(){
    //draw some outlines in blue
    ofSetColor(20,200,200);
    bayesPositionWindow.drawOutline();
    bayesTempoWindow.drawOutline();

    //draw the scrolling audio tracks
    recordedTracks.drawTracks();

    ofSetColor(255);
    // bayesianStruct.relativeSpeedPrior.drawVector(0, 200, bayesTempoWindow);

    //work out which millisecond range is on screen, then draw into it
    setScreenDisplayTimes();
    drawBayesianDistributions();

    //bayesianStruct.posterior.drawVector(0, bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
    //bayesianStruct.posterior.drawVector(bayesianStruct.posterior.getRealTermsAsIndex(0), bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
    //bayesianStruct.relativeSpeedPosterior.drawVector(0, bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(2), bayesTempoWindow);

    //debug text: most recent pitch event and the playback position
    ofDrawBitmapString("pitch "+ofToString(recentPitch, 2)+", Time "+ofToString(recentTime, 0), 20, 20);

    ofDrawBitmapString("pos "+ofToString(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.playPosition), 200,600);
}
|
andrew@6
|
123
|
andrew@9
|
//Compute the millisecond range of the recording currently on screen,
//derived from the first track's onset-detector draw window.
void AudioEventMatcher::setScreenDisplayTimes(){
    screenWidthMillis = recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.framesToMillis(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.amplitudeNumber);
    // if (!followingLiveInput){

    screenStartTimeMillis = recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.framesToMillis(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.drawParams.windowStartFrame);
    screenEndTimeMillis = screenStartTimeMillis + screenWidthMillis;

    //need PRECISION in this alignment

    /*}else{

    screenStartTimeMillis = (int)(currentAlignmentPosition/screenWidthMillis) * screenWidthMillis;
    screenEndTimeMillis = screenStartTimeMillis + screenWidthMillis;
    }*/
}
|
andrew@9
|
140
|
andrew@6
|
//Draw the position posterior, tempo posterior, per-channel likelihoods
//and priors into their windows, plus assorted debug text.
void AudioEventMatcher::drawBayesianDistributions(){

    //visible slice of the position posterior, in array indices
    int startIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenStartTimeMillis);
    int endIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenEndTimeMillis);

    bayesianStruct.posterior.drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), bayesPositionWindow);

    string tmpString = "start "+ofToString(screenStartTimeMillis)+" (index "+ofToString(startIndex)+"), end "+ofToString(screenEndTimeMillis);
    ofDrawBitmapString(tmpString, bayesPositionWindow.x+20, bayesPositionWindow.y+20);

    // bayesianStruct.likelihood.drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), bayesLikelihoodWindow);

    //the tempo posterior is drawn across its whole array
    bayesianStruct.relativeSpeedPosterior.drawConstrainedVector(0, bayesianStruct.relativeSpeedPosterior.arraySize, 0, ofGetWidth(), bayesTempoWindow);

    string tmpStr = "zero is "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(0));
    tmpStr += " offsetis "+ofToString(bayesianStruct.posterior.offset);
    tmpStr += " screenWidth = "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis));
    ofDrawBitmapString(tmpStr, 20,140);
    tmpStr = "best est "+ofToString(bayesianStruct.bestEstimate);
    ofDrawBitmapString(tmpStr, 20, 180);

    ofDrawBitmapString("screenwidth "+ofToString(screenWidthMillis), 20, 800);

    //green line at current best estimate
    ofSetColor(0,255,0);//green scrolling line best position
    double currentEstimateIndex = (currentAlignmentPosition - screenStartTimeMillis)*ofGetWidth()/screenWidthMillis;
    ofLine(currentEstimateIndex, bayesPositionWindow.y, currentEstimateIndex, bayesPositionWindow.y + bayesPositionWindow.height);

    //draw track by track likelihoods
    for (int i = 0; i <recordedTracks.numberOfAudioTracks;i++){
        ofSetColor(200,255,50);//channel likelihoods in yellow
        likelihoodVisualisation[i].drawConstrainedVector(likelihoodVisualisation[i].getRealTermsAsIndex(screenStartTimeMillis), likelihoodVisualisation[i].getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window);

        ofSetColor(0,255,150);//channel priors
        recentPriors[i].drawConstrainedVector(recentPriors[i].getRealTermsAsIndex(screenStartTimeMillis), recentPriors[i].getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window);

        //timestamp of the last matched event on this channel
        ofSetColor(255);
        ofDrawBitmapString("recent event "+ofToString(recentEventTime[i]), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.x + 20, recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.y + recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.height - 10);
    }

    //NOTE(review): priorStartIndex/priorEndIndex are only used by the
    //commented-out recentPrior drawing below
    int priorStartIndex = bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis);
    int priorEndIndex = bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis);
    // ofSetColor(0,200,200);//recent prior
    // recentPrior.drawConstrainedVector(priorStartIndex, priorEndIndex, 0, ofGetWidth(), bayesPositionWindow);

    ofSetColor(255,0,100);//purple prior
    bayesianStruct.prior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);

    //projected prior (see updateBestAlignmentPosition) in red
    ofSetColor(255,0,0);
    projectedPrior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);

}
|
andrew@1
|
195
|
andrew@6
|
196 void AudioEventMatcher::newPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){
|
andrew@7
|
197 if (pitchIn > 0){
|
andrew@1
|
198 liveInput.addPitchEvent(pitchIn, timeIn);
|
andrew@4
|
199
|
andrew@10
|
200 //printPosteriorMAPinfo();
|
andrew@11
|
201
|
andrew@7
|
202 matchNewPitchEvent(channel, pitchIn, timeIn);//main pitch matching fn
|
andrew@7
|
203
|
andrew@7
|
204 likelihoodVisualisation[1] = bayesianStruct.likelihood;
|
andrew@7
|
205
|
andrew@7
|
206 recentPitch = pitchIn;//for drawing
|
andrew@7
|
207 recentTime = timeIn;
|
andrew@7
|
208 }
|
andrew@8
|
209
|
andrew@2
|
210 }
|
andrew@2
|
211
|
andrew@6
|
212 void AudioEventMatcher::newKickEvent(const double& timeIn){
|
andrew@6
|
213 // liveInput.addKickEvent(timeIn);
|
andrew@2
|
214 matchNewOnsetEvent(0, timeIn);
|
andrew@7
|
215 likelihoodVisualisation[0] = bayesianStruct.likelihood;
|
andrew@2
|
216 }
|
andrew@2
|
217
|
andrew@6
|
//Kick event on an explicit channel: match it against the recorded
//onsets and keep the resulting likelihood for drawing.
void AudioEventMatcher::newKickEvent(const int& channel, const double& timeIn){
    // liveInput.addKickEvent(timeIn);
    matchNewOnsetEvent(channel, timeIn);
    //NOTE(review): always stored at index 0 rather than [channel] — confirm intended
    likelihoodVisualisation[0] = bayesianStruct.likelihood;
}
|
andrew@6
|
223
|
andrew@2
|
224
|
andrew@2
|
225 void AudioEventMatcher::newSnareEvent(const double& timeIn){
|
andrew@6
|
226 matchNewOnsetEvent(2, timeIn);
|
andrew@7
|
227 likelihoodVisualisation[2] = bayesianStruct.likelihood;
|
andrew@7
|
228 }
|
andrew@7
|
229
|
andrew@7
|
230
|
andrew@7
|
//Snare event on an explicit channel: match it against the recorded
//onsets and keep the resulting likelihood for drawing.
void AudioEventMatcher::newSnareEvent(const int& channel, const double& timeIn){
    matchNewOnsetEvent(channel, timeIn);
    //NOTE(review): always stored at index 2 rather than [channel] — confirm intended
    likelihoodVisualisation[2] = bayesianStruct.likelihood;
}
|
andrew@2
|
235
|
andrew@2
|
236 //Needs just to set bounds for the matching process, not have TimeIn
|
andrew@2
|
237 void AudioEventMatcher::matchNewOnsetEvent(const int& channel, const double& timeIn){
|
andrew@3
|
238
|
andrew@6
|
239 bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
|
andrew@10
|
240
|
andrew@2
|
241 //start at beginning but OPTIMISE later
|
andrew@15
|
242
|
andrew@2
|
243
|
andrew@2
|
244 bayesianStruct.likelihood.offset = bayesianStruct.prior.offset;
|
andrew@2
|
245 bayesianStruct.likelihood.zero();//set to zero
|
andrew@2
|
246
|
andrew@2
|
247 double quantity = 1;//likelihoodToNoiseRatio / numberOfMatches;
|
andrew@2
|
248 int numberOfMatchesFound = 0;
|
andrew@2
|
249
|
andrew@2
|
250
|
andrew@10
|
251 double startMatchingTime = bayesianStruct.likelihood.offset;
|
andrew@10
|
252 double endMatchingTime = bayesianStruct.likelihood.offset + matchWindowWidth;
|
andrew@2
|
253
|
andrew@2
|
254 if (channel <= recordedTracks.numberOfAudioTracks){
|
andrew@2
|
255 for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
|
andrew@2
|
256 double millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
|
andrew@10
|
257 if (millisTime >= startMatchingTime && millisTime <= endMatchingTime){
|
andrew@14
|
258 bayesianStruct.likelihood.addGaussianShapeFromRealTime(millisTime, onsetLikelihoodWidth, quantity);
|
andrew@2
|
259 numberOfMatchesFound++;
|
andrew@6
|
260 // printf("Adding Gaussian for onset at time %f offset %f\n", millisTime, bayesianStruct.likelihood.offset);
|
andrew@2
|
261
|
andrew@2
|
262 }
|
andrew@2
|
263 }
|
andrew@2
|
264 }
|
andrew@2
|
265
|
andrew@11
|
266 if (numberOfMatchesFound > 0){
|
andrew@3
|
267 // bayesianStruct.likelihood.addConstant((1-likelihoodToNoiseRatio)/bayesianStruct.likelihood.length);
|
andrew@3
|
268 bayesianStruct.likelihood.addConstant(numberOfMatchesFound*(1-onsetLikelihoodToNoise)/(onsetLikelihoodToNoise*bayesianStruct.likelihood.length));
|
andrew@2
|
269 bayesianStruct.likelihood.renormalise();
|
andrew@2
|
270
|
andrew@8
|
271 bayesianStruct.calculatePosterior();
|
andrew@10
|
272 lastAlignmentTime = timeIn;//use TIMESTAMP
|
andrew@10
|
273 recentEventTime[channel] = timeIn;//ofGetElapsedTimeMillis() - startTime;
|
andrew@11
|
274
|
andrew@11
|
275 recentPriors[channel] = bayesianStruct.prior;
|
andrew@13
|
276 projectedPrior = bayesianStruct.prior;
|
andrew@11
|
277 }
|
andrew@11
|
278
|
andrew@11
|
279
|
andrew@6
|
280
|
andrew@3
|
281 }
|
andrew@3
|
282
|
andrew@3
|
283
|
andrew@3
|
284
|
andrew@3
|
285 void AudioEventMatcher::matchNewPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){
|
andrew@3
|
286 //start at beginning but OPTIMISE later
|
andrew@10
|
287 /*printf("TIME %i\n", ofGetElapsedTimeMillis());
|
andrew@10
|
288 //tmp debug
|
andrew@10
|
289 updateBestAlignmentPosition();
|
andrew@10
|
290 printf("current alignment best estimate %f\n", currentAlignmentPosition);
|
andrew@10
|
291 */
|
andrew@6
|
292 bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
|
andrew@8
|
293
|
andrew@7
|
294 //set the lielihoods by matching the pitched note
|
andrew@7
|
295
|
andrew@15
|
296
|
andrew@3
|
297 int numberOfMatches = 0;
|
andrew@3
|
298 bayesianStruct.likelihood.zero();//set to zero
|
andrew@3
|
299
|
andrew@3
|
300 double quantity = 0;
|
andrew@3
|
301 if (channel <= recordedTracks.numberOfAudioTracks){
|
andrew@3
|
302 for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
|
andrew@3
|
303
|
andrew@3
|
304 if (checkMatch(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn)) {
|
andrew@7
|
305 quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, 10);
|
andrew@3
|
306 bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, 30, quantity);
|
andrew@3
|
307 recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = true;
|
andrew@3
|
308 numberOfMatches++;
|
andrew@3
|
309 }
|
andrew@3
|
310 else{
|
andrew@3
|
311 recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = false;
|
andrew@3
|
312 }
|
andrew@3
|
313
|
andrew@3
|
314 }
|
andrew@3
|
315 }
|
andrew@6
|
316
|
andrew@8
|
317
|
andrew@8
|
318
|
andrew@6
|
319 if (numberOfMatches > 0){//no point updating unless there is a match
|
andrew@7
|
320
|
andrew@6
|
321 bayesianStruct.likelihood.addConstant(numberOfMatches*(1-pitchLikelihoodToNoise)/(pitchLikelihoodToNoise*bayesianStruct.likelihood.length));
|
andrew@4
|
322
|
andrew@4
|
323 //tmp set likelihood constant and calculate using that
|
andrew@6
|
324 //bayesianStruct.likelihood.zero();
|
andrew@6
|
325 //bayesianStruct.likelihood.addConstant(1);
|
andrew@7
|
326
|
andrew@6
|
327 bayesianStruct.calculatePosterior();
|
andrew@11
|
328 lastAlignmentTime = timeIn;//has to use the STAMPED time
|
andrew@11
|
329 recentEventTime[channel] = timeIn;
|
andrew@11
|
330
|
andrew@11
|
331 recentPriors[channel] = bayesianStruct.prior;
|
andrew@13
|
332 projectedPrior = bayesianStruct.prior;
|
andrew@6
|
333 }
|
andrew@4
|
334
|
andrew@11
|
335
|
andrew@1
|
336 }
|
andrew@1
|
337
|
andrew@3
|
338 double AudioEventMatcher::getPitchDistance(const double& pitchOne, const double& pitchTwo, const double& scale){
|
andrew@3
|
339
|
andrew@3
|
340 double distance = abs(pitchOne - pitchTwo);
|
andrew@3
|
341 if (distance < scale)
|
andrew@3
|
342 distance = 1 - (distance/scale);
|
andrew@3
|
343 else
|
andrew@3
|
344 distance = 0;
|
andrew@3
|
345
|
andrew@3
|
346 // printf("[pitch distance %f vs %f = %f\n", pitchOne, pitchTwo, distance);
|
andrew@3
|
347 return distance;
|
andrew@3
|
348
|
andrew@3
|
349 }
|
andrew@3
|
350
|
andrew@3
|
351
|
andrew@3
|
352 bool AudioEventMatcher::checkMatch(const double& recordedPitch, const double& livePitch){
|
andrew@3
|
353 if (abs(recordedPitch - livePitch) < 40)
|
andrew@3
|
354 return true;
|
andrew@3
|
355 else
|
andrew@3
|
356 return false;
|
andrew@3
|
357 }
|
andrew@3
|
358
|
andrew@3
|
359
|
andrew@1
|
360
|
andrew@1
|
361 void AudioEventMatcher::windowResized(const int& w, const int& h){
|
andrew@1
|
362 recordedTracks.windowResized(w,h);
|
andrew@3
|
363 bayesTempoWindow.resized(w,h);
|
andrew@3
|
364 bayesPositionWindow.resized(w,h);
|
andrew@3
|
365 }
|
andrew@3
|
366
|
andrew@10
|
367 /*
|
andrew@10
|
368
|
andrew@10
|
369 void printPosteriorMAPinfo(){ //tmp print stuff
|
andrew@10
|
370 printf("New pitch MAP post estimate now %i, ", bayesianStruct.posterior.MAPestimate);
|
andrew@10
|
371 double tmp = bayesianStruct.posterior.getMAPestimate();
|
andrew@10
|
372 printf(" getting it %f and offset %f == %f ms\n", tmp, bayesianStruct.posterior.offset, bayesianStruct.posterior.getIndexInRealTerms(tmp));
|
andrew@10
|
373
|
andrew@10
|
374 }
|
andrew@10
|
375 */
|
andrew@3
|
376
|