multitrack-audio-matcher: diff src/AudioEventMatcher.cpp @ 7:33dedfe32893
kick, snare and bass windowed. Likelihoods in dedicated screen regions
author:   Andrew N Robertson <andrew.robertson@eecs.qmul.ac.uk>
date:     Thu, 02 Feb 2012 21:55:51 +0000
parents:  746a5af43c02
children: 572564b7cb85
line diff
--- a/src/AudioEventMatcher.cpp	Thu Feb 02 17:52:08 2012 +0000
+++ b/src/AudioEventMatcher.cpp	Thu Feb 02 21:55:51 2012 +0000
@@ -13,16 +13,25 @@
 const int matchWindowWidth = 6000;
 
 AudioEventMatcher::AudioEventMatcher(){
-	bayesPositionWindow.setToRelativeSize(0, 0.4, 1, 0.2);
-	bayesTempoWindow.setToRelativeSize(0, 0.8, 1, 0.2);
-	bayesLikelihoodWindow.setToRelativeSize(0, 0.6, 1, 0.2);
-	
+	
 	setArraySizes();
 	usingRealTime = false;
 	bayesianStruct.realTimeMode = &usingRealTime;
+	recentPitch = 0;
 }
 
+void AudioEventMatcher::setWindowDimensions(){
+	double startHeight = recordedTracks.numberOfAudioTracks * recordedTracks.trackScreenHeight;
+	double heightAvailable = 1 - startHeight;
+	heightAvailable /= 3.0;
+	
+	bayesPositionWindow.setToRelativeSize(0, startHeight, 1, heightAvailable);
+	bayesLikelihoodWindow.setToRelativeSize(0, startHeight + 1*heightAvailable, 1, heightAvailable);
+	bayesTempoWindow.setToRelativeSize(0, startHeight + 2*heightAvailable, 1, heightAvailable);
+	
+	
+}
 
 void AudioEventMatcher::setArraySizes(){
 	bayesianStruct.resetSpeedSize(200);
@@ -48,6 +57,8 @@
 	//draw the scrolling audio tracks
 	recordedTracks.drawTracks();
+	
+	ofSetColor(255);
 	
 //	bayesianStruct.relativeSpeedPrior.drawVector(0, 200, bayesTempoWindow);
@@ -59,6 +70,7 @@
 
 //	bayesianStruct.relativeSpeedPosterior.drawVector(0, bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(2), bayesTempoWindow);
+	ofDrawBitmapString("pitch "+ofToString(recentPitch, 2)+", Time "+ofToString(recentTime, 0), 20, 20);
 }
 
 void AudioEventMatcher::drawBayesianDistributions(){
@@ -96,9 +108,17 @@
 	ofDrawBitmapString("screenamp "+ofToString(screenWidthMillis), 20, 100);
+	
+	//draw track by track likelihoods
+	for (int i = 0; i <recordedTracks.numberOfAudioTracks;i++){
+		ofSetColor(200,255,50);
+		likelihoodVisualisation[i].drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window);
+	}
+	
 }
 
 void AudioEventMatcher::newPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){
+	if (pitchIn > 0){
 	liveInput.addPitchEvent(pitchIn, timeIn);
 	
 	//tmp print stuff
@@ -106,22 +126,37 @@
 	double tmp = bayesianStruct.posterior.getMAPestimate();
 	printf(" getting it %f and offset %f == %f ms\n", tmp, bayesianStruct.posterior.offset, bayesianStruct.posterior.getIndexInRealTerms(tmp));
 	
-	matchNewPitchEvent(channel, pitchIn, timeIn);
+	matchNewPitchEvent(channel, pitchIn, timeIn);//main pitch matching fn
+	
+	likelihoodVisualisation[1] = bayesianStruct.likelihood;
+	
+	recentPitch = pitchIn;//for drawing
+	recentTime = timeIn;
+	}
 }
 
 void AudioEventMatcher::newKickEvent(const double& timeIn){
 //	liveInput.addKickEvent(timeIn);
 	matchNewOnsetEvent(0, timeIn);
+	likelihoodVisualisation[0] = bayesianStruct.likelihood;
 }
 
 void AudioEventMatcher::newKickEvent(const int& channel, const double& timeIn){
 //	liveInput.addKickEvent(timeIn);
 	matchNewOnsetEvent(channel, timeIn);
+	likelihoodVisualisation[0] = bayesianStruct.likelihood;
 }
 
 void AudioEventMatcher::newSnareEvent(const double& timeIn){
 	matchNewOnsetEvent(2, timeIn);
+	likelihoodVisualisation[2] = bayesianStruct.likelihood;
+}
+
+
+void AudioEventMatcher::newSnareEvent(const int& channel, const double& timeIn){
+	matchNewOnsetEvent(channel, timeIn);
+	likelihoodVisualisation[2] = bayesianStruct.likelihood;
 }
 
 //Needs just to set bounds for the matching process, not have TimeIn
@@ -174,9 +209,9 @@
 	
 	bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
 	
-	///set offsets
-//	bayesianStruct.likelihood.offset = bayesianStruct.prior.offset;
-	double pitchLikelihoodToNoise = 0.2;//more noise
+	//set the lielihoods by matching the pitched note
+	
+	double pitchLikelihoodToNoise = 0.6;//more noise
 	int numberOfMatches = 0;
 	bayesianStruct.likelihood.zero();//set to zero
@@ -185,7 +220,7 @@
 	for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
 		if (checkMatch(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn)) {
-			quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, 20);
+			quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, 10);
 			bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, 30, quantity);
 			recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = true;
 			numberOfMatches++;
@@ -198,12 +233,13 @@
 	}
 	
 	if (numberOfMatches > 0){//no point updating unless there is a match
+		bayesianStruct.likelihood.addConstant(numberOfMatches*(1-pitchLikelihoodToNoise)/(pitchLikelihoodToNoise*bayesianStruct.likelihood.length));
 		
 	//tmp set likelihood constant and calculate using that
 	//bayesianStruct.likelihood.zero();
 	//bayesianStruct.likelihood.addConstant(1);
-	
+	
 		bayesianStruct.calculatePosterior();
 	}
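
The new setWindowDimensions() stacks the three Bayesian displays (position, likelihood, tempo) in whatever screen height is left below the audio tracks. A minimal standalone sketch of that layout arithmetic, with the recordedTracks values replaced by assumed numbers purely for illustration:

// Sketch only: assumed track count and relative track height stand in for
// recordedTracks.numberOfAudioTracks and recordedTracks.trackScreenHeight.
#include <cstdio>

int main(){
    const int numberOfAudioTracks = 3;      // assumed
    const double trackScreenHeight = 0.15;  // assumed, as a fraction of window height

    double startHeight = numberOfAudioTracks * trackScreenHeight;  // tracks occupy the top
    double heightAvailable = (1.0 - startHeight) / 3.0;            // split the rest three ways

    // Same stacking order as the diff: position, then likelihood, then tempo.
    printf("position   window: y = %.2f, h = %.2f\n", startHeight, heightAvailable);
    printf("likelihood window: y = %.2f, h = %.2f\n", startHeight + 1 * heightAvailable, heightAvailable);
    printf("tempo      window: y = %.2f, h = %.2f\n", startHeight + 2 * heightAvailable, heightAvailable);
    return 0;
}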
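The matchNewPitchEvent() changes can be read as building the likelihood from one Gaussian bump per pitch-matched recorded onset, then adding a uniform noise floor of numberOfMatches*(1-p)/(p*length) with p = pitchLikelihoodToNoise. The following is a minimal standalone sketch of that construction, not the project's BayesianArrayStructure API; the bin width, onset times and match weight are assumptions for illustration:

// Sketch of the likelihood-plus-noise-floor idea from the hunk above.
#include <cmath>
#include <cstdio>
#include <vector>

int main(){
    const int length = 600;               // number of likelihood bins (assumed)
    const double binWidthMillis = 10.0;   // assumed time resolution of the grid
    std::vector<double> likelihood(length, 0.0);          // cf. likelihood.zero()

    // Onset times (ms) in the recorded track whose pitch matched the live note (assumed).
    std::vector<double> matchedOnsetTimes = {1200.0, 3450.0};
    const double sigmaMillis = 30.0;      // width used by addGaussianShapeFromRealTime
    const double quantity = 1.0;          // stands in for getPitchDistance(...)

    // One Gaussian bump per matched onset, cf. addGaussianShapeFromRealTime.
    for (double onsetMillis : matchedOnsetTimes){
        for (int i = 0; i < length; i++){
            double t = i * binWidthMillis;
            double d = (t - onsetMillis) / sigmaMillis;
            likelihood[i] += quantity * std::exp(-0.5 * d * d);
        }
    }

    // Uniform noise floor, mirroring likelihood.addConstant(...):
    // numberOfMatches * (1 - p) / (p * length), with p = pitchLikelihoodToNoise.
    const double pitchLikelihoodToNoise = 0.6;
    const int numberOfMatches = (int)matchedOnsetTimes.size();
    const double noiseFloor = numberOfMatches * (1.0 - pitchLikelihoodToNoise)
                              / (pitchLikelihoodToNoise * length);
    for (int i = 0; i < length; i++)
        likelihood[i] += noiseFloor;

    printf("noise floor per bin: %f\n", noiseFloor);
    return 0;
}

On this reading, raising pitchLikelihoodToNoise from 0.2 to 0.6 shrinks the uniform term relative to the matched Gaussians, so the posterior computed by bayesianStruct.calculatePosterior() leans more heavily on the pitch matches.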