changeset 56:4394c9490716 tip

minor changes
author Andrew N Robertson <andrew.robertson@eecs.qmul.ac.uk>
date Mon, 24 Dec 2012 18:58:39 +0000
parents 2eca10a31ae2
children
files src/AudioEventMatcher.cpp src/AudioEventMatcher.h src/RecordedMultitrackAudio.cpp src/testApp.h
diffstat 4 files changed, 41 insertions(+), 19 deletions(-)
--- a/src/AudioEventMatcher.cpp	Mon Dec 10 17:07:21 2012 +0000
+++ b/src/AudioEventMatcher.cpp	Mon Dec 24 18:58:39 2012 +0000
@@ -15,6 +15,7 @@
 const int matchWindowWidth = 8000;//ms in which to match
 
 const float pitchCutOff = 16;//within which pitches are even considered
+const double pitchWidth = 12;
 
 bool printInfo = false;
 
@@ -22,14 +23,16 @@
 
 	ofBackground(0);
 	useChromaDotProduct = false;//false for most tests
-	
 	printingData = false;
+	updateTempoMethodOn = false;
 	
 	pitchLikelihoodToNoise = 0.6;//more noise
+	pitchLikelihoodWidth = 30;
+	
 	chromaLikelihoodToNoise = 0.5;//lower => more noise, higher more weight for events
 	chromaLikelihoodWidth = 50;//ms round onset event
 	
-	onsetLikelihoodToNoise = 0.2;//0.1 and 10 as to 9/5/12
+	//onsetLikelihoodToNoise = 0.2;//0.1 and 10 as to 9/5/12
 	kickLikelihoodToNoise = 0.3;
 	snareLikelihoodToNoise = 0.1;
 	
@@ -223,25 +226,33 @@
 		currentAlignmentPosition = markerPlaybackPosition;
 	}
 	
-	updateRecordedTempo();
-	temporal.tempoPosterior.addGaussianShape(temporal.tempoPosterior.MAPestimate, temporal.tempoArraySize / 4, 0.5	);
+	if (updateTempoMethodOn){
+		updateRecordedTempo();
+	}
 }
 
 void AudioEventMatcher::updateRecordedTempo(){
 	//tempo of equivalent recorded position is updated 
-	recordedTempo = getRecordedTempoAtMillis(currentAlignmentPosition);
+
+	if (recordedTempoIndex < recordedTempoData.globalTempoTimes.size()){//if for debug
+		recordedTempo = getRecordedTempoAtMillis(currentAlignmentPosition);
 		
-	double tmpRatio = currentSpeedRatio;
-	currentSpeedRatio = temporal.playingTempo / recordedTempo;
-	if (currentSpeedRatio != tmpRatio)
-		setSpeedRatioDistribution(currentSpeedRatio);
+		double tmpRatio = currentSpeedRatio;
+		
+		currentSpeedRatio = temporal.playingTempo / recordedTempo;
+		if (currentSpeedRatio != tmpRatio)
+			setSpeedRatioDistribution(currentSpeedRatio);
+		
 	}//end if to prevent debug crash
+	
+	temporal.tempoPosterior.addGaussianShape(temporal.tempoPosterior.MAPestimate, temporal.tempoArraySize / 4, 0.5	);
+
 }
-
+ 
 double AudioEventMatcher::getRecordedTempoAtMillis(const double& millisPosition){
-	if (recordedTempoIndex < recordedTempoData.globalTempoTimes.size()){//if for debug
+	
 		while(currentAlignmentPosition < recordedTempoData.globalTempoTimes[recordedTempoIndex] && recordedTempoIndex > 0){
-			//this loop never used as sequential, so we expewct the laignment time to be ahead of the last recorded tempo point
+			//this loop never used as sequential, so we expect the alignment time to be ahead of the last recorded tempo point
 			//but just in case
 			recordedTempoIndex--;
 		}
@@ -250,7 +261,6 @@
 			recordedTempoIndex++;
 		}
 		
-		
 		return recordedTempoData.globalTempo[recordedTempoIndex];
 }
 
@@ -778,9 +788,9 @@
 		for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
 			
 			if (checkMatch(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn)) {
-				quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, 12);
+				quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, pitchWidth);
 				
-				bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, 30, quantity);
+				bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, pitchLikelihoodWidth, quantity);
 				recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = true;
 				numberOfMatches++;
 				totalLikelihoodAdded += quantity;
--- a/src/AudioEventMatcher.h	Mon Dec 10 17:07:21 2012 +0000
+++ b/src/AudioEventMatcher.h	Mon Dec 24 18:58:39 2012 +0000
@@ -124,7 +124,8 @@
 	
 	//params
 	double onsetLikelihoodWidth;
-	double onsetLikelihoodToNoise;
+	double pitchLikelihoodWidth;
+	//double onsetLikelihoodToNoise;
 	double kickLikelihoodToNoise, snareLikelihoodToNoise;
 	double pitchLikelihoodToNoise;//more noise
 	double chromaLikelihoodToNoise;
@@ -137,12 +138,15 @@
 	TempoFollower temporal;
 	TempoFollower recordedTempoData;
 	void calculateRecordedTempoData();
+
 	void setTempoPrior(double tempo);
 	
 	void setNextOnsetTime(const int&channel, int& time, int* indexForOnsets);
 	int recordedTempoIndex;
 	double recordedTempo;
 	void updateRecordedTempo();
+	double getRecordedTempoAtMillis(const double& millisPosition);
+	
 	void drawRecordedTempo();
 	double currentSpeedRatio;
 	void drawPlayingTempo();
@@ -167,5 +171,7 @@
 	
 	void checkTempo();
 	double relativeTempo;
+	
+	bool updateTempoMethodOn;
 };
 #endif
--- a/src/RecordedMultitrackAudio.cpp	Mon Dec 10 17:07:21 2012 +0000
+++ b/src/RecordedMultitrackAudio.cpp	Mon Dec 24 18:58:39 2012 +0000
@@ -18,7 +18,7 @@
 	numberOfAudioTracks = 4;
 	
 
-	int multitrackToLoad = 27;
+	int multitrackToLoad = 0;
 	setDifferentMultitracks(multitrackToLoad);//command to load this set of audio files - see below
 	
 	//number 7 is problematic with memory
@@ -49,20 +49,26 @@
 			break;
 		case 1:
 			//USE THE 18 AT BOTTOM
+			//numberOfAudioTracks = 3;
 				kickfilename = "../../../data/sound/DiamondMatch1/kick_bip.wav";	
 				bassfilename = "../../../data/sound/DiamondMatch1/bass_bip.wav";	
 				snarefilename = "../../../data/sound/DiamondMatch1/snare_bip.wav";	
+				guitarfilename = "";
 			break;
 		case 2:
+			//numberOfAudioTracks = 3;
 				kickfilename = "../../../data/sound/LiveIdiot2/02kick_bip.wav";	
 				bassfilename = "../../../data/sound/LiveIdiot2/01bass_bip.wav";	
 				snarefilename = "../../../data/sound/LiveIdiot2/03snare_bip.wav";	
+				guitarfilename = "";
 			break;
 		case 3:
+			//numberOfAudioTracks = 3;
 			 	kickfilename = "../../../data/sound/UistLive1/02kick_bip.wav";	
 			 	bassfilename = "../../../data/sound/UistLive1/01bass_bip.wav";	
 			 	snarefilename = "../../../data/sound/UistLive1/03snare_bip.wav";	
-		break;
+				guitarfilename = "";
+			break;
 			
 		case 4:
 			kickfilename = "/Volumes/Supersaurus/TractorsAlbum/tractorsIdiotDance/tractorsSetToRecord2/Bounces/TakeTwo/02kick_bip.wav";	
--- a/src/testApp.h	Mon Dec 10 17:07:21 2012 +0000
+++ b/src/testApp.h	Mon Dec 24 18:58:39 2012 +0000
@@ -9,7 +9,7 @@
 //#include "ofxSoundFileLoader.h"
 #include "AudioEventMatcher.h"
 
-#include "LoadedAudioHolder.h"
+//#include "LoadedAudioHolder.h"
 
 #include "OutputDataWriter.h"