changeset 32:4be22a1a0e24

added chroma comparison for fourth channel (guitar)
author Andrew N Robertson <andrew.robertson@eecs.qmul.ac.uk>
date Mon, 02 Apr 2012 17:19:22 +0100
parents 02f659277346
children ca40d52d9998
files EvaluationPlan.pages src/AudioEventMatcher.cpp src/AudioEventMatcher.h src/LiveAudioInput.cpp src/RecordedMultitrackAudio.cpp src/testApp.cpp
diffstat 6 files changed, 209 insertions(+), 66 deletions(-) [+]
line wrap: on
line diff
Binary file EvaluationPlan.pages has changed
--- a/src/AudioEventMatcher.cpp	Sun Apr 01 01:33:23 2012 +0100
+++ b/src/AudioEventMatcher.cpp	Mon Apr 02 17:19:22 2012 +0100
@@ -11,11 +11,14 @@
 
 
 const int matchWindowWidth = 6000;
+const float pitchCutOff = 16;//within which pitches are even considered
 
 AudioEventMatcher::AudioEventMatcher(){
 
 	
 	pitchLikelihoodToNoise = 0.6;//more noise
+	chromaLikelihoodToNoise = 0.5;//lower => more noise, higher more weight for events
+	chromaLikelihoodWidth = 50;//ms round onset event
 	
 	onsetLikelihoodToNoise = 0.4;
 	onsetLikelihoodWidth = 10;//in ms
@@ -42,7 +45,7 @@
 void AudioEventMatcher::setWindowDimensions(){
 	double startHeight = recordedTracks.numberOfAudioTracks * recordedTracks.trackScreenHeight;
 	double heightAvailable = 1 - startHeight;
-	heightAvailable /= NUMBER_OF_CHANNELS;
+	heightAvailable /= numberOfChannels;
 	
 	bayesPositionWindow.setToRelativeSize(0, startHeight, 1, heightAvailable);
 	bayesLikelihoodWindow.setToRelativeSize(0, startHeight + 1*heightAvailable, 1, heightAvailable);
@@ -210,12 +213,16 @@
 	synchroniser.updateOutputSpeed();
 	
 	bayesianStruct.projectDistribution(newTime, currentAlignmentPosition, projectedPrior);//prior gets updated to where we are now
-	printf("alignment %i:: %i\n", newTime, (int) currentAlignmentPosition);
+
+//	printf("updateBestAlignment:: alignment %i:: %i\n", newTime, (int) currentAlignmentPosition);
 	
 //	printf("ALIGN pos %f time diff %f (now %f , last %f)speed %f :: ALIGN BEST %f\n", tmp, timetmp, (double)ofGetElapsedTimeMillis(), lastAlignmentTime, speedtmp, currentAlignmentPosition);
 }
 
 void AudioEventMatcher::draw(){
+
+	//MAIN DRAW FUNCTION FOR ALL
+	
 	//draw some outlines in blue
 	ofSetColor(20,200,200);
 	bayesPositionWindow.drawOutline();
@@ -233,22 +240,7 @@
 	//bayesianStruct.posterior.drawVector(0, bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
 	//bayesianStruct.posterior.drawVector(bayesianStruct.posterior.getRealTermsAsIndex(0), bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis), bayesPositionWindow);
 	//bayesianStruct.relativeSpeedPosterior.drawVector(0, bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(2), bayesTempoWindow);
-	string tmpStr = "pitch "+ofToString(recentPitch, 2);
-	tmpStr += " Nearest "+ofToString(pitchOfNearestMatch,2);
-	tmpStr += " dist "+ofToString(distanceOfNearestMatch, 2);
-	tmpStr +=  ", Time "+ofToString(recentTime, 0);
-	ofDrawBitmapString(tmpStr, 20, 20);
-					   
-					  
 	
-	string alignString = " align "+ofToString(currentAlignmentPosition, 2);
-	alignString += " playing "+ofToString(synchroniser.playingPositionRatio, 5);
-	alignString += "  pos "+ofToString(synchroniser.playingPositionMillis,0)+" ms";
-	alignString +=  "  rec pos "+ofToString(synchroniser.recordedPositionMillis,0)+" ms";
-	ofDrawBitmapString(alignString, 20, 50);
-	
-	ofDrawBitmapString("pos "+ofToString(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.playPosition), 200,600);
-
 	temporal.drawTempoArray(bayesLikelihoodWindow);
 	
 	drawRecordedTempo();
@@ -304,40 +296,57 @@
 
 void AudioEventMatcher::drawBayesianDistributions(){
 	
-	
-	int startIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenStartTimeMillis);
-	int endIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenEndTimeMillis);
-	
-	bayesianStruct.posterior.drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), bayesPositionWindow);
-	
-	string tmpString = "start "+ofToString(screenStartTimeMillis)+" (index "+ofToString(startIndex)+"), end "+ofToString(screenEndTimeMillis);
-	ofDrawBitmapString(tmpString, bayesPositionWindow.x+20, bayesPositionWindow.y+20);
+
+	drawPositionWindow();	
 	
 //	bayesianStruct.likelihood.drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), bayesLikelihoodWindow);
 	
 	bayesianStruct.relativeSpeedPosterior.drawConstrainedVector(0, bayesianStruct.relativeSpeedPosterior.arraySize, 0, ofGetWidth(), bayesTempoWindow);
+
 	
-	string tmpStr = "zero is "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(0));
-	tmpStr += " offsetis "+ofToString(bayesianStruct.posterior.offset);
-	tmpStr += " screenWidth = "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis));
-	ofDrawBitmapString(tmpStr, 20,140);
-	tmpStr = "best est "+ofToString(bayesianStruct.bestEstimate);
-	ofDrawBitmapString(tmpStr, 20, 180);
+	drawTrackLikelihoods();
+
+//	int priorStartIndex = bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis);
+//	int priorEndIndex = bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis);
+//	ofSetColor(0,200,200);//recent prior
+//	recentPrior.drawConstrainedVector(priorStartIndex, priorEndIndex, 0, ofGetWidth(), bayesPositionWindow);
+
+	drawInfo();
+
 	
-	ofDrawBitmapString("screenwidth "+ofToString(screenWidthMillis), 20, 800);
+}
+
+void AudioEventMatcher::drawPositionWindow(){
+	int startIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenStartTimeMillis);
+	int endIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenEndTimeMillis);
+	string tmpString = "start "+ofToString(screenStartTimeMillis)+" (index "+ofToString(startIndex)+"), end "+ofToString(screenEndTimeMillis);
+	ofDrawBitmapString(tmpString, bayesPositionWindow.x+20, bayesPositionWindow.y+20);
+	
+	//draw posterior in the bayes position window
+	ofSetColor(255,0,255);
+	bayesianStruct.posterior.drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), bayesPositionWindow);
 	
 	//green line at current best estimate
 	ofSetColor(0,255,0);//green scrolling line best position
 	double currentEstimateIndex = (currentAlignmentPosition - screenStartTimeMillis)*ofGetWidth()/screenWidthMillis;
 	ofLine(currentEstimateIndex, bayesPositionWindow.y, currentEstimateIndex, bayesPositionWindow.y + bayesPositionWindow.height);
 	
-
+	
 	ofSetColor(0,255,255);//synchroniser position
 	currentEstimateIndex = (synchroniser.playingPositionMillis - screenStartTimeMillis)*ofGetWidth()/screenWidthMillis;
 	ofLine(currentEstimateIndex, bayesLikelihoodWindow.y, currentEstimateIndex, bayesLikelihoodWindow.y + bayesPositionWindow.height);
+
+	ofSetColor(255,0,100);//purple prior
+	bayesianStruct.prior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);
 	
+	ofSetColor(255,0,0);//projected prior in red
+	projectedPrior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);
 	
 	
+	
+}
+
+void AudioEventMatcher::drawTrackLikelihoods(){
 	//draw track by track likelihoods
 	for (int i = 0; i <recordedTracks.numberOfAudioTracks;i++){
 		ofSetColor(200,255,50);//channel likelihoods in yellow
@@ -350,19 +359,32 @@
 		ofSetColor(255);
 		ofDrawBitmapString("recent event "+ofToString(recentEventTime[i]), recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.x + 20, recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.y + recordedTracks.loadedAudioFiles[i].fileLoader.onsetDetect.window.height - 10);
 	}
+}
 
-	int priorStartIndex = bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis);
-	int priorEndIndex = bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis);
-//	ofSetColor(0,200,200);//recent prior
-//	recentPrior.drawConstrainedVector(priorStartIndex, priorEndIndex, 0, ofGetWidth(), bayesPositionWindow);
 
-	ofSetColor(255,0,100);//purple prior
-	bayesianStruct.prior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);
+void AudioEventMatcher::drawInfo(){
+	string tmpStr = "zero is "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(0));
+	tmpStr += " offsetis "+ofToString(bayesianStruct.posterior.offset);
+	tmpStr += " screenWidth = "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis));
+	ofDrawBitmapString(tmpStr, 20,140);
+	tmpStr = "best est "+ofToString(bayesianStruct.bestEstimate);
+	ofDrawBitmapString(tmpStr, 20, 180);
+	ofDrawBitmapString("screenwidth "+ofToString(screenWidthMillis), 20, 800);
 	
-	ofSetColor(255,0,0);
-	projectedPrior.drawConstrainedVector(bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis), bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), bayesPositionWindow);
+	ofSetColor(255);
+	tmpStr = "pitch "+ofToString(recentPitch, 2);
+	tmpStr += " Nearest "+ofToString(pitchOfNearestMatch,2);
+	tmpStr += " dist "+ofToString(distanceOfNearestMatch, 2);
+	tmpStr +=  ", Time "+ofToString(recentTime, 0);
+	ofDrawBitmapString(tmpStr, 20, 20);
 	
+	string alignString = " align "+ofToString(currentAlignmentPosition, 2);
+	alignString += " playing "+ofToString(synchroniser.playingPositionRatio, 5);
+	alignString += "  pos "+ofToString(synchroniser.playingPositionMillis,0)+" ms";
+	alignString +=  "  rec pos "+ofToString(synchroniser.recordedPositionMillis,0)+" ms";
+	ofDrawBitmapString(alignString, 20, 50);
 	
+	ofDrawBitmapString("pos "+ofToString(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.playPosition), 200,600);
 	
 }
 
@@ -379,9 +401,22 @@
 	recentPitch = pitchIn;//for drawing
 	recentTime = timeIn;
 	}
+}
+
+
+void AudioEventMatcher::newChromaEvent(const int& channel, float* chromaIn, const double& timeIn){
+	
+	//	could add event to the liveInput list? as in pitch event
+//	printf("match chroma channel %i\n", channel);	
+
+	matchNewChromaEvent(channel, chromaIn, timeIn);//main chroma matching fn
+	
+	likelihoodVisualisation[channel] = bayesianStruct.likelihood;
+	
 	
 }
 
+
 void AudioEventMatcher::newKickEvent(const double& timeIn){	
 //	liveInput.addKickEvent(timeIn);
 	matchNewOnsetEvent(0, timeIn);
@@ -412,27 +447,29 @@
 	bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
 
 	//start at beginning but OPTIMISE later
-	
-	
 	bayesianStruct.likelihood.offset = bayesianStruct.prior.offset;
 	bayesianStruct.likelihood.zero();//set to zero
 	
 	double quantity = 1;//likelihoodToNoiseRatio / numberOfMatches;
 	int numberOfMatchesFound = 0;
 
-	
 	double startMatchingTime = bayesianStruct.likelihood.offset;
 	double endMatchingTime = bayesianStruct.likelihood.offset + matchWindowWidth;
-	
+	double millisTime = -1*INFINITY;//or 0 is fine
+	int checkIndex = 0;
 	if (channel <= recordedTracks.numberOfAudioTracks){
-		for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
-			double millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
+		while (millisTime < startMatchingTime) {
+			millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[checkIndex].millisTime;
+			checkIndex++;
+		}
+		for (int i = checkIndex;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size() && millisTime <= endMatchingTime;i++){
+			millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
 			if (millisTime >= startMatchingTime && millisTime <= endMatchingTime){
 				bayesianStruct.likelihood.addGaussianShapeFromRealTime(millisTime, onsetLikelihoodWidth,  quantity);
 				numberOfMatchesFound++;
 		//		printf("Adding Gaussian for onset at time %f offset %f\n", millisTime, bayesianStruct.likelihood.offset);
 				
-			}
 			}//end if within limits (now guaranteed by the loop condition)
 		}
 	}
 	
@@ -452,8 +489,6 @@
 	temporal.updateTempo(channel, timeIn);
 	}
 	
-
-	
 }
 
 
@@ -476,15 +511,17 @@
 	double closestDistance = INFINITY;
 	
 	double quantity = 0;
+	double totalLikelihoodAdded = 0;
 	if (channel <= recordedTracks.numberOfAudioTracks){
 		for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
 			
 			if (checkMatch(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn)) {
-				quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, 8);
+				quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, 12);
 				
 				bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, 30, quantity);
 				recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = true;
 				numberOfMatches++;
+				totalLikelihoodAdded += quantity;
 			} 
 			else{
 				recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = false;
@@ -503,8 +540,8 @@
 	
 	
 	if (numberOfMatches > 0){//no point updating unless there is a match
-	
-		bayesianStruct.likelihood.addConstant(numberOfMatches*(1-pitchLikelihoodToNoise)/(pitchLikelihoodToNoise*bayesianStruct.likelihood.length));
+	//replacing numberOfMatches with totalLike below...
+		bayesianStruct.likelihood.addConstant(totalLikelihoodAdded*(1-pitchLikelihoodToNoise)/(pitchLikelihoodToNoise*bayesianStruct.likelihood.length));
 
 	//tmp set likelihood constant and calculate using that
 	//bayesianStruct.likelihood.zero();
@@ -529,7 +566,7 @@
 	
 	int multiplicationFactor = 1;
 	if (pitchTwo > 0){
-		int multiplicationFactor = round(pitchOne/pitchTwo);
+		multiplicationFactor = round(pitchOne/pitchTwo);
 	}
 	
 	double distance = abs(pitchOne - pitchTwo*multiplicationFactor);
@@ -538,7 +575,7 @@
 	else
 		distance = 0;
 	
-//	printf("[pitch distance %f vs %f = %f\n", pitchOne, pitchTwo, distance);
+	//printf("[pitch distance %f vs %f, factor %i = %f\n", pitchOne, pitchTwo, multiplicationFactor, distance);
 	return distance;
 
 }
@@ -549,7 +586,7 @@
 	if (livePitch > 0){
 	int multiplicationFactor = (int)(round(recordedPitch/livePitch));
 	
-	if (abs(recordedPitch - livePitch * multiplicationFactor) < 16)
+	if (abs(recordedPitch - livePitch * multiplicationFactor) < pitchCutOff)
 		return true;
 	else
 		return false;
@@ -560,6 +597,80 @@
 }
 
 
+void AudioEventMatcher::matchNewChromaEvent(const int& channel, float* chromaIn, const double& timeIn){
+	//start at beginning but OPTIMISE later
+
+	bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets
+	
+	//set the likelihoods by matching the pitched note
+	
+	int numberOfMatches = 0;
+	bayesianStruct.likelihood.zero();//set to zero
+	double newOnsetTime;
+	double closestDistance = INFINITY;
+	
+	double quantity = 1;
+	double totalLikelihoodAdded = 0;
+	
+	double startMatchingTime = bayesianStruct.likelihood.offset;
+	double endMatchingTime = bayesianStruct.likelihood.offset + matchWindowWidth;
+	double millisTime = -1*INFINITY;//or 0 is fine
+	
+	int checkIndex = 0;
+	if (channel <= recordedTracks.numberOfAudioTracks){
+		while (millisTime < startMatchingTime) {
+			millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[checkIndex].millisTime;
+			checkIndex++;
+		}//advance quickly past onsets that precede the matching window
+		
+		for (int i = checkIndex;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size() && millisTime <= endMatchingTime;i++){
+			millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
+			
+			if (millisTime >= startMatchingTime && millisTime <= endMatchingTime){
+				quantity = getChromaDistance(chromaIn, &recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].chromaValues[0]);
+				bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, chromaLikelihoodWidth, quantity);
+				
+			//	bayesianStruct.likelihood.addGaussianShapeFromRealTime(millisTime, onsetLikelihoodWidth,  quantity);
+				numberOfMatches++;
+				totalLikelihoodAdded += quantity;
+				printf("Adding CHROMA Gaussian for onset at time %.1f dist %.3f\n", millisTime, quantity);
+				
+			}//end if within limits (now guaranteed by the loop condition)
+		}
+	}
+	
+	
+	if (numberOfMatches > 0){//no point updating unless there is a match
+		//replacing numberOfMatches with totalLike below...
+		
+		printf("CHROMA HAS %i MATCHES\n", numberOfMatches);
+		
+		bayesianStruct.likelihood.addConstant(totalLikelihoodAdded*(1-chromaLikelihoodToNoise)/(chromaLikelihoodToNoise*bayesianStruct.likelihood.length));
+		
+		bayesianStruct.calculatePosterior();
+		lastAlignmentTime = timeIn;//has to use the STAMPED time
+		recentEventTime[channel] = timeIn;
+		
+		recentPriors[channel] = bayesianStruct.prior;
+		projectedPrior = bayesianStruct.prior;	
+		
+		temporal.eventTimes[channel].push_back(timeIn);
+	}
+	
+}
+
+
+double AudioEventMatcher::getChromaDistance(float* chromaOne, float* chromaTwo){
+	double distance = 0;
+	double total = 0;
+	for (int i = 0;i < 12;i++){
+		distance += chromaOne[i]*chromaTwo[i];
+		total += chromaOne[i]*chromaOne[i] + (chromaTwo[i]*chromaTwo[i]);
+	}
+	
+	distance /= sqrt(total);
+	return distance;
+}
 
 void AudioEventMatcher::windowResized(const int& w, const int& h){
 	recordedTracks.windowResized(w,h);
--- a/src/AudioEventMatcher.h	Sun Apr 01 01:33:23 2012 +0100
+++ b/src/AudioEventMatcher.h	Mon Apr 02 17:19:22 2012 +0100
@@ -12,7 +12,7 @@
 #ifndef AUDIO_EVENT_MATCHER_H
 #define AUDIO_EVENT_MATCHER_H
 
-#define NUMBER_OF_CHANNELS 3
+
 
 #include "ofMain.h"
 #include "ChromaOnset.h"
@@ -24,6 +24,8 @@
 #include "AccompanimentSynchroniser.h"
 #include "TempoFollower.h"
 
+static const int numberOfChannels = 4;
+
 class AudioEventMatcher{
 	
 	public:
@@ -35,7 +37,11 @@
 	void updateBestAlignmentPosition();
 	
 	void draw();
-	void drawBayesianDistributions();;
+	void drawBayesianDistributions();
+	void drawPositionWindow();
+	void drawTrackLikelihoods();
+	void drawInfo();
+	
 	void setWindowDimensions();
 	
 	void newPitchEvent(const int& channel, const double& pitchIn, const double& timeIn);
@@ -44,8 +50,13 @@
 	void newSnareEvent(const double& timeIn);
 	void newSnareEvent(const int& channel, const double& timeIn);
 	
+	void newChromaEvent(const int& channel, float* chromaIn, const double& timeIn);
+
+	
 	void matchNewPitchEvent(const int& channel, const double& pitchIn, const double& timeIn);
 	void matchNewOnsetEvent(const int& channel, const double& timeIn);
+	void matchNewChromaEvent(const int& channel, float* chroma, const double& timeIn);
+	double getChromaDistance(float* chromaOne, float* chromaTwo);
 	
 	BayesianArrayStructure bayesianStruct;//hold the probability distriubtions
 	
@@ -70,15 +81,15 @@
 	bool usingRealTime;
 	double recentPitch, recentTime;
 	
-	DynamicVector likelihoodVisualisation[NUMBER_OF_CHANNELS];
-	DynamicVector recentPriors[NUMBER_OF_CHANNELS];
+	DynamicVector likelihoodVisualisation[numberOfChannels];
+	DynamicVector recentPriors[numberOfChannels];
 	//DynamicVector recentPrior;
 	DynamicVector projectedPrior;
 	
 	double currentAlignmentPosition;
 	double lastAlignmentTime;
 	
-	double recentEventTime[NUMBER_OF_CHANNELS];
+	double recentEventTime[numberOfChannels];
 	int startTime;
 	int currentAlignmentTime;
 	
@@ -92,11 +103,12 @@
 	double onsetLikelihoodWidth;
 	double onsetLikelihoodToNoise;
 	double pitchLikelihoodToNoise;//more noise
+	double chromaLikelihoodToNoise;
 	
 	double pitchOfNearestMatch;//for viz purposes
 	double distanceOfNearestMatch;
 	
-
+	double chromaLikelihoodWidth;
 	
 	TempoFollower temporal;
 	TempoFollower recordedTempoData;
--- a/src/LiveAudioInput.cpp	Sun Apr 01 01:33:23 2012 +0100
+++ b/src/LiveAudioInput.cpp	Mon Apr 02 17:19:22 2012 +0100
@@ -33,6 +33,21 @@
 
 
 
+/*
+ //any point in this??
+void LiveAudioInput::addChromaEvent(const double& time){
+	
+	AudioEvent e;
+	e.millisTime = time;
+	e.frameTime = millisToFrames(time);
+	
+	liveEvents.push_back(e);
+	//printf("live input pitch %f time %f ms == %f frames\n", pitch, time, e.frameTime);
+	numberOfEvents++;
+	
+}
+*/
+
 double LiveAudioInput::framesToMillis(const double& frameCount){
 	return ((frameCount*hopsize*1000.0)/44100.0);
 }
--- a/src/RecordedMultitrackAudio.cpp	Sun Apr 01 01:33:23 2012 +0100
+++ b/src/RecordedMultitrackAudio.cpp	Mon Apr 02 17:19:22 2012 +0100
@@ -66,11 +66,13 @@
 			kickfilename = "/Users/andrew/Documents/work/Alignment/MultitrackMatch/tractorsAlign/Take12/02kick_bip.wav";
 			bassfilename = "/Users/andrew/Documents/work/Alignment/MultitrackMatch/tractorsAlign/Take12/01bass_bip.wav";
 			snarefilename = "/Users/andrew/Documents/work/Alignment/MultitrackMatch/tractorsAlign/Take12/03snare_bip.wav";	
+			guitarfilename ="/Users/andrew/Documents/work/Alignment/MultitrackMatch/tractorsAlign/Take12/04elec_bip.wav";
 			break;	
 		case 6:
 			kickfilename = "/Users/andrew/Documents/work/Alignment/MultitrackMatch/tractorsAlign/Take13/02kick_bip.wav";
 			bassfilename = "/Users/andrew/Documents/work/Alignment/MultitrackMatch/tractorsAlign/Take13/01bass_bip.wav";
 			snarefilename = "/Users/andrew/Documents/work/Alignment/MultitrackMatch/tractorsAlign/Take13/03snare_bip.wav";	
+			guitarfilename ="/Users/andrew/Documents/work/Alignment/MultitrackMatch/tractorsAlign/Take13/04elec_bip.wav";
 			break;		
 		
 		case 7:
--- a/src/testApp.cpp	Sun Apr 01 01:33:23 2012 +0100
+++ b/src/testApp.cpp	Mon Apr 02 17:19:22 2012 +0100
@@ -80,12 +80,15 @@
 		
 		// check for mouse moved message
 		if ( m.getAddress() == "/elec" ){
-			float chroma[12];
+			int testChannel = m.getArgAsInt32(0);
+			double timeIn = m.getArgAsFloat(1);
+			float chromaIn[12];
+			printf("CHROMA received at time %f\n", timeIn);
 			for (int i = 0;i < 12;i++){
-				chroma[i] = m.getArgAsFloat(i);
-				printf("chroma[%i]: %f\n", i, chroma[i]);
+				chromaIn[i] = m.getArgAsFloat(i+2);
+				printf("chroma[%i]: %f\n", i, chromaIn[i]);
 			}
-//			eventMatcher.newChromaEvent(testChannel, pitchIn, timeIn); - WRite this!
+			eventMatcher.newChromaEvent(testChannel, chromaIn, timeIn);
 		}