view src/AudioEventMatcher.cpp @ 34:0d52ba6844b9

working on chroma inclusion in alignment
author Andrew N Robertson <andrew.robertson@eecs.qmul.ac.uk>
date Tue, 03 Apr 2012 13:13:38 +0100
parents 4be22a1a0e24
children 6fb77b20413c
line wrap: on
line source
/*
 *  AudioEventMatcher.cpp
 *  MultipleAudioMathcher
 *
 *  Created by Andrew on 31/01/2012.
 *  Copyright 2012 QMUL. All rights reserved.
 *
 */

#include "AudioEventMatcher.h"

#include <climits>
#include <cmath>


const int matchWindowWidth = 6000;//ms: width of the window (from the likelihood offset) within which recorded onsets are matched
const float pitchCutOff = 16;//within which pitches are even considered

AudioEventMatcher::AudioEventMatcher(){

	//event-vs-noise mixing weights: lower values weight the constant noise
	//floor more heavily, higher values trust the detected events more
	pitchLikelihoodToNoise = 0.6;
	chromaLikelihoodToNoise = 0.5;
	onsetLikelihoodToNoise = 0.4;

	//gaussian widths (ms) placed around each matched event
	chromaLikelihoodWidth = 50;
	onsetLikelihoodWidth = 10;

	setArraySizes();

	//state flags and initial tracking state
	usingRealTime = false;
	bayesianStruct.realTimeMode = &usingRealTime;
	followingLiveInput = true;
	startedPlaying = false;

	recentPitch = 0;
	currentAlignmentPosition = 0;
	recordedTempoIndex = 0;
//	temporal.setUpEventTimeMatrix();
//	recordedTempoData.setUpEventTimeMatrix();
}




void AudioEventMatcher::setWindowDimensions(){
	//stack the three bayes display windows below the audio tracks, splitting
	//the remaining (relative) vertical space equally between the channels
	double tracksBottom = recordedTracks.numberOfAudioTracks * recordedTracks.trackScreenHeight;
	double rowHeight = (1 - tracksBottom) / numberOfChannels;

	bayesPositionWindow.setToRelativeSize(0, tracksBottom, 1, rowHeight);
	bayesLikelihoodWindow.setToRelativeSize(0, tracksBottom + 1*rowHeight, 1, rowHeight);
	bayesTempoWindow.setToRelativeSize(0, tracksBottom + 2*rowHeight, 1, rowHeight);
}

void AudioEventMatcher::setArraySizes(){
	//size and scale the bayesian arrays; values were previously inline literals
	const int speedArraySize = 200;
	const double speedScalar = 0.01;//real units per speed-array index
	const double initialSpeedPrior = 1.0;

	bayesianStruct.resetSpeedSize(speedArraySize);
	bayesianStruct.setRelativeSpeedScalar(speedScalar);
	bayesianStruct.setSpeedPrior(initialSpeedPrior);
	bayesianStruct.relativeSpeedPrior.getMaximum();

	//position distribution covers the whole match window at 1:1 scale
	bayesianStruct.resetSize(matchWindowWidth);
	bayesianStruct.setPositionDistributionScalar(1);
}

void AudioEventMatcher::loadAudioFiles(){
	//load the test material and tell the synchroniser how long it is
	recordedTracks.loadTestAudio();
	synchroniser.fileLengthSamples = recordedTracks.loadedAudioFiles[0].fileLoader.totalNumberOfSamples;
	printf("synchroniser has %f samples\n", synchroniser.fileLengthSamples);

	//two-pass tempo estimation: the first pass bootstraps a global tempo,
	//which then seeds the prior for a second, more accurate pass
	calculateRecordedTempoData();
	printf("\n\nFIRST PASS: FINAL recorded tempo is %f\n", recordedTempoData.playingTempo);
	setTempoPrior(recordedTempoData.playingTempo);
	calculateRecordedTempoData();

	printf("\n\nSECOND PASS: FINAL recorded tempo is %f\n", recordedTempoData.playingTempo);
	printf("GLOBAL TEMPO of RECORDED FILES\n");
	recordedTempoData.printTempoTimes();
}

void AudioEventMatcher::setTempoPrior(double tempo){
	//reset the recorded-tempo tracker and centre its posterior on 'tempo'
	//(gaussian of width 3, weight 1)
	recordedTempoData.zero();
	recordedTempoData.tempoPosterior.zero();
	recordedTempoData.tempoPosterior.addGaussianShapeFromRealTime(tempo, 3, 1);
}

void AudioEventMatcher::calculateRecordedTempoData(){
	//Merge-walk the kick (channel 0) and snare (channel 2) onset lists in
	//chronological order, feeding each onset to the recorded tempo tracker.
	//An exhausted channel reports INT_MAX via setNextOnsetTime, so the other
	//channel keeps winning the comparison until both run out.
	int onsetIndex[3] = {0, 0, 0};
	int nextKickTime, nextSnareTime;

	while (onsetIndex[0] < recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.chromaOnsets.size() ||
		   onsetIndex[2] < recordedTracks.loadedAudioFiles[2].fileLoader.onsetDetect.chromaOnsets.size()) {

		setNextOnsetTime(0, nextKickTime, onsetIndex);
		setNextOnsetTime(2, nextSnareTime, onsetIndex);

		if (nextKickTime < nextSnareTime){
			printf("update kick at %i\n", nextKickTime);
			recordedTempoData.updateTempo(0, nextKickTime);
			printf("recorded tempo is %f\n", recordedTempoData.playingTempo);
			onsetIndex[0]++;
		}else {
			printf("update snare at %i\n", nextSnareTime);
			recordedTempoData.updateTempo(2, nextSnareTime);
			printf("recorded tempo is %f\n", recordedTempoData.playingTempo);
			onsetIndex[2]++;
		}
	}
}

void AudioEventMatcher::setNextOnsetTime(const int& channel, int& time, int* indexForOnsets){
	//Writes into 'time' the millisecond timestamp of the next unconsumed onset
	//on 'channel' (as indexed by indexForOnsets[channel]); writes INT_MAX when
	//that channel's onset list is exhausted, so the caller's min-comparison
	//always prefers a channel that still has onsets left.
	if (indexForOnsets[channel] < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size()){
		time = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[indexForOnsets[channel]].millisTime;
	}
	else {
		time = INT_MAX;//sentinel "no onsets left"; was the magic literal 2147483647
	}
}

void AudioEventMatcher::startPlaying(){
	//Reset all alignment state at the start of a performance and seed the
	//priors from the pre-computed recorded tempo data.
	bayesianStruct.setStartPlaying();
	currentAlignmentPosition = 0;
	startTime = ofGetElapsedTimeMillis();

	projectedPrior = bayesianStruct.prior;
	startedPlaying = true;
	synchroniser.reset();
	temporal.reset();

	//start from the first recorded tempo segment
	recordedTempoIndex = 0;
	recordedTempo = recordedTempoData.globalTempo[recordedTempoIndex];

	currentSpeedRatio = 1;

	//centre the live tempo posterior on the first recorded tempo
	temporal.tempoPosterior.zero();
	temporal.tempoPosterior.addGaussianShapeFromRealTime(recordedTempo, 10, 1);

	//prior for the live/recorded speed ratio; updated while playing
	setSpeedRatioDistribution(currentSpeedRatio);
}


void AudioEventMatcher::setSpeedRatioDistribution(const double& speedRatio){
	//Rebuild the relative-speed posterior: a unit spike at the current ratio
	//plus a broad gaussian.
	//NOTE(review): the gaussian is centred on 1 rather than on speedRatio -
	//confirm that anchoring towards unity speed is intentional.
	bayesianStruct.relativeSpeedPosterior.zero();
	bayesianStruct.relativeSpeedPosterior.addToIndex(bayesianStruct.relativeSpeedPosterior.getRealTermsAsIndex(speedRatio), 1);
	bayesianStruct.relativeSpeedPosterior.addGaussianShapeFromRealTime(1, 0.06, 0.8);
}

void AudioEventMatcher::stopPlaying(){
	//stop live tracking and dump the collected event times for debugging
	startedPlaying = false;
	temporal.printEventTimes();
}

void AudioEventMatcher::rescue(){
	//Flatten both position distributions to a uniform constant - a manual
	//"reset" for when the alignment has locked onto the wrong place.
	bayesianStruct.posterior.zero();
	bayesianStruct.posterior.addConstant(1);
	bayesianStruct.prior.zero();
	bayesianStruct.prior.addConstant(1);
}

void AudioEventMatcher::updatePosition(){
	//Per-frame update: move the track display, refresh the alignment estimate,
	//and keep the recorded tempo segment in step with the current position.
	if (startedPlaying){
		if (!followingLiveInput){
			recordedTracks.updatePosition();
		}else{
			recordedTracks.updatePositionToMillis(currentAlignmentPosition);
		}
		updateBestAlignmentPosition();
	}

	updateRecordedTempo();

	//soften the tempo posterior around its MAP estimate each frame
	temporal.tempoPosterior.addGaussianShape(temporal.tempoPosterior.MAPestimate, temporal.tempoArraySize / 4, 0.5	);
}

void AudioEventMatcher::updateRecordedTempo(){
	//tempo of equivalent recorded position is updated 
	//Advance recordedTempoIndex to the tempo segment containing the current
	//alignment position, then refresh the live/recorded speed ratio.
	//NOTE(review): this while loop has no upper bound on recordedTempoIndex -
	//if currentAlignmentPosition exceeds the last entry of globalTempoTimes it
	//reads past the end; confirm the array is terminated by a large sentinel.
	while(currentAlignmentPosition > recordedTempoData.globalTempoTimes[recordedTempoIndex]){
		recordedTempoIndex++;
	}
	recordedTempo = recordedTempoData.globalTempo[recordedTempoIndex];
	double tmpRatio = currentSpeedRatio;
	currentSpeedRatio = temporal.playingTempo / recordedTempo;
	//only rebuild the speed-ratio distribution when the ratio actually changed
	if (currentSpeedRatio != tmpRatio)
		setSpeedRatioDistribution(currentSpeedRatio);
}

void AudioEventMatcher::updateBestAlignmentPosition(){
	//Estimate where we are in the recorded material *right now* (screen time).
	//Incoming events are timestamped and may lag real time (e.g. a pitch event
	//arriving several chroma frames late), so this extrapolates the posterior
	//MAP estimate forward by elapsed-time x relative-speed.
	int nowMillis = ofGetElapsedTimeMillis() - startTime;

	double mapPosition = bayesianStruct.posterior.getIndexInRealTerms(bayesianStruct.posterior.MAPestimate);
	double mapSpeed = bayesianStruct.relativeSpeedPosterior.getIndexInRealTerms(bayesianStruct.relativeSpeedPosterior.MAPestimate);
	currentAlignmentPosition = mapPosition + (nowMillis - lastAlignmentTime) * mapSpeed;

	synchroniser.updateRecordedPosition(currentAlignmentPosition, nowMillis);
	synchroniser.updateOutputSpeed();

	//project the prior forward to "now" for display and the next update
	bayesianStruct.projectDistribution(nowMillis, currentAlignmentPosition, projectedPrior);
}

void AudioEventMatcher::draw(){
	//Main draw function: window outlines, scrolling tracks, then every
	//bayesian distribution and tempo readout.

	ofSetColor(20,200,200);//outlines in blue
	bayesPositionWindow.drawOutline();
	bayesTempoWindow.drawOutline();

	recordedTracks.drawTracks();//the scrolling audio tracks

	ofSetColor(255);

	setScreenDisplayTimes();
	drawBayesianDistributions();

	temporal.drawTempoArray(bayesLikelihoodWindow);

	drawRecordedTempo();
	drawPlayingTempo();
}

void AudioEventMatcher::drawRecordedTempo(){
	//Green vertical line marking the recorded tempo, positioned proportionally
	//within the tracker's min..max tempo-interval range.
	int xTempoIndex = ofGetWidth() * (double)(recordedTempo - recordedTempoData.minimumTempoInterval)/(double)(recordedTempoData.maximumTempoInterval - recordedTempoData.minimumTempoInterval);

	ofSetColor(0, 200, 0);
	double lineTop = bayesLikelihoodWindow.y;
	double lineBottom = bayesLikelihoodWindow.y + bayesLikelihoodWindow.height;
	ofLine(xTempoIndex, lineTop, xTempoIndex, lineBottom);
	ofDrawBitmapString(ofToString(recordedTempo), xTempoIndex, bayesLikelihoodWindow.y + 10);
}

void AudioEventMatcher::drawPlayingTempo(){
	//purple line for MAP estimate of new intervals
	int xTempoIndex = (double)(ofGetWidth() * (temporal.playingTempo - temporal.minimumTempoInterval))/(double)(temporal.maximumTempoInterval - temporal.minimumTempoInterval);
	ofSetColor(200, 0, 200);
	ofLine(xTempoIndex, bayesLikelihoodWindow.y, xTempoIndex, bayesLikelihoodWindow.y + bayesLikelihoodWindow.height);
	ofDrawBitmapString(ofToString(temporal.playingTempo), xTempoIndex, bayesLikelihoodWindow.y + 10);
	
	//red line where the ratio is between playing tempo and recorded one
	int xSpeedRatioIndex = (double)(temporal.tempoPosterior.getIndexInRealTerms(currentSpeedRatio)*ofGetWidth())/(double)temporal.tempoPosterior.arraySize;
	ofSetColor(200,0,0);
	ofLine(xSpeedRatioIndex, bayesTempoWindow.y, xSpeedRatioIndex, bayesTempoWindow.y + bayesTempoWindow.height);
	string tmpString = "playing "+ofToString(temporal.playingTempo);
	tmpString += ", recorded "+ofToString(recordedTempo);
	tmpString += " ratio "+ofToString(currentSpeedRatio);
	ofSetColor(155,155,155);
	ofDrawBitmapString(tmpString, 20, bayesTempoWindow.y+10);

}


void AudioEventMatcher::setScreenDisplayTimes(){
	//Convert track 0's visible frame range into the millisecond span used by
	//all the drawing routines. Precision matters here: the distributions are
	//aligned against these values.
	screenWidthMillis = recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.framesToMillis(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.amplitudeNumber);
	screenStartTimeMillis = recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.framesToMillis(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.drawParams.windowStartFrame);
	screenEndTimeMillis = screenStartTimeMillis + screenWidthMillis;
}

void AudioEventMatcher::drawBayesianDistributions(){
	//position posterior, priors and estimate lines
	drawPositionWindow();

	//full relative-speed posterior across the tempo window
	bayesianStruct.relativeSpeedPosterior.drawConstrainedVector(0, bayesianStruct.relativeSpeedPosterior.arraySize, 0, ofGetWidth(), bayesTempoWindow);

	//per-channel likelihoods and priors
	drawTrackLikelihoods();

	//debug text overlay
	drawInfo();
}

void AudioEventMatcher::drawPositionWindow(){
	//Draw the position posterior, the two priors and the best-estimate lines,
	//all constrained to the currently visible millisecond range.
	int startIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenStartTimeMillis);
	int endIndex = bayesianStruct.posterior.getRealTermsAsIndex(screenEndTimeMillis);
	string tmpString = "start "+ofToString(screenStartTimeMillis)+" (index "+ofToString(startIndex)+"), end "+ofToString(screenEndTimeMillis);
	ofDrawBitmapString(tmpString, bayesPositionWindow.x+20, bayesPositionWindow.y+20);

	ofSetColor(255,0,255);//posterior in magenta
	bayesianStruct.posterior.drawConstrainedVector(startIndex, endIndex, 0, ofGetWidth(), bayesPositionWindow);

	ofSetColor(0,255,0);//green scrolling line: current best alignment estimate
	double currentEstimateIndex = (currentAlignmentPosition - screenStartTimeMillis)*ofGetWidth()/screenWidthMillis;
	ofLine(currentEstimateIndex, bayesPositionWindow.y, currentEstimateIndex, bayesPositionWindow.y + bayesPositionWindow.height);

	ofSetColor(0,255,255);//cyan line: synchroniser playback position
	//NOTE(review): drawn from bayesLikelihoodWindow.y but using
	//bayesPositionWindow.height - confirm the mixed windows are intended
	currentEstimateIndex = (synchroniser.playingPositionMillis - screenStartTimeMillis)*ofGetWidth()/screenWidthMillis;
	ofLine(currentEstimateIndex, bayesLikelihoodWindow.y, currentEstimateIndex, bayesLikelihoodWindow.y + bayesPositionWindow.height);

	int priorStartIndex = bayesianStruct.prior.getRealTermsAsIndex(screenStartTimeMillis);
	int priorEndIndex = bayesianStruct.prior.getRealTermsAsIndex(screenEndTimeMillis);

	ofSetColor(255,0,100);//prior in purple
	bayesianStruct.prior.drawConstrainedVector(priorStartIndex, priorEndIndex, 0, ofGetWidth(), bayesPositionWindow);

	ofSetColor(255,0,0);//projected prior in red
	projectedPrior.drawConstrainedVector(priorStartIndex, priorEndIndex, 0, ofGetWidth(), bayesPositionWindow);
}

void AudioEventMatcher::drawTrackLikelihoods(){
	//For every recorded track: its latest event likelihood, its latest prior,
	//and the timestamp of the most recent live event, drawn in the track's own
	//onset-detection window.
	for (int track = 0; track < recordedTracks.numberOfAudioTracks; track++){
		ofSetColor(200,255,50);//channel likelihoods in yellow
		likelihoodVisualisation[track].drawConstrainedVector(likelihoodVisualisation[track].getRealTermsAsIndex(screenStartTimeMillis), likelihoodVisualisation[track].getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), recordedTracks.loadedAudioFiles[track].fileLoader.onsetDetect.window);

		ofSetColor(0,255,150);//channel priors in green
		recentPriors[track].drawConstrainedVector(recentPriors[track].getRealTermsAsIndex(screenStartTimeMillis), recentPriors[track].getRealTermsAsIndex(screenEndTimeMillis), 0, ofGetWidth(), recordedTracks.loadedAudioFiles[track].fileLoader.onsetDetect.window);

		ofSetColor(255);
		ofDrawBitmapString("recent event "+ofToString(recentEventTime[track]), recordedTracks.loadedAudioFiles[track].fileLoader.onsetDetect.window.x + 20, recordedTracks.loadedAudioFiles[track].fileLoader.onsetDetect.window.y + recordedTracks.loadedAudioFiles[track].fileLoader.onsetDetect.window.height - 10);
	}
}


void AudioEventMatcher::drawInfo(){
	string tmpStr = "zero is "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(0));
	tmpStr += " offsetis "+ofToString(bayesianStruct.posterior.offset);
	tmpStr += " screenWidth = "+ofToString(bayesianStruct.posterior.getRealTermsAsIndex(screenWidthMillis));
	ofDrawBitmapString(tmpStr, 20,140);
	tmpStr = "best est "+ofToString(bayesianStruct.bestEstimate);
	ofDrawBitmapString(tmpStr, 20, 180);
	ofDrawBitmapString("screenwidth "+ofToString(screenWidthMillis), 20, 800);
	
	ofSetColor(255);
	tmpStr = "pitch "+ofToString(recentPitch, 2);
	tmpStr += " Nearest "+ofToString(pitchOfNearestMatch,2);
	tmpStr += " dist "+ofToString(distanceOfNearestMatch, 2);
	tmpStr +=  ", Time "+ofToString(recentTime, 0);
	ofDrawBitmapString(tmpStr, 20, 20);
	
	string alignString = " align "+ofToString(currentAlignmentPosition, 2);
	alignString += " playing "+ofToString(synchroniser.playingPositionRatio, 5);
	alignString += "  pos "+ofToString(synchroniser.playingPositionMillis,0)+" ms";
	alignString +=  "  rec pos "+ofToString(synchroniser.recordedPositionMillis,0)+" ms";
	ofDrawBitmapString(alignString, 20, 50);
	
	ofDrawBitmapString("pos "+ofToString(recordedTracks.loadedAudioFiles[0].fileLoader.onsetDetect.playPosition), 200,600);
	
}

void AudioEventMatcher::newPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){
	//Handle a timestamped live pitch event; unpitched frames (pitch <= 0) are
	//ignored entirely.
	if (!(pitchIn > 0))
		return;

	liveInput.addPitchEvent(pitchIn, timeIn);

	matchNewPitchEvent(channel, pitchIn, timeIn);//main pitch matching fn

	//NOTE(review): visualisation index is hard-coded to 1 rather than
	//'channel' - confirm pitch events always belong to channel 1
	likelihoodVisualisation[1] = bayesianStruct.likelihood;

	recentPitch = pitchIn;//for drawing
	recentTime = timeIn;
}


void AudioEventMatcher::newChromaEvent(const int& channel, float* chromaIn, const double& timeIn){
	//Handle a timestamped live chroma event: debug-dump the incoming 12-bin
	//vector, run the chroma matcher, and keep its likelihood for display.
	printf("match chroma channel %i\n", channel);	
	for (int bin = 0; bin < 12; bin++){
		printf("chroma in[%i] = %f\n", bin, chromaIn[bin]);
	}

	matchNewChromaEvent(channel, chromaIn, timeIn);

	likelihoodVisualisation[channel] = bayesianStruct.likelihood;
}


void AudioEventMatcher::newKickEvent(const double& timeIn){	
//	liveInput.addKickEvent(timeIn);
	matchNewOnsetEvent(0, timeIn);
	likelihoodVisualisation[0] = bayesianStruct.likelihood;
}

void AudioEventMatcher::newKickEvent(const int& channel, const double& timeIn){
	//Match a timestamped live kick onset against the recorded track for
	//'channel' and keep the resulting likelihood for on-screen display.
//	liveInput.addKickEvent(timeIn);
	matchNewOnsetEvent(channel, timeIn);
	//was hard-coded [0], ignoring the channel parameter - made consistent
	//with newChromaEvent, which stores into [channel]
	likelihoodVisualisation[channel] = bayesianStruct.likelihood;
}


void AudioEventMatcher::newSnareEvent(const double& timeIn){
	matchNewOnsetEvent(2, timeIn);
	likelihoodVisualisation[2] = bayesianStruct.likelihood;
}


void AudioEventMatcher::newSnareEvent(const int& channel, const double& timeIn){
	//Match a timestamped live snare onset against the recorded track for
	//'channel' and keep the resulting likelihood for on-screen display.
	matchNewOnsetEvent(channel, timeIn);
	//was hard-coded [2], ignoring the channel parameter - made consistent
	//with newChromaEvent, which stores into [channel]
	likelihoodVisualisation[channel] = bayesianStruct.likelihood;
}

//Needs just to set bounds for the matching process, not have TimeIn
void AudioEventMatcher::matchNewOnsetEvent(const int& channel, const double& timeIn){
	//Bayesian update for a timestamped percussive onset: project the posterior
	//forward to timeIn, build a likelihood from the recorded onsets that fall
	//inside the match window, mix in a noise floor, and recompute the posterior.

	bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets

	//likelihood shares the prior's frame of reference
	bayesianStruct.likelihood.offset = bayesianStruct.prior.offset;
	bayesianStruct.likelihood.zero();

	double quantity = 1;//weight per matched onset
	int numberOfMatchesFound = 0;

	double startMatchingTime = bayesianStruct.likelihood.offset;
	double endMatchingTime = bayesianStruct.likelihood.offset + matchWindowWidth;

	//was 'channel <= numberOfAudioTracks': off-by-one that allowed an
	//out-of-range index into loadedAudioFiles
	if (channel < recordedTracks.numberOfAudioTracks){
		//Skip onsets that precede the window. Bounded so it cannot run off the
		//end of the vector (the old loop could when every onset preceded the
		//window), and without the old overshoot-by-one that skipped the first
		//in-window onset.
		int checkIndex = 0;
		while (checkIndex < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size()
			   && recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[checkIndex].millisTime < startMatchingTime) {
			checkIndex++;
		}

		double millisTime;
		for (int i = checkIndex; i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size(); i++){
			millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
			if (millisTime > endMatchingTime)
				break;//onsets are time-ordered: nothing later can match
			if (millisTime >= startMatchingTime){
				bayesianStruct.likelihood.addGaussianShapeFromRealTime(millisTime, onsetLikelihoodWidth,  quantity);
				numberOfMatchesFound++;
			}
		}
	}

	if (numberOfMatchesFound > 0){
		//constant noise floor scaled by the number of matches, then renormalise
		bayesianStruct.likelihood.addConstant(numberOfMatchesFound*(1-onsetLikelihoodToNoise)/(onsetLikelihoodToNoise*bayesianStruct.likelihood.length));
		bayesianStruct.likelihood.renormalise();

		bayesianStruct.calculatePosterior();
		lastAlignmentTime = timeIn;//use TIMESTAMP, not arrival time
		recentEventTime[channel] = timeIn;

		recentPriors[channel] = bayesianStruct.prior;
		projectedPrior = bayesianStruct.prior;

		temporal.updateTempo(channel, timeIn);
	}
}



void AudioEventMatcher::matchNewPitchEvent(const int& channel, const double& pitchIn, const double& timeIn){
	//Bayesian update for a timestamped pitched note: project the posterior
	//forward to timeIn, then build a likelihood from every recorded onset whose
	//pitch is close to pitchIn (allowing integer multiples - see checkMatch),
	//weighted by pitch proximity (getPitchDistance).
	bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets

	int numberOfMatches = 0;
	bayesianStruct.likelihood.zero();//set to zero
	double newOnsetTime;
	double closestDistance = INFINITY;

	double quantity = 0;
	double totalLikelihoodAdded = 0;
	//was 'channel <= numberOfAudioTracks': off-by-one that allowed an
	//out-of-range index into loadedAudioFiles
	if (channel < recordedTracks.numberOfAudioTracks){
		for (int i = 0;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){

			if (checkMatch(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn)) {
				quantity = getPitchDistance(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch, pitchIn, 12);

				bayesianStruct.likelihood.addGaussianShapeFromRealTime(recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime, 30, quantity);
				recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = true;
				numberOfMatches++;
				totalLikelihoodAdded += quantity;
			}
			else{
				recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].matched = false;
			}
			//track the onset nearest the current alignment (for the info display)
			//fabs, not abs: unqualified abs may bind to the int overload and truncate
			newOnsetTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
			if (fabs(newOnsetTime - currentAlignmentPosition) < closestDistance){
				closestDistance = fabs(newOnsetTime - currentAlignmentPosition);
				pitchOfNearestMatch = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].aubioPitch;
				//NOTE(review): 'quantity' here is the most recent *matched*
				//distance, not necessarily this onset's - confirm intent
				distanceOfNearestMatch = quantity;
			}
		}
	}

	if (numberOfMatches > 0){//no point updating unless there is a match
		//noise floor scaled by the total likelihood mass added above
		bayesianStruct.likelihood.addConstant(totalLikelihoodAdded*(1-pitchLikelihoodToNoise)/(pitchLikelihoodToNoise*bayesianStruct.likelihood.length));

		bayesianStruct.calculatePosterior();
		lastAlignmentTime = timeIn;//has to use the STAMPED time
		recentEventTime[channel] = timeIn;

		recentPriors[channel] = bayesianStruct.prior;
		projectedPrior = bayesianStruct.prior;

		temporal.eventTimes[channel].push_back(timeIn);
	}
}

double AudioEventMatcher::getPitchDistance(const double& pitchOne, const double& pitchTwo, const double& scale){
	//Similarity in [0,1] between two pitches (Hz): 1 when identical, falling
	//linearly to 0 at a cutoff of 'scale' scaled by pitchOne/110. pitchTwo is
	//first multiplied by the nearest integer ratio so that octave/harmonic
	//multiples of the same note still compare as close.
	double scaleFactor = scale * pitchOne / 110.0;

	int multiplicationFactor = 1;
	if (pitchTwo > 0){
		multiplicationFactor = round(pitchOne/pitchTwo);
	}

	//fabs, not abs: unqualified abs may bind to the int overload and truncate
	double distance = fabs(pitchOne - pitchTwo*multiplicationFactor);
	if (distance < scaleFactor)
		distance = 1 - (distance/scaleFactor);
	else
		distance = 0;

	return distance;
}


bool AudioEventMatcher::checkMatch(const double& recordedPitch, const double& livePitch){
	//True when livePitch (or its nearest integer multiple, so harmonics of the
	//same note count) lies within pitchCutOff Hz of recordedPitch.
	if (!(livePitch > 0)){
		return false;//unvoiced / no pitch detected
	}

	int multiplicationFactor = (int)(round(recordedPitch/livePitch));

	//fabs, not abs: unqualified abs may bind to the int overload and truncate
	return fabs(recordedPitch - livePitch * multiplicationFactor) < pitchCutOff;
}


void AudioEventMatcher::matchNewChromaEvent(const int& channel, float* chromaIn, const double& timeIn){
	//Bayesian update for a timestamped chroma event: project the posterior
	//forward to timeIn, then build a likelihood from recorded onsets inside the
	//match window, each weighted by chroma similarity (getChromaDistance).
	bayesianStruct.updateBayesianDistributions(timeIn);//moves the posterior up into prior given the time interval and calculates new offsets

	int numberOfMatches = 0;
	bayesianStruct.likelihood.zero();//set to zero

	double quantity = 1;
	double totalLikelihoodAdded = 0;

	double startMatchingTime = bayesianStruct.likelihood.offset;
	double endMatchingTime = bayesianStruct.likelihood.offset + matchWindowWidth;

	//was 'channel <= numberOfAudioTracks': off-by-one that allowed an
	//out-of-range index into loadedAudioFiles
	if (channel < recordedTracks.numberOfAudioTracks){
		//Skip onsets that precede the window. Bounded so it cannot run off the
		//end of the vector (the old loop could when every onset preceded the
		//window), and without the old overshoot-by-one that skipped the first
		//in-window onset.
		int checkIndex = 0;
		while (checkIndex < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size()
			   && recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[checkIndex].millisTime < startMatchingTime) {
			checkIndex++;
		}

		double millisTime;
		for (int i = checkIndex;i < recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets.size();i++){
			millisTime = recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].millisTime;
			if (millisTime > endMatchingTime)
				break;//onsets are time-ordered: nothing later can match
			if (millisTime >= startMatchingTime){
				quantity = getChromaDistance(chromaIn, &recordedTracks.loadedAudioFiles[channel].fileLoader.onsetDetect.chromaOnsets[i].chromaValues[0]);
				bayesianStruct.likelihood.addGaussianShapeFromRealTime(millisTime, chromaLikelihoodWidth, quantity);
				numberOfMatches++;
				totalLikelihoodAdded += quantity;
				printf("Adding CHROMA Gaussian for onset at time %.1f dist %.3f\n", millisTime, quantity);
			}
		}
	}

	if (numberOfMatches > 0){//no point updating unless there is a match
		printf("CHROMA HAS %i MATCHES\n", numberOfMatches);

		//noise floor scaled by the total likelihood mass added above
		bayesianStruct.likelihood.addConstant(totalLikelihoodAdded*(1-chromaLikelihoodToNoise)/(chromaLikelihoodToNoise*bayesianStruct.likelihood.length));

		bayesianStruct.calculatePosterior();
		lastAlignmentTime = timeIn;//has to use the STAMPED time
		recentEventTime[channel] = timeIn;

		recentPriors[channel] = bayesianStruct.prior;
		projectedPrior = bayesianStruct.prior;

		temporal.eventTimes[channel].push_back(timeIn);
	}
}


double AudioEventMatcher::getChromaDistance(float* chromaOne, float* chromaTwo){
	//Similarity between two 12-bin chroma vectors: the dot product divided by
	//the square root of the summed energies of both vectors.
	//NOTE(review): this is not a standard cosine similarity (which divides by
	//the *product* of the two norms) - confirm the normalisation is intended.
	double dotProduct = 0;
	double energy = 0;
	for (int i = 0;i < 12;i++){
		dotProduct += chromaOne[i]*chromaTwo[i];
		energy += chromaOne[i]*chromaOne[i] + (chromaTwo[i]*chromaTwo[i]);
	}

	if (energy <= 0)
		return 0;//both vectors all-zero (e.g. silence): avoid 0/0 -> NaN

	return dotProduct / sqrt(energy);
}

void AudioEventMatcher::windowResized(const int& w, const int& h){
	recordedTracks.windowResized(w,h);
	bayesTempoWindow.resized(w,h);
	bayesPositionWindow.resized(w,h);
}

/*
 
void printPosteriorMAPinfo(){	//tmp print stuff
 printf("New pitch MAP post estimate now %i, ", bayesianStruct.posterior.MAPestimate);
 double tmp  = bayesianStruct.posterior.getMAPestimate();
 printf(" getting it %f and offset %f == %f ms\n", tmp,  bayesianStruct.posterior.offset, bayesianStruct.posterior.getIndexInRealTerms(tmp));
 
 }
 */