view sv/filter/MultiRealTimeFilter.cpp @ 230:3c473495403f

add a master bypass in the MultiRealTimeFilter
author lbajardsilogic
date Wed, 05 Mar 2008 16:03:05 +0000
parents 7d5d51145b81
children 70b88fbbfb5c
/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*-  vi:set ts=8 sts=4 sw=4: */

/*	Sound Access	
		EASAIER client application.	
		Silogic 2007. Laure Bajard. 
	
	Integration of the filter provided by:
	Dublin Institute of Technology - Audio Research Group 2007
	www.audioresearchgroup.com
	Author: Dan Barry

	This program is free software; you can redistribute it and/or    
	modify it under the terms of the GNU General Public License as    
	published by the Free Software Foundation; either version 2 of the    
	License, or (at your option) any later version.  See the file    
	COPYING included with this distribution for more information.
*/

#include <math.h>
#include <iostream>

#include "MultiRealTimeFilter.h"

#include  "FFTReal.h"
#include  "DSP.h"

#include "system/System.h"
#include "main/MainWindow.h"

/*float *audioframe;
float *prev_audioframe;
float *window;
float *processedframe;
float *outbuffer;
float *holdbuffer3;
float *holdbuffer2;
float *holdbuffer1;

float *c_phase;		///CURRENT FRAME phases
float *p_phase;		///PREVIOUS FRAME phases
float *c_synthphase;
float *p_synthphase;
float *synthframe;*/

/* DAN removed
float *c_mags;		///CURRENT FRAME MAGNITUDES
float *p_mags;		///PREVIOUS FRAME MAGNITUDES
float *FFTframe;
*/

// DAN added
float *L_mags;		///LEFT CHANNEL CURRENT FRAME MAGNITUDES
float *R_mags;		///RIGHT CHANNEL CURRENT FRAME MAGNITUDES
float *pL_mags;		///LEFT CHANNEL PREVIOUS FRAME MAGNITUDES
float *pR_mags;		///RIGHT CHANNEL PREVIOUS FRAME MAGNITUDES

float *L_FFTframe;			//Left FFT Frame
float *R_FFTframe;			//Right FFT Frame

float *band1;
float *band2;
float *band3;
float *band4;
float *band5;

extern float hopfactor;

//needed in DSP.cpp
float lastfactor;

int numpeaks;
float *peak_locations; 
int currentposition;
//

MultiRealTimeFilter::MultiRealTimeFilter() : Filter(),
	m_framesize(4096),
	m_transhold(0)
{
	m_hop = m_framesize/4;
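	//A hop of one quarter of the frame size gives 75% overlap between consecutive
	//analysis frames; the hold buffers allocated below and the overlap-add stage in
	//putInput() are built around this ratio.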

	currentposition = m_hop;

	m_timeStretchFilter = new TimeStretchFilter();
	m_equalizerFilter = new EqualizerFilter();

	m_filterCollection.push_back(m_timeStretchFilter);
	m_filterCollection.push_back(m_equalizerFilter);

	connect(m_timeStretchFilter, SIGNAL(filterEnabled(bool)), this, SLOT(setFilterEnabled(bool)));
	connect(m_equalizerFilter, SIGNAL(filterEnabled(bool)), this, SLOT(setFilterEnabled(bool)));

	m_inputBufferL = (float *)calloc(m_framesize*(m_timeStretchFilter->getMaxPitchFactor()+1), sizeof(float));
	m_inputBufferR = (float *)calloc(m_framesize*(m_timeStretchFilter->getMaxPitchFactor()+1), sizeof(float));
	/**********malloc***********/
	
		
	//This block specifically sets up the buffers required to do a 75% overlap scheme
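	//Each channel keeps an output segment of framesize/4 plus three hold buffers of
	//3/4, 1/2 and 1/4 of a frame, storing the tails of the previous three processed
	//frames so that four windowed frames always overlap at the output.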
	window=(float *)calloc((m_framesize), sizeof(float));				//Window	
	
	L_FFTframe=(float *)calloc((m_framesize), sizeof(float));
	R_FFTframe=(float *)calloc((m_framesize), sizeof(float));
		
	L_audioframe=(float *)calloc((m_framesize), sizeof(float));			//The current frame
	R_audioframe=(float *)calloc((m_framesize), sizeof(float));
	pL_audioframe=(float *)calloc((m_framesize), sizeof(float));		//
	pR_audioframe=(float *)calloc((m_framesize), sizeof(float));
	L_processedframe=(float *)calloc((m_framesize), sizeof(float));	//The current frame
	R_processedframe=(float *)calloc((m_framesize), sizeof(float));
	
	//FFTframe=(float *)calloc((m_framesize), sizeof(float));
	//audioframe=(float *)calloc((m_framesize), sizeof(float));			//The current frame
	//prev_audioframe=(float *)calloc((m_framesize), sizeof(float));
	//processedframe=(float *)calloc((m_framesize), sizeof(float));			//The current frame
	//synthframe=(float *)calloc((m_framesize), sizeof(float));
	
	L_outbuffer=(float *)calloc((m_framesize/4), sizeof(float));		//The current output segment which is 1/4 framesize for 75% overlap
	L_holdbuffer3=(float *)calloc((m_framesize*0.75), sizeof(float));	//The hold buffer for the previous frame segment
	L_holdbuffer2=(float *)calloc((m_framesize/2), sizeof(float));		//The hold buffer for the frame segment 2 frames ago
	L_holdbuffer1=(float *)calloc((m_framesize/4), sizeof(float));		//The hold buffer for the frame segment 3 frames ago
			
	R_outbuffer=(float *)calloc((m_framesize/4), sizeof(float));			//The current output segment which is 1/4 framesize for 75% overlap
	R_holdbuffer3=(float *)calloc((m_framesize*0.75), sizeof(float));	//The hold buffer for the previous frame segment
	R_holdbuffer2=(float *)calloc((m_framesize/2), sizeof(float));		//The hold buffer for the frame segment 2 frames ago
	R_holdbuffer1=(float *)calloc((m_framesize/4), sizeof(float));		//The hold buffer for the frame segment 3 frames ago
	
	/*
	outbuffer=(float *)calloc((m_framesize/4), sizeof(float));			//The current output segment which is 1/4 framesize for 75% overlap		
	holdbuffer3=(float *)calloc((m_framesize*0.75), sizeof(float));	//The hold buffer for the previous frame segment
	holdbuffer2=(float *)calloc((m_framesize/2), sizeof(float));		//The fold buffer for the frame segment 2 frames ago
	holdbuffer1=(float *)calloc((m_framesize/4), sizeof(float));
	*/

	L_mags=(float *)calloc((m_framesize/2), sizeof(float));			//The magnitude and phase arrays
	R_mags=(float *)calloc((m_framesize/2), sizeof(float));
	L_phase=(float *)calloc((m_framesize/2), sizeof(float));
	R_phase=(float *)calloc((m_framesize/2), sizeof(float));
	pL_mags=(float *)calloc((m_framesize/2), sizeof(float));			//The magnitude and phase arrays
	pR_mags=(float *)calloc((m_framesize/2), sizeof(float));
	pL_phase=(float *)calloc((m_framesize/2), sizeof(float));			//The magnitude and phase arrays
	pR_phase=(float *)calloc((m_framesize/2), sizeof(float));
	cL_synthphase=(float *)calloc((m_framesize/2), sizeof(float));
	cR_synthphase=(float *)calloc((m_framesize/2), sizeof(float));
	pL_synthphase=(float *)calloc((m_framesize/2), sizeof(float));
	pR_synthphase=(float *)calloc((m_framesize/2), sizeof(float));
	
	/*
	c_mags=(float *)calloc((m_framesize/2), sizeof(float));			//The magnitude and phase arrays
	p_mags=(float *)calloc((m_framesize/2), sizeof(float));
	c_phase=(float *)calloc((m_framesize/2), sizeof(float));
	p_phase=(float *)calloc((m_framesize/2), sizeof(float));
	c_synthphase=(float *)calloc((m_framesize/2), sizeof(float));
	p_synthphase=(float *)calloc((m_framesize/2), sizeof(float));
	*/

	band1=(float *)calloc((m_framesize/2), sizeof(float));
	band2=(float *)calloc((m_framesize/2), sizeof(float));
	band3=(float *)calloc((m_framesize/2), sizeof(float));
	band4=(float *)calloc((m_framesize/2), sizeof(float));
	band5=(float *)calloc((m_framesize/2), sizeof(float));

			
	bandcurve=(float *)calloc((m_framesize/2), sizeof(float));
	plotFFTarray=(float *)calloc((400), sizeof(float));
	plotbandcurve=(float *)calloc((400), sizeof(float));
	//eqcurve=(float *)calloc((400), sizeof(float));

	peak_locations=(float *)calloc((m_framesize/2), sizeof(float));

	hanning(window, m_framesize);
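	//The same Hann window is applied at analysis and again at synthesis in
	//putInput(); with 75% overlap the squared window overlaps to a constant,
	//which is presumably why a fixed 0.5 scale suffices in the overlap-add stage.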

	connect(this, SIGNAL(playSpeedChanged(float)),
		MainWindow::instance(), SLOT(playSpeedChanged(float)));

	hopfactor = 1;
	/***************************/
}

MultiRealTimeFilter::~MultiRealTimeFilter()
{
	/**********de-alloc***********/
	// these buffers were allocated with calloc(), so release them with free()
	free(m_inputBufferL);
	free(m_inputBufferR);
	free(L_mags);		///CURRENT FRAME MAGNITUDES
	free(R_mags);		///CURRENT FRAME MAGNITUDES
	free(pL_mags);		///PREVIOUS FRAME MAGNITUDES
	free(pR_mags);		///PREVIOUS FRAME MAGNITUDES

	free(L_FFTframe);			//Left FFT Frame
	free(R_FFTframe);
	
	
	//delete FFTframe;
			
	free(L_audioframe);
	free(R_audioframe);
	free(pL_audioframe);
	free(pR_audioframe);
	free(L_processedframe);
	free(R_processedframe);
	
	/* DAN Removed
	delete audioframe;		
	delete prev_audioframe;
	delete processedframe;	
	delete synthframe;
	delete holdbuffer3;
	delete holdbuffer2;
	delete holdbuffer1;
	delete outbuffer;
	*/
	
	free(window);		
	
	free(L_outbuffer);
	free(L_holdbuffer3);
	free(L_holdbuffer2);
	free(L_holdbuffer1);
	free(R_outbuffer);
	free(R_holdbuffer3);
	free(R_holdbuffer2);
	free(R_holdbuffer1);
	
	/*
	delete c_mags;
	delete p_mags;
	delete c_phase;
	delete p_phase;
	delete c_synthphase;
	delete p_synthphase;*/

	free(L_phase);		
	free(R_phase);		
	free(pL_phase);		
	free(pR_phase);		
	free(cL_synthphase);
	free(cR_synthphase);
	free(pL_synthphase);
	free(pR_synthphase);

	free(peak_locations);

	free(band1);
	free(band2);
	free(band3);
	free(band4);
	free(band5);

	free(bandcurve);
	free(plotbandcurve);
	//free(eqcurve);
	free(plotFFTarray);

		

	hopfactor = 1;

	emit playSpeedChanged(1);

	/***************************/
}

void MultiRealTimeFilter::putInput(float **input, size_t samples)
{	
	int dd;
	float sampdiff;
	float difratio;
	float interpsample;

	bool drum = false;
	float drumthresh = 65;

	int delta = m_hop; //the "current position" is shifted by m_hop

	if ( samples < ( (size_t) floor(m_framesize*(m_timeStretchFilter->getPitchFactor() + 1)) ) )
		return;

	int channel = getSourceChannelCount();

	
	
	for (int i=0; i< ((int) samples); i++){
		if (channel > 1)
		{
			m_inputBufferL[i] = input[0][i];	//real-time buffer taken from left ring buffer
			m_inputBufferR[i] = input[1][i];	//real-time buffer taken from right ring buffer
		} else {
			m_inputBufferL[i] = input[0][i];	//If the file is mono we copy the same channel into both buffers
			m_inputBufferR[i] = input[0][i];
		}
	}
	
	
	
	for (int i = 0; i< ((int) m_framesize); i++)
	{
			
		//This block was specifically written to do resampling interpolation for crude pitch shifting.
		//If it is not being used, the audioframe assignment after the else should be used instead,
		//which is also what bypass mode uses.
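		//Linear-interpolation resampler: dd is the integer part of the resampled
		//read position i*pitchFactor and difratio is its fractional part, so each
		//sample is read as in[dd] + difratio*(in[dd+1] - in[dd]).  The frame is
		//read twice, once at the current hop position and once one hop earlier,
		//to fill the pL_/pR_ "previous" frames needed further down.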
		
		if (m_timeStretchFilter->bypass() == false) 
		{
			dd = floor(double(i*m_timeStretchFilter->getPitchFactor()));
			difratio = (double(i*m_timeStretchFilter->getPitchFactor())) - floor(double(i*m_timeStretchFilter->getPitchFactor()));
			
			// this block loads a frame as normal
			sampdiff=m_inputBufferL[dd+delta+1]-m_inputBufferL[dd+delta];
			interpsample = (difratio*sampdiff)+m_inputBufferL[dd+delta];
			L_audioframe[i] = interpsample*window[i];

			sampdiff=m_inputBufferL[dd+delta+1-m_hop]-m_inputBufferL[dd+delta-m_hop];
			interpsample = (difratio*sampdiff)+m_inputBufferL[dd+delta-m_hop];
			pL_audioframe[i] = interpsample*window[i];


			sampdiff=m_inputBufferR[dd+delta+1]-m_inputBufferR[dd+delta];
			interpsample = (difratio*sampdiff)+m_inputBufferR[dd+delta];
			R_audioframe[i] = interpsample*window[i];

			sampdiff=m_inputBufferR[dd+delta+1-m_hop]-m_inputBufferR[dd+delta-m_hop];
			interpsample = (difratio*sampdiff)+m_inputBufferR[dd+delta-m_hop];
			pR_audioframe[i] = interpsample*window[i];

		}
		else {
			L_audioframe[i] = m_inputBufferL[i+delta+1]*window[i];
			R_audioframe[i] = m_inputBufferR[i+delta+1]*window[i];
			
			L_processedframe[i] = L_audioframe[i]*window[i];
			R_processedframe[i] = R_audioframe[i]*window[i];
		}
	}
	
	FFTReal fft_object(m_framesize);
			
	//if (m_timeStretchFilter->bypass() == false)//DAN This bypass would stop all processing...need to replace this with a master bypass instead of just timestretch bypass
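	//filterEnabled is the master bypass: setFilterEnabled() sets it whenever either
	//sub-filter changes state, so when both the time stretcher and the equaliser are
	//disabled the frame skips all spectral processing and is only re-windowed before
	//overlap-add.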
	if (filterEnabled)
	{ 
		
		//CURRENT FRAME LEFT AND RIGHT TIME 2 FREQUENCY ***** THIS IS REQUIRED BY ALL PROCESSES	
		fft_object.do_fft(L_FFTframe,L_audioframe);
		fft_object.do_fft(R_FFTframe,R_audioframe);
	
		cart2pol(L_FFTframe, L_mags, L_phase, m_framesize);
		cart2pol(R_FFTframe, R_mags, R_phase, m_framesize);


		//PREVIOUS FRAME LEFT AND RIGHT TIME 2 FREQUENCY (one hop earlier)	
		fft_object.do_fft (L_FFTframe,pL_audioframe);
		fft_object.do_fft (R_FFTframe,pR_audioframe);

		cart2pol(L_FFTframe, pL_mags, pL_phase, m_framesize);
		cart2pol(R_FFTframe, pR_mags, pR_phase, m_framesize);
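		//Both the current frame and the frame one hop earlier are converted to
		//magnitude/phase form: the previous-frame spectra feed the transient
		//detector and the phase rotation of the time stretcher below.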
		
		//--------------------------------------------
		
		
		//EQUALISER CALLS - 
		//Will need a bypass " m_equalizerFilter->bypass()" as below
		//Will need a simplemode " m_equalizerFilter->simplemode_bypass() " to switch between draw and slider control
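		//applyEQ() scales the left and right magnitude spectra by the user-drawn
		//curve before resynthesis; emitPlotFFTArray() passes the resulting
		//left-channel magnitudes on, presumably for the FFT plot.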
		if (m_equalizerFilter->bypass() == false)
		{
		
			/*if (m_equalizerFilter->simplemode_bypass() == false)
			{
				create_filterbands();	//Creates each filter band and creates "bandcurve"
				bandeq(L_mags, R_mags, gainband1, gainband2, gainband3, gainband4, gainband5, bandcurve); //Gainband 1 - 5 values from EQ sliders
				log10plot2(bandcurve,plotbandcurve, m_framesize, 400);	//Converts linear band curve to log band curve only for plot
			}
			else
			{*/
				applyEQ(L_mags, R_mags, m_framesize, m_equalizerFilter->curve().size(), m_equalizerFilter->curve()); //Takes "eqcurve" which is what user draws in advanced mode 
			//}

			m_equalizerFilter->emitPlotFFTArray(L_mags, m_framesize);
			//log10plot(L_mags,plotFFTarray, m_framesize, 400);	//Plots FFT for either eq 
		
		}
				
		if (m_timeStretchFilter->bypass() == false) 
		{
			//TRANSIENT DETECTION FOR TIME STRETCHER
			drum=transient_detect(L_mags, R_mags, pL_mags, pR_mags, drumthresh, m_framesize);
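			//When a transient is detected, cur2last() re-seeds the synthesis phases
			//from the analysis phases and m_transhold holds off time stretching for
			//the next few frames (see getRequiredSkipSamples()), which keeps the
			//attack from being smeared.  Otherwise the synthesis phases are advanced
			//by rotatephases(), phase-locked to spectral peaks when peakcheck() is on.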
		
			if (m_timeStretchFilter->transcheck())
			{
		
				if (drum && m_transhold==0){
					// FOR TIME STRETCHER
					cur2last(L_phase, cL_synthphase, pL_synthphase, m_framesize);
					cur2last(R_phase, cR_synthphase, pR_synthphase, m_framesize);
					
					m_transhold=4;
				}
				else{
					if(m_timeStretchFilter->peakcheck()){
						// FOR TIME STRETCHER
						rotatephases_peaklocked(L_phase, pL_phase, cL_synthphase, pL_synthphase, L_mags, m_framesize, m_timeStretchFilter->getPitchFactor());
						rotatephases_peaklocked(R_phase, pR_phase, cR_synthphase, pR_synthphase, R_mags, m_framesize, m_timeStretchFilter->getPitchFactor());
						
					}
					else{
						// FOR TIME STRETCHER
						rotatephases(L_phase, pL_phase, cL_synthphase, pL_synthphase, m_framesize, m_timeStretchFilter->getPitchFactor());
						rotatephases(R_phase, pR_phase, cR_synthphase, pR_synthphase, m_framesize, m_timeStretchFilter->getPitchFactor());
						
					}
				}
			}
			else
			{
				if(m_timeStretchFilter->peakcheck()){
					// FOR TIME STRETCHER
					rotatephases_peaklocked(L_phase, pL_phase, cL_synthphase, pL_synthphase, L_mags, m_framesize, m_timeStretchFilter->getPitchFactor());
					rotatephases_peaklocked(R_phase, pR_phase, cR_synthphase, pR_synthphase, R_mags, m_framesize, m_timeStretchFilter->getPitchFactor());
						
				}
				else{
					// FOR TIME STRETCHER
					rotatephases(L_phase, pL_phase, cL_synthphase, pL_synthphase, m_framesize, m_timeStretchFilter->getPitchFactor());
					rotatephases(R_phase, pR_phase, cR_synthphase, pR_synthphase, m_framesize, m_timeStretchFilter->getPitchFactor());
				}
			}
		
			// FOR TIME STRETCHER
			if(m_transhold != 0){
				m_transhold = m_transhold - 1;
			}

			drum = false;

			pol2cart(L_FFTframe, L_mags, cL_synthphase, m_framesize);
			pol2cart(R_FFTframe, R_mags, cR_synthphase, m_framesize);
		
		} else
		{
			pol2cart(L_FFTframe, L_mags, L_phase, m_framesize);
			pol2cart(R_FFTframe, R_mags, R_phase, m_framesize);
		}
		
		// INVERSE FFTS REQUIRED BY ALL PROCESSES	
		/*pol2cart(L_FFTframe, L_mags, cL_synthphase, m_framesize);
		pol2cart(R_FFTframe, R_mags, cR_synthphase, m_framesize);
	
		fft_object.do_ifft (L_FFTframe,L_processedframe);
		fft_object.do_ifft (R_FFTframe,R_processedframe);
		fft_object.rescale (L_processedframe); 
		fft_object.rescale (R_processedframe);
		*/
		
		fft_object.do_ifft (L_FFTframe,L_processedframe);
		fft_object.do_ifft (R_FFTframe,R_processedframe);
		
		fft_object.rescale (L_processedframe); 
		fft_object.rescale (R_processedframe);		
		
	} 

	for (int p = 0; p < ((int) m_framesize); p++){
		L_processedframe[p]=L_processedframe[p]*window[p];
		R_processedframe[p]=R_processedframe[p]*window[p];
	}
	
	for (int j = 0; j< ((int) m_framesize); j++)
	{		
		//This block deals with the buffers for a 75% overlap scheme AND IS NOW STEREO		
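		//Each output hop is the sum of the newest processed segment and the three
		//stored tails, scaled by 0.5; the hold buffers are then shifted down so that
		//every frame contributes to four consecutive hops.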
		if (j < ((int) m_framesize)/4){
			L_outbuffer[j]=(L_processedframe[j]+L_holdbuffer1[j]+L_holdbuffer2[j]+L_holdbuffer3[j])*0.5;
			L_holdbuffer1[j]=L_holdbuffer2[j+(m_framesize/4)];

			R_outbuffer[j]=(R_processedframe[j]+R_holdbuffer1[j]+R_holdbuffer2[j]+R_holdbuffer3[j])*0.5;
			R_holdbuffer1[j]=R_holdbuffer2[j+(m_framesize/4)];
		}

		if (j < ((int) m_framesize)/2){
			L_holdbuffer2[j]=L_holdbuffer3[j+(m_framesize/4)];
			R_holdbuffer2[j]=R_holdbuffer3[j+(m_framesize/4)];
		}

		if (j < ((int) m_framesize)*0.75){
			L_holdbuffer3[j]=L_processedframe[j+(m_framesize/4)];
			R_holdbuffer3[j]=R_processedframe[j+(m_framesize/4)];
		}

	
	}
}

void MultiRealTimeFilter::getOutput(float **output, size_t samples)
{
	if (samples > m_framesize/4)
		return;
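	// Only one hop (framesize/4) of output is produced per processed frame, so
	// larger requests cannot be served here.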

	int channel = getSourceChannelCount();

	for (size_t i = 0; i < samples; ++i)		// Now that there are 2 out buffers, the output is filled according to channel count 
	{
		output[0][i] = L_outbuffer[i];

		if (channel > 1)
		{
			output[1][i] = R_outbuffer[i];
		}
	}
	
	////DAN - REMOVED REPLACED ABOVE
	/*for (int ch = 0; ch < channel; ++ch) 
	{
		for (size_t i = 0; i < samples; ++i) {
			output[ch][i] = outbuffer[i];
		}
	}*/
}

size_t MultiRealTimeFilter::getRequiredInputSamples(size_t outputSamplesNeeded)
{
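	// Enough input must be buffered to cover one frame read through the resampler
	// at the current pitch factor, i.e. framesize*(pitchFactor+1) samples; this
	// matches the early return at the top of putInput().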
	size_t need = (size_t) (floor(m_framesize*(m_timeStretchFilter->getPitchFactor() + 1)));
	return need;
}

size_t MultiRealTimeFilter::getRequiredSkipSamples()
{
	size_t skip = 1024;
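	// hopfactor scales how far the read position advances per frame: synthesising a
	// fixed m_hop of output while skipping m_hop*hopfactor of input is what produces
	// the time stretch.  During a transient hold (m_transhold != 0) the natural hop
	// is used so the detected attack is not stretched.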

	if (m_timeStretchFilter->bypass() == false && m_transhold==0)
	{
			skip = floor(m_hop*hopfactor);
			currentposition += floor(m_hop*hopfactor);
	}
	else
	{		
			skip = m_hop;
			currentposition += m_hop;
	}
	return skip;
}

void MultiRealTimeFilter::setFilterEnabled(bool b)
{
	filterEnabled = (m_timeStretchFilter->isEnabled() || m_equalizerFilter->isEnabled());
	/*filterEnabled=b;
	
	if (filterEnabled)
	{
		if (!m_timeStretchFilter->bypass())
			emit playSpeedChanged(hopfactor);
	}
	else 
		emit playSpeedChanged(1);*/
}

void MultiRealTimeFilter::setFilterEnabled(int b)
{
	filterEnabled = (m_timeStretchFilter->isEnabled() || m_equalizerFilter->isEnabled());
	/*filterEnabled=b;
	
	if (filterEnabled)
	{
		if (!m_timeStretchFilter->bypass())
			emit playSpeedChanged(hopfactor);
	}
	else 
		emit playSpeedChanged(1);*/
}