view sv/filter/TimeStretchFilter.cpp @ 79:afcf540ae3a2
add the real time filter stack to manage real time filters and their attributes
author      lbajardsilogic
date        Tue, 19 Jun 2007 15:15:12 +0000
parents
children    8ebc85f6ce4e
/* -*- c-basic-offset: 4 indent-tabs-mode: nil -*-  vi:set ts=8 sts=4 sw=4: */

/*
    Sound Access
    EASAIER client application.
    Silogic 2007. Laure Bajard.

    This program is free software; you can redistribute it and/or
    modify it under the terms of the GNU General Public License as
    published by the Free Software Foundation; either version 2 of the
    License, or (at your option) any later version.  See the file
    COPYING included with this distribution for more information.
*/

#include <math.h>

#include "TimeStretchFilter.h"
#include "FFTReal.h"
#include "DSP.h"

float *audioframe;
float *prev_audioframe;
float *window;
float *processedframe;
float *outbuffer;
float *holdbuffer3;
float *holdbuffer2;
float *holdbuffer1;

float *c_mags;        /// CURRENT FRAME MAGNITUDES
float *p_mags;        /// PREVIOUS FRAME MAGNITUDES
float *c_phase;       /// CURRENT FRAME phases
float *p_phase;       /// PREVIOUS FRAME phases
float *c_synthphase;
float *p_synthphase;
float *synthframe;
float *FFTframe;

//FFTReal fft_object;

int framesize = 1024;
int hop = framesize/4;
float volume = 1;
float interpfactor = 1;
int currentposition = hop+1;
float *wavdata;
int dd;
float sampdiff;
float difratio;
float interpsample;
float maxvalue = 0;
float hopfactor = 1;
float lastfactor;
int filelength;
char byte1, byte2;
int samplevalue;
char *pbyte1 = &byte1;
char *pbyte2 = &byte2;
char *buffer;
int tempval = 0;
bool drum = false;
float drumthresh = 65;
int transhold = 0;
int numpeaks;
float *peak_locations;

TimeStretchFilter::TimeStretchFilter() :
    Filter()
{
    // start bypassed, with transient detection and peak locking switched off
    m_bypass = true;
    m_transcheck = false;
    m_peakcheck = false;

    /**********malloc***********/
    /*
    FFTframe=(float *)calloc((framesize), sizeof(float));

    //This block specifically sets up the buffers required to do a 75% overlap scheme
    audioframe=(float *)calloc((framesize), sizeof(float));        //The current frame
    prev_audioframe=(float *)calloc((framesize), sizeof(float));
    window=(float *)calloc((framesize), sizeof(float));            //Window
    processedframe=(float *)calloc((framesize), sizeof(float));    //The current frame
    synthframe=(float *)calloc((framesize), sizeof(float));
    outbuffer=(float *)calloc((framesize/4), sizeof(float));       //The current output segment which is 1/4 framesize for 75% overlap
    holdbuffer3=(float *)calloc((framesize*0.75), sizeof(float));  //The hold buffer for the previous frame segment
    holdbuffer2=(float *)calloc((framesize/2), sizeof(float));     //The hold buffer for the frame segment 2 frames ago
    holdbuffer1=(float *)calloc((framesize/4), sizeof(float));

    c_mags=(float *)calloc((framesize/2), sizeof(float));          //The magnitude and phase arrays
    p_mags=(float *)calloc((framesize/2), sizeof(float));
    c_phase=(float *)calloc((framesize/2), sizeof(float));
    p_phase=(float *)calloc((framesize/2), sizeof(float));
    c_synthphase=(float *)calloc((framesize/2), sizeof(float));
    p_synthphase=(float *)calloc((framesize/2), sizeof(float));

    peak_locations=(float *)calloc((framesize/2), sizeof(float));
    buffer=(char *)calloc((framesize/2), sizeof(char));
    wavdata = (float*)calloc((framesize*2), sizeof(float));

    hanning(window, framesize);
    */
    /***************************/
}

TimeStretchFilter::~TimeStretchFilter()
{
    /**********de-alloc***********/
    // NB: the buffers above are obtained with calloc, so free() would be the
    // matching release call if this block is re-enabled.
    /*
    delete FFTframe;
    delete audioframe;
    delete prev_audioframe;
    delete window;
    delete processedframe;
    delete synthframe;
    //delete outbuffer;
    delete holdbuffer3;
    delete holdbuffer2;
    delete holdbuffer1;
    delete c_mags;
    delete p_mags;
    delete c_phase;
    delete p_phase;
    delete c_synthphase;
    delete p_synthphase;
    delete peak_locations;
    delete buffer;
    delete outbuffer;
    */
    /***************************/
}
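// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): the disabled set-up
// code above finishes with hanning(window, framesize), a helper from DSP.h
// whose definition is not shown here.  The function below is a minimal
// stand-in assuming the standard Hann window, w[n] = 0.5*(1 - cos(2*pi*n/N)),
// which matches the re-windowing expression commented in putInput(); the name
// hanning_sketch and its signature are hypothetical.
static void hanning_sketch(float *dest, int length)
{
    const double pi = 3.14159265358979323846;
    for (int n = 0; n < length; ++n) {
        // one period of a raised-cosine (Hann) window
        dest[n] = float(0.5 * (1.0 - cos(2.0 * pi * n / length)));
    }
}
// ---------------------------------------------------------------------------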
TimeStretchFilter::PropertyList
TimeStretchFilter::getProperties() const
{
    PropertyList list;
    list.push_back("Time");
    list.push_back("Pitch");
    list.push_back("Bypass");
    list.push_back("Transdetect");
    list.push_back("Peaklock");
    return list;
}

QString
TimeStretchFilter::getPropertyLabel(const PropertyName &name) const
{
    if (name == "Time") return tr("Time");
    if (name == "Pitch") return tr("Pitch");
    if (name == "Bypass") return tr("Bypass Processing");
    if (name == "Transdetect") return tr("Transient Detection");
    if (name == "Peaklock") return tr("Peak Locking");
    return "";
}

TimeStretchFilter::PropertyType
TimeStretchFilter::getPropertyType(const PropertyName &name) const
{
    if (name == "Time") return RangeProperty;
    if (name == "Pitch") return RangeProperty;
    if (name == "Bypass") return ToggleProperty;
    if (name == "Transdetect") return ToggleProperty;
    if (name == "Peaklock") return ToggleProperty;
    return InvalidProperty;
}

int
TimeStretchFilter::getPropertyRangeAndValue(const PropertyName &name,
                                            int *min, int *max, int *deflt) const
{
    //!!! factor this colour handling stuff out into a colour manager class

    int val = 0;

    if (name == "Time") {
        if (min) *min = -100;
        if (max) *max = 100;
        if (deflt) *deflt = 0;
    }

    if (name == "Pitch") {
        if (min) *min = -100;
        if (max) *max = 100;
        if (deflt) *deflt = 0;
    }

    return val;
}

QString
TimeStretchFilter::getPropertyValueLabel(const PropertyName &name,
                                         int value) const
{
    if (name == "Time") {
        if (value == -100) return tr("Slow");
        if (value == 100) return tr("Fast");
    }
    return tr("<unknown>");
}

void
TimeStretchFilter::setProperty(const PropertyName &name, int value)
{
    if (name == "Time") {

        int tmaxfactor = 2;

        // divide by 100.0f so the slider value scales as a float instead of
        // being truncated to zero by integer division
        if (value > 0) {
            hopfactor = 1 + ((tmaxfactor-1) * (value/100.0f));
        }
        if (value < 0) {
            hopfactor = 1 / (1 + ((tmaxfactor-1) * ((-value)/100.0f)));
        }
        if (value == 0) {
            hopfactor = 1;
        }

    } else if (name == "Pitch") {

        int pmaxfactor = 2;

        if (value > 0) {
            interpfactor = 1 + ((pmaxfactor-1) * (value/100.0f));
        }
        if (value < 0) {
            interpfactor = 1 / (1 + ((pmaxfactor-1) * ((-value)/100.0f)));
        }
        if (value == 0) {
            interpfactor = 1;
        }

    } else if (name == "Bypass") {
        m_bypass = (value > 0);
    } else if (name == "Transdetect") {
        m_transcheck = (value > 0);
    } else if (name == "Peaklock") {
        m_peakcheck = (value > 0);
    }
}
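// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): setProperty() above
// maps a slider value in [-100, 100] onto a factor in [0.5, 2.0], with 0
// giving 1.0 and opposite slider values giving reciprocal factors.
// sliderToFactor() below is a hypothetical stand-alone version of that
// mapping, written with float arithmetic throughout.
static float sliderToFactor(int value, int maxfactor = 2)
{
    if (value > 0) return 1.0f + (maxfactor - 1) * (value / 100.0f);
    if (value < 0) return 1.0f / (1.0f + (maxfactor - 1) * (-value / 100.0f));
    return 1.0f;
}
// e.g. sliderToFactor(100) == 2.0f, sliderToFactor(-100) == 0.5f,
//      sliderToFactor(0)   == 1.0f
// ---------------------------------------------------------------------------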
void
TimeStretchFilter::putInput(float **input, size_t samples)
{
/*
    int i;
    int channel = 2;

    for (i=0; i<framesize; i++){
        wavdata[2*i]=input[0][i];
        wavdata[2*i+1]=input[1][i];
        //wavdata[2*i+1]=input[0][i];
    }

    currentposition=hop+1;

    for (int i = 0; i<(framesize); i++) {

        //This block was specifically written to do resampling interpolation for crude pitch shifting.
        //If it's not being used, the audioframe line after the else should be used, which is also used in bypass mode.
        //At

        if (m_bypass == false) {

            dd = floor(double(i*interpfactor));
            difratio = (double(i*interpfactor)) - floor(double(i*interpfactor));

            // this block loads a frame as normal
            sampdiff=wavdata[dd+currentposition+1]-wavdata[dd+currentposition];
            interpsample = (difratio*sampdiff)+wavdata[dd+currentposition];
            audioframe[i] = (interpsample*32767*volume)*window[i];

            // this block loads a frame exactly 1 hop back.  This is used only for the purposes of an
            // efficient way to calculate phase differences without having to use heterodyning,
            // as suggested by Dave Dorran
            sampdiff=wavdata[dd+currentposition+1-hop]-wavdata[dd+currentposition-hop];
            interpsample = (difratio*sampdiff)+wavdata[dd+currentposition-hop];
            prev_audioframe[i] = (interpsample*32767*volume)*window[i];

            //processedframe[i] = (audioframe[i])*(0.5*(1-cos(2*PI*(i)/framesize))); ///needs to happen after processing

        } else {
            audioframe[i] = (wavdata[i+currentposition+1]*32767*volume)*window[i];
            processedframe[i] = (audioframe[i])*window[i];
        }

        //--------------------------------------------------------------------------------------------
        //calculate time frame stats here

        if (audioframe[i] > maxvalue){
            maxvalue=audioframe[i];
        }
    }

    //This maxvalue is the peak in the frame.  The progress bar is on a timer
    //event which checks this value in order to have a realtime display update
    //for the peak meter.
    tempval = ((maxvalue/32767)*2)*100;
    maxvalue = maxvalue*.9;

    //---------------------------------------------------------------------------------

    FFTReal fft_object (framesize);

    if (m_bypass == false) {

        fft_object.do_fft (FFTframe,audioframe);
        cart2pol(FFTframe, c_mags, c_phase, framesize);

        //--------------------------------------------

        fft_object.do_fft (FFTframe,prev_audioframe);
        cart2pol(FFTframe, p_mags, p_phase, framesize);

        drum=transient_detect(c_mags, c_mags, p_mags, p_mags, drumthresh, framesize);

        if (m_transcheck) {

            if (drum && transhold==0){
                cur2last(c_phase, c_synthphase, p_synthphase, framesize);
                transhold=4;
            }
            else{
                if(m_peakcheck){
                    rotatephases_peaklocked(c_phase, p_phase, c_synthphase, p_synthphase, framesize, interpfactor);
                }
                else{
                    rotatephases(c_phase, p_phase, c_synthphase, p_synthphase, framesize, interpfactor);
                }
            }

        } else {

            if(m_peakcheck){
                rotatephases_peaklocked(c_phase, p_phase, c_synthphase, p_synthphase, framesize, interpfactor);
            }
            else{
                rotatephases(c_phase, p_phase, c_synthphase, p_synthphase, framesize, interpfactor);
            }
        }

        if(transhold != 0){
            transhold=transhold-1;
        }

        drum = 0;

        //---------------------------------------
        //updatephases2(c_phase, p_phase, c_synthphase, p_synthphase, framesize, hopfactor, interpfactor);

        //calculate freq frame stats here
        //process freq domain here

        pol2cart(FFTframe, c_mags, c_synthphase, framesize);

        fft_object.do_ifft (FFTframe,processedframe);
        fft_object.rescale (processedframe);
        //VIP######## I have edited this function to do rewindowing also ######
    }

    //-----------------------------------------------------------------------------------

    for (int p = 0; p<(framesize); p++){
        processedframe[p]=processedframe[p]*window[p];
    }

    for (int j = 0; j<(framesize); j++) {

        //This block deals with the buffers for a 75% overlap scheme

        if (j < framesize/4){
            outbuffer[j]=(processedframe[j]+holdbuffer1[j]+holdbuffer2[j]+holdbuffer3[j])*0.5;
            holdbuffer1[j]=holdbuffer2[j+(framesize/4)];
        }
        if (j < framesize/2){
            holdbuffer2[j]=holdbuffer3[j+(framesize/4)];
        }
        if (j < framesize*0.75){
            holdbuffer3[j]=processedframe[j+(framesize/4)];
        }

        if (j < framesize/4){
            samplevalue = outbuffer[j];
            intobyte(samplevalue, pbyte1, pbyte2);
            buffer[j*2] = byte1;
            buffer[j*2+1] = byte2;
        }
    }

    if (m_bypass == false && transhold==0) {
        currentposition = currentposition + floor(hop*hopfactor);
    } else {
        currentposition = currentposition + hop;
    } // LB + floor(hop)

    if (filelength - currentposition < framesize*2){
        currentposition=hop+1;
    }
*/
}
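// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): the disabled body of
// putInput() reads the input at fractional positions i*interpfactor and
// linearly interpolates between the two neighbouring samples to get a crude
// pitch shift by resampling.  readInterpolated() is a hypothetical
// distillation of that dd/difratio/sampdiff/interpsample block for one
// fractional read position (the caller must keep position + 1 inside the
// buffer).
static float readInterpolated(const float *data, double position)
{
    int   index = int(floor(position));           // sample just below the read point
    float frac  = float(position - index);        // fractional part in [0, 1)
    float diff  = data[index + 1] - data[index];  // slope between the two neighbours
    return data[index] + frac * diff;             // linear interpolation
}
// A caller steps position by interpfactor per output sample: factors above 1
// read through the buffer faster (higher pitch), factors below 1 more slowly
// (lower pitch).
// ---------------------------------------------------------------------------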
void
TimeStretchFilter::getOutput(float **output, size_t samples)
{
    /*
    int i;
    int channel = 2;

    for (i=0; i<samples; i++){
        output[0][i] = outbuffer[i*2];
        output[1][i] = outbuffer[i*2+1];
    }
    */
}
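// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the original file): the overlap-add loop in
// putInput() packs each output sample into two chars via intobyte() from
// DSP.h, whose definition is not shown here.  The helper below assumes the
// usual split of a 16-bit PCM sample into low and high bytes (low byte first,
// matching the buffer[j*2] / buffer[j*2+1] layout); the name intobyte_sketch
// and the byte order are assumptions, not the project's actual implementation.
static void intobyte_sketch(int samplevalue, char *low, char *high)
{
    *low  = char(samplevalue & 0xff);         // least significant byte
    *high = char((samplevalue >> 8) & 0xff);  // most significant byte
}
// ---------------------------------------------------------------------------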