robert@372
|
1 /*
|
robert@372
|
2 ____ _____ _ _
|
robert@372
|
3 | __ )| ____| | / \
|
robert@372
|
4 | _ \| _| | | / _ \
|
robert@372
|
5 | |_) | |___| |___ / ___ \
|
robert@372
|
6 |____/|_____|_____/_/ \_\.io
|
robert@372
|
7
|
robert@372
|
8 */
|
robert@372
|
9
|
giuliomoro@250
|
10 /*
|
giuliomoro@250
|
11 * render.cpp
|
giuliomoro@250
|
12 *
|
giuliomoro@250
|
13 * Created on: Oct 24, 2014
|
giuliomoro@250
|
14 * Author: parallels
|
giuliomoro@250
|
15 */
|
giuliomoro@250
|
16
|
robert@372
|
17 /**
|
robert@372
|
18 \example 4_audio_FFT_phase_vocoder
|
robert@372
|
19
|
robert@372
|
20 Phase Vocoder
|
robert@372
|
21 ----------------------
|
robert@372
|
22
|
robert@372
|
23 This sketch shows an implementation of a phase vocoder and builds on the previous FFT example.
|
robert@372
|
24 Again it uses the NE10 library, included at the top of the file.
|
robert@372
|
25
|
robert@372
|
26 Read the documentation on the NE10 library [here](http://projectne10.github.io/Ne10/doc/annotated.html).
|
robert@372
|
27 */
|
giuliomoro@250
|
28
|
giuliomoro@301
|
29 #include <Bela.h>
|
giuliomoro@250
|
30 #include <rtdk.h>
|
andrewm@379
|
31 #include <ne10/NE10.h> // NEON FFT library
|
giuliomoro@250
|
32 #include <cmath>
|
giuliomoro@250
|
33 #include "SampleData.h"
|
giuliomoro@250
|
34 #include <Midi.h>
|
giuliomoro@250
|
35
|
giuliomoro@250
|
36 #define BUFFER_SIZE 16384
|
giuliomoro@250
|
37
|
giuliomoro@250
|
38 // TODO: your buffer and counter go here!
|
giuliomoro@250
|
39 float gInputBuffer[BUFFER_SIZE];
|
giuliomoro@250
|
40 int gInputBufferPointer = 0;
|
giuliomoro@250
|
41 float gOutputBuffer[BUFFER_SIZE];
|
giuliomoro@250
|
42 int gOutputBufferWritePointer = 0;
|
giuliomoro@250
|
43 int gOutputBufferReadPointer = 0;
|
giuliomoro@250
|
44 int gSampleCount = 0;
|
giuliomoro@250
|
45
|
giuliomoro@250
|
46 float *gWindowBuffer;
|
giuliomoro@250
|
47
|
giuliomoro@250
|
48 // -----------------------------------------------
|
giuliomoro@250
|
49 // These variables used internally in the example:
|
giuliomoro@250
|
50 int gFFTSize = 2048;
|
giuliomoro@250
|
51 int gHopSize = 512;
|
giuliomoro@250
|
52 int gPeriod = 512;
|
giuliomoro@250
|
53 float gFFTScaleFactor = 0;
|
giuliomoro@250
|
54
|
giuliomoro@250
|
55 // FFT vars
|
giuliomoro@250
|
56 ne10_fft_cpx_float32_t* timeDomainIn;
|
giuliomoro@250
|
57 ne10_fft_cpx_float32_t* timeDomainOut;
|
giuliomoro@250
|
58 ne10_fft_cpx_float32_t* frequencyDomain;
|
giuliomoro@250
|
59 ne10_fft_cfg_float32_t cfg;
|
giuliomoro@250
|
60
|
giuliomoro@250
|
61 // Sample info
|
giuliomoro@250
|
62 SampleData gSampleData; // User defined structure to get complex data from main
|
giuliomoro@250
|
63 int gReadPtr = 0; // Position of last read sample from file
|
giuliomoro@250
|
64
|
giuliomoro@250
|
65 // Auxiliary task for calculating FFT
|
giuliomoro@250
|
66 AuxiliaryTask gFFTTask;
|
giuliomoro@250
|
67 int gFFTInputBufferPointer = 0;
|
giuliomoro@250
|
68 int gFFTOutputBufferPointer = 0;
|
giuliomoro@250
|
69
|
giuliomoro@250
|
70 void process_fft_background();
|
giuliomoro@250
|
71
|
giuliomoro@250
|
72
|
giuliomoro@250
|
73 int gEffect = 0; // change this here or with midi CC
|
giuliomoro@250
|
74 enum{
|
giuliomoro@250
|
75 kBypass,
|
giuliomoro@250
|
76 kRobot,
|
giuliomoro@250
|
77 kWhisper,
|
giuliomoro@250
|
78 };
|
giuliomoro@250
|
79
|
giuliomoro@250
|
80 float gDryWet = 1; // mix between the unprocessed and processed sound
|
giuliomoro@250
|
81 float gPlaybackLive = 0.5f; // mix between the file playback and the live audio input
|
giuliomoro@250
|
82 float gGain = 1; // overall gain
|
andrewm@373
|
83 float *gInputAudio = NULL;
|
giuliomoro@250
|
84 Midi midi;
|
andrewm@373
|
85
|
andrewm@373
|
86
|
giuliomoro@250
|
87 void midiCallback(MidiChannelMessage message, void* arg){
|
giuliomoro@250
|
88 if(message.getType() == kmmNoteOn){
|
giuliomoro@250
|
89 if(message.getDataByte(1) > 0){
|
giuliomoro@250
|
90 int note = message.getDataByte(0);
|
giuliomoro@250
|
91 float frequency = powf(2, (note-69)/12.f)*440;
|
giuliomoro@250
|
92 gPeriod = (int)(44100 / frequency + 0.5);
|
giuliomoro@250
|
93 printf("\nnote: %d, frequency: %f, hop: %d\n", note, frequency, gPeriod);
|
giuliomoro@250
|
94 }
|
giuliomoro@250
|
95 }
|
giuliomoro@250
|
96
|
giuliomoro@250
|
97 bool shouldPrint = false;
|
giuliomoro@250
|
98 if(message.getType() == kmmControlChange){
|
giuliomoro@250
|
99 float data = message.getDataByte(1) / 127.0f;
|
giuliomoro@250
|
100 switch (message.getDataByte(0)){
|
giuliomoro@250
|
101 case 2 :
|
giuliomoro@250
|
102 gEffect = (int)(data * 2 + 0.5); // CC2 selects an effect between 0,1,2
|
giuliomoro@250
|
103 break;
|
giuliomoro@250
|
104 case 3 :
|
giuliomoro@250
|
105 gPlaybackLive = data;
|
giuliomoro@250
|
106 break;
|
giuliomoro@250
|
107 case 4 :
|
giuliomoro@250
|
108 gDryWet = data;
|
giuliomoro@250
|
109 break;
|
giuliomoro@250
|
110 case 5:
|
giuliomoro@250
|
111 gGain = data*10;
|
giuliomoro@250
|
112 break;
|
giuliomoro@250
|
113 default:
|
giuliomoro@250
|
114 shouldPrint = true;
|
giuliomoro@250
|
115 }
|
giuliomoro@250
|
116 }
|
giuliomoro@250
|
117 if(shouldPrint){
|
giuliomoro@250
|
118 message.prettyPrint();
|
giuliomoro@250
|
119 }
|
giuliomoro@250
|
120 }
|
giuliomoro@250
|
121
|
giuliomoro@250
|
122 // userData holds an opaque pointer to a data structure that was passed
|
giuliomoro@250
|
123 // in from the call to initAudio().
|
giuliomoro@250
|
124 //
|
giuliomoro@250
|
125 // Return true on success; returning false halts the program.
|
giuliomoro@301
|
126 bool setup(BelaContext* context, void* userData)
|
giuliomoro@250
|
127 {
|
giuliomoro@250
|
128 midi.readFrom(0);
|
giuliomoro@250
|
129 midi.setParserCallback(midiCallback);
|
giuliomoro@250
|
130 // Retrieve a parameter passed in from the initAudio() call
|
giuliomoro@250
|
131 gSampleData = *(SampleData *)userData;
|
giuliomoro@250
|
132
|
giuliomoro@250
|
133 gFFTScaleFactor = 1.0f / (float)gFFTSize;
|
giuliomoro@250
|
134 gOutputBufferWritePointer += gHopSize;
|
giuliomoro@250
|
135
|
giuliomoro@250
|
136 timeDomainIn = (ne10_fft_cpx_float32_t*) NE10_MALLOC (gFFTSize * sizeof (ne10_fft_cpx_float32_t));
|
giuliomoro@250
|
137 timeDomainOut = (ne10_fft_cpx_float32_t*) NE10_MALLOC (gFFTSize * sizeof (ne10_fft_cpx_float32_t));
|
giuliomoro@250
|
138 frequencyDomain = (ne10_fft_cpx_float32_t*) NE10_MALLOC (gFFTSize * sizeof (ne10_fft_cpx_float32_t));
|
andrewm@373
|
139 cfg = ne10_fft_alloc_c2c_float32_neon (gFFTSize);
|
giuliomoro@250
|
140
|
giuliomoro@250
|
141 memset(timeDomainOut, 0, gFFTSize * sizeof (ne10_fft_cpx_float32_t));
|
giuliomoro@250
|
142 memset(gOutputBuffer, 0, BUFFER_SIZE * sizeof(float));
|
giuliomoro@250
|
143
|
andrewm@373
|
144 // Allocate buffer to mirror and modify the input
|
andrewm@373
|
145 gInputAudio = (float *)malloc(context->audioFrames * context->audioChannels * sizeof(float));
|
andrewm@373
|
146 if(gInputAudio == 0)
|
andrewm@373
|
147 return false;
|
andrewm@373
|
148
|
giuliomoro@250
|
149 // Allocate the window buffer based on the FFT size
|
giuliomoro@250
|
150 gWindowBuffer = (float *)malloc(gFFTSize * sizeof(float));
|
giuliomoro@250
|
151 if(gWindowBuffer == 0)
|
giuliomoro@250
|
152 return false;
|
giuliomoro@250
|
153
|
giuliomoro@250
|
154 // Calculate a Hann window
|
giuliomoro@250
|
155 for(int n = 0; n < gFFTSize; n++) {
|
giuliomoro@250
|
156 gWindowBuffer[n] = 0.5f * (1.0f - cosf(2.0 * M_PI * n / (float)(gFFTSize - 1)));
|
giuliomoro@250
|
157 }
|
giuliomoro@250
|
158
|
giuliomoro@250
|
159 // Initialise auxiliary tasks
|
giuliomoro@301
|
160 if((gFFTTask = Bela_createAuxiliaryTask(&process_fft_background, 90, "fft-calculation")) == 0)
|
giuliomoro@250
|
161 return false;
|
giuliomoro@251
|
162 rt_printf("You are listening to an FFT phase-vocoder with overlap-and-add.\n"
|
giuliomoro@250
|
163 "Use Midi Control Change to control:\n"
|
giuliomoro@251
|
164 "CC 2: effect type (bypass/robotization/whisperization)\n"
|
giuliomoro@251
|
165 "CC 3: mix between recorded sample and live audio input\n"
|
giuliomoro@251
|
166 "CC 4: mix between the unprocessed and processed sound\n"
|
giuliomoro@251
|
167 "CC 5: gain\n"
|
giuliomoro@250
|
168 );
|
giuliomoro@250
|
169 return true;
|
giuliomoro@250
|
170 }
|
giuliomoro@250
|
171
|
giuliomoro@250
|
172 // This function handles the FFT processing in this example once the buffer has
|
giuliomoro@250
|
173 // been assembled.
|
giuliomoro@250
|
174 void process_fft(float *inBuffer, int inWritePointer, float *outBuffer, int outWritePointer)
|
giuliomoro@250
|
175 {
|
giuliomoro@250
|
176 // Copy buffer into FFT input
|
giuliomoro@250
|
177 int pointer = (inWritePointer - gFFTSize + BUFFER_SIZE) % BUFFER_SIZE;
|
giuliomoro@250
|
178 for(int n = 0; n < gFFTSize; n++) {
|
giuliomoro@250
|
179 timeDomainIn[n].r = (ne10_float32_t) inBuffer[pointer] * gWindowBuffer[n];
|
giuliomoro@250
|
180 timeDomainIn[n].i = 0;
|
giuliomoro@250
|
181
|
giuliomoro@250
|
182 pointer++;
|
giuliomoro@250
|
183 if(pointer >= BUFFER_SIZE)
|
giuliomoro@250
|
184 pointer = 0;
|
giuliomoro@250
|
185 }
|
giuliomoro@250
|
186
|
giuliomoro@250
|
187 // Run the FFT
|
andrewm@373
|
188 ne10_fft_c2c_1d_float32_neon (frequencyDomain, timeDomainIn, cfg, 0);
|
giuliomoro@250
|
189
|
giuliomoro@250
|
190 switch (gEffect){
|
giuliomoro@250
|
191 case kRobot :
|
giuliomoro@250
|
192 // Robotise the output
|
giuliomoro@250
|
193 for(int n = 0; n < gFFTSize; n++) {
|
giuliomoro@250
|
194 float amplitude = sqrtf(frequencyDomain[n].r * frequencyDomain[n].r + frequencyDomain[n].i * frequencyDomain[n].i);
|
giuliomoro@250
|
195 frequencyDomain[n].r = amplitude;
|
giuliomoro@250
|
196 frequencyDomain[n].i = 0;
|
giuliomoro@250
|
197 }
|
giuliomoro@250
|
198 break;
|
giuliomoro@250
|
199 case kWhisper :
|
giuliomoro@250
|
200 for(int n = 0; n < gFFTSize; n++) {
|
giuliomoro@250
|
201 float amplitude = sqrtf(frequencyDomain[n].r * frequencyDomain[n].r + frequencyDomain[n].i * frequencyDomain[n].i);
|
giuliomoro@250
|
202 float phase = rand()/(float)RAND_MAX * 2 * M_PI;
|
giuliomoro@250
|
203 frequencyDomain[n].r = cosf(phase) * amplitude;
|
giuliomoro@250
|
204 frequencyDomain[n].i = sinf(phase) * amplitude;
|
giuliomoro@250
|
205 }
|
giuliomoro@250
|
206 break;
|
giuliomoro@250
|
207 case kBypass:
|
giuliomoro@250
|
208 //bypass
|
giuliomoro@250
|
209 break;
|
giuliomoro@250
|
210 }
|
giuliomoro@250
|
211
|
giuliomoro@250
|
212 // Run the inverse FFT
|
andrewm@373
|
213 ne10_fft_c2c_1d_float32_neon (timeDomainOut, frequencyDomain, cfg, 1);
|
giuliomoro@250
|
214 // Overlap-and-add timeDomainOut into the output buffer
|
giuliomoro@250
|
215 pointer = outWritePointer;
|
giuliomoro@250
|
216 for(int n = 0; n < gFFTSize; n++) {
|
giuliomoro@250
|
217 outBuffer[pointer] += (timeDomainOut[n].r) * gFFTScaleFactor;
|
giuliomoro@250
|
218 if(isnan(outBuffer[pointer]))
|
giuliomoro@250
|
219 rt_printf("outBuffer OLA\n");
|
giuliomoro@250
|
220 pointer++;
|
giuliomoro@250
|
221 if(pointer >= BUFFER_SIZE)
|
giuliomoro@250
|
222 pointer = 0;
|
giuliomoro@250
|
223 }
|
giuliomoro@250
|
224 }
|
giuliomoro@250
|
225
|
giuliomoro@250
|
226 // Function to process the FFT in a thread at lower priority
|
giuliomoro@250
|
// Entry point for the lower-priority auxiliary task: runs one FFT frame
// using the buffer positions latched into gFFTInputBufferPointer /
// gFFTOutputBufferPointer by render() when it scheduled this task.
void process_fft_background() {
	process_fft(gInputBuffer, gFFTInputBufferPointer, gOutputBuffer, gFFTOutputBufferPointer);
}
|
giuliomoro@250
|
230
|
giuliomoro@250
|
231 // render() is called regularly at the highest priority by the audio engine.
|
giuliomoro@250
|
232 // Input and output are given from the audio hardware and the other
|
giuliomoro@250
|
233 // ADCs and DACs (if available). If only audio is available, the number
|
giuliomoro@250
|
234 // of analog frames will be 0.
|
giuliomoro@301
|
235 void render(BelaContext* context, void* userData)
|
giuliomoro@250
|
236 {
|
giuliomoro@250
|
237 float* audioOut = context->audioOut;
|
giuliomoro@250
|
238 int numAudioFrames = context->audioFrames;
|
giuliomoro@250
|
239 int numAudioChannels = context->audioChannels;
|
giuliomoro@250
|
240 // ------ this code internal to the demo; leave as is ----------------
|
giuliomoro@250
|
241
|
giuliomoro@250
|
242 // Prep the "input" to be the sound file played in a loop
|
giuliomoro@250
|
243 for(int n = 0; n < numAudioFrames; n++) {
|
giuliomoro@250
|
244 if(gReadPtr < gSampleData.sampleLen)
|
andrewm@373
|
245 gInputAudio[2*n] = gInputAudio[2*n+1] = gSampleData.samples[gReadPtr]*(1-gPlaybackLive) +
|
andrewm@308
|
246 gPlaybackLive*0.5f*(audioRead(context,n,0)+audioRead(context,n,1));
|
giuliomoro@250
|
247 else
|
andrewm@373
|
248 gInputAudio[2*n] = gInputAudio[2*n+1] = 0;
|
giuliomoro@250
|
249 if(++gReadPtr >= gSampleData.sampleLen)
|
giuliomoro@250
|
250 gReadPtr = 0;
|
giuliomoro@250
|
251 }
|
giuliomoro@250
|
252 // -------------------------------------------------------------------
|
giuliomoro@250
|
253
|
giuliomoro@250
|
254 for(int n = 0; n < numAudioFrames; n++) {
|
andrewm@373
|
255 gInputBuffer[gInputBufferPointer] = ((gInputAudio[n*numAudioChannels] + gInputAudio[n*numAudioChannels+1]) * 0.5);
|
giuliomoro@250
|
256
|
giuliomoro@250
|
257 // Copy output buffer to output
|
giuliomoro@250
|
258 for(int channel = 0; channel < numAudioChannels; channel++){
|
andrewm@373
|
259 audioOut[n * numAudioChannels + channel] = gOutputBuffer[gOutputBufferReadPointer] * gGain * gDryWet + (1 - gDryWet) * gInputAudio[n * numAudioChannels + channel];
|
giuliomoro@250
|
260 }
|
giuliomoro@250
|
261
|
giuliomoro@250
|
262 // Clear the output sample in the buffer so it is ready for the next overlap-add
|
giuliomoro@250
|
263 gOutputBuffer[gOutputBufferReadPointer] = 0;
|
giuliomoro@250
|
264 gOutputBufferReadPointer++;
|
giuliomoro@250
|
265 if(gOutputBufferReadPointer >= BUFFER_SIZE)
|
giuliomoro@250
|
266 gOutputBufferReadPointer = 0;
|
giuliomoro@250
|
267 gOutputBufferWritePointer++;
|
giuliomoro@250
|
268 if(gOutputBufferWritePointer >= BUFFER_SIZE)
|
giuliomoro@250
|
269 gOutputBufferWritePointer = 0;
|
giuliomoro@250
|
270
|
giuliomoro@250
|
271 gInputBufferPointer++;
|
giuliomoro@250
|
272 if(gInputBufferPointer >= BUFFER_SIZE)
|
giuliomoro@250
|
273 gInputBufferPointer = 0;
|
giuliomoro@250
|
274
|
giuliomoro@250
|
275 gSampleCount++;
|
giuliomoro@250
|
276 if(gSampleCount >= gHopSize) {
|
giuliomoro@250
|
277 //process_fft(gInputBuffer, gInputBufferPointer, gOutputBuffer, gOutputBufferPointer);
|
giuliomoro@250
|
278 gFFTInputBufferPointer = gInputBufferPointer;
|
giuliomoro@250
|
279 gFFTOutputBufferPointer = gOutputBufferWritePointer;
|
giuliomoro@301
|
280 Bela_scheduleAuxiliaryTask(gFFTTask);
|
giuliomoro@250
|
281
|
giuliomoro@250
|
282 gSampleCount = 0;
|
giuliomoro@250
|
283 }
|
giuliomoro@250
|
284 }
|
giuliomoro@250
|
285 gHopSize = gPeriod;
|
giuliomoro@250
|
286 }
|
giuliomoro@250
|
287
|
giuliomoro@250
|
288 // cleanup() is called once at the end, after the audio has stopped.
|
giuliomoro@250
|
289 // Release any resources that were allocated in setup().
|
giuliomoro@250
|
290
|
giuliomoro@301
|
291 void cleanup(BelaContext* context, void* userData)
|
giuliomoro@250
|
292 {
|
giuliomoro@250
|
293 NE10_FREE(timeDomainIn);
|
giuliomoro@250
|
294 NE10_FREE(timeDomainOut);
|
giuliomoro@250
|
295 NE10_FREE(frequencyDomain);
|
giuliomoro@250
|
296 NE10_FREE(cfg);
|
andrewm@373
|
297 free(gInputAudio);
|
giuliomoro@250
|
298 free(gWindowBuffer);
|
giuliomoro@250
|
299 }
|