/*
 * render.cpp
 *
 *  Created on: Oct 24, 2014
 *      Author: parallels
 */

#include "../../include/RTAudioSettings.h"
#include "../../include/render.h"
#include <cmath>			// sinf(), M_PI
#include "../../include/client.h"
#include "../../include/RTAudio.h"	// to schedule lower prio parallel process
#include <unistd.h>			// usleep(); header name lost in the original, assumed from usage below
#include <rtdk.h>			// rt_printf(); header name lost in the original, assumed from usage below
#include "../../include/PRU.h"

float gFrequency;
float gPhase;
float gInverseSampleRate;
int gCount = 0;
networkData networkObject;
networkAudio netAudio;
extern PRU *gPRU;
AuxiliaryTask printIntervalTask;
AuxiliaryTask transmitReceiveAudioTask;

void transmitReceiveData(){ // transmit and receive asynchronous messages
//	printf("transmitReceiveData auxiliary task has started\n");
//	while(!gShouldStop){
//		sendMessage(&networkObject);
//		receiveMessage(networkObject);
//		usleep(1000);
//	}
//	closeSockets();
}

void transmitReceiveAudio(){ // transmit and receive audio buffers
	sendAudio(&netAudio);
}

// initialise_render() is called once before the audio rendering starts.
// Use it to perform any initialisation and allocation which is dependent
// on the period size or sample rate.
//
// userData holds an opaque pointer to a data structure that was passed
// in from the call to initAudio().
//
// Return true on success; returning false halts the program.
bool initialise_render(int numMatrixChannels, int numDigitalChannels, int numAudioChannels,
                       int numMatrixFramesPerPeriod,
                       int numAudioFramesPerPeriod,
                       float matrixSampleRate, float audioSampleRate,
                       void *userData, RTAudioSettings *settings)
{
	// Retrieve a parameter passed in from the initAudio() call
	gFrequency = *(float *)userData;

	gInverseSampleRate = 1.0 / audioSampleRate;
	gPhase = 0.0;

//	networkObject.counter=&gCount;
//	networkObject.variables[0]=&gFrequency;
//	networkObject.variables[1]=&gPhase;
//	networkObject.numVariables=2;
//	netAudio.doneOnTime=1;
//	netAudio.index=0;
//	netAudio.currentBuffer=0;
//	setupSockets(settings->receivePort, settings->transmitPort, settings->serverName);
////	transmitReceiveDataTask=createAuxiliaryTaskLoop(*transmitReceiveData, 10, "transmit-receive-data");
////	scheduleAuxiliaryTask(transmitReceiveDataTask); //here it does not work
//	transmitReceiveAudioTask=createAuxiliaryTaskLoop(*transmitReceiveAudio, 98, "transmit-receive-audio");
	return true;
}

// render() is called regularly at the highest priority by the audio engine.
// Input and output are given from the audio hardware and the other
// ADCs and DACs (if available). If only audio is available, numMatrixFrames
// will be 0.

void render(int numAnalogFrames, int numAudioFrames, int numDigitalFrames, float *audioIn, float *audioOut,
            float *analogIn, float *analogOut, uint32_t *digital)
{
	for(int n = 0; n < numAudioFrames; n++) {
		float out = 0.7f * sinf(gPhase);
		// Artificial per-sample workload: 24 extra sinf() calls, scaled down
		// to (near) silence before being written to channel 0 below.
		float fake = 0.1;
		for(int a = 0; a < 24; a++){
			fake = 0.7f * sinf(fake + out);
		}
		fake /= 1000000000000000;
		gPhase += 2.0 * M_PI * gFrequency * gInverseSampleRate;
		if(gPhase > 2.0 * M_PI)
			gPhase -= 2.0 * M_PI;

//		for(int channel = 0; channel < gNumAudioChannels; channel++)
//			audioOut[n * gNumAudioChannels + channel] = audioIn[n * gNumAudioChannels + 0]+audioIn[n * gNumAudioChannels + 1];
		audioOut[n * gNumAudioChannels] = fake * 0.0000000001;
		audioOut[n * gNumAudioChannels + 1] = out;
		if(0 == gCount){
//			scheduleAuxiliaryTask(transmitReceiveDataTask);
		}
//		if(netAudio.index==(NETWORK_AUDIO_BUFFER_SIZE)){ // when the buffer is ready ...
//			netAudio.index=0; //reset the counter
//			if(netAudio.doneOnTime==0)
//				rt_printf("Network buffer underrun :-{\n");
//			netAudio.timestamp=gCount;
//			netAudio.currentBuffer=!netAudio.currentBuffer; //switch buffer
//			netAudio.doneOnTime=0;
//			scheduleAuxiliaryTask(transmitReceiveAudioTask); //send the buffer
//		}
//		netAudio.buffers[netAudio.currentBuffer][netAudio.index++]=audioOut[n*gNumAudioChannels + 0]; //copy channel 0 to the buffer
		gCount++;
	}
}

// cleanup_render() is called once at the end, after the audio has stopped.
// Release any resources that were allocated in initialise_render().

void cleanup_render()
{
//	closeSockets();
}
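
// ----------------------------------------------------------------------------
// For context only: a minimal, hypothetical sketch of the host-side code that
// passes gFrequency to initialise_render() through the opaque userData pointer
// mentioned in the comments above. The initAudio()/startAudio() signatures are
// assumed from RTAudio.h and may differ; the sketch is kept inside comments so
// it does not affect compilation of render.cpp.
//
//	#include "../../include/RTAudio.h"
//
//	int main()
//	{
//		RTAudioSettings settings;	// assumed to be populated elsewhere (e.g. from command-line options)
//		float frequency = 440.0;	// read back in initialise_render() as *(float *)userData
//
//		if(initAudio(&settings, &frequency) != 0)	// assumed signature: settings plus opaque userData
//			return 1;
//		if(startAudio() != 0)				// assumed: starts the real-time thread that calls render()
//			return 1;
//		// ... wait until done, then stopAudio() and cleanupAudio() ...
//		return 0;
//	}
// ----------------------------------------------------------------------------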