Mercurial > hg > beaglert
view projects/basic_network/render.cpp @ 54:d3f869b98147 ultra-staging
Udp transmission working (enough) for demo in Nottingham
author | Giulio Moro <giuliomoro@yahoo.it> |
---|---|
date | Mon, 08 Jun 2015 01:07:48 +0100 |
parents | 4255ecbb9bec |
children | 3068421c0737 |
line wrap: on
line source
/*
 * render.cpp
 *
 *  Created on: Oct 24, 2014
 *      Author: parallels
 *
 * Demo: generates a sine wave on audio output and streams a down-mix of
 * audio/analog samples to a remote host over UDP (one stream per port).
 */

#include "../../include/RTAudioSettings.h"
#include "../../include/render.h"
#include <cmath>
//#include "../../include/client.h"
#include "../../include/RTAudio.h" // to schedule lower prio parallel process
#include <rtdk.h>
#include <native/timer.h>
#include "../../include/PRU.h"
#include "../../include/UdpClient.h"

// Number of floats carried per UDP packet.
#define NETWORK_AUDIO_BUFFER_SIZE 400 //1400/4 //maximum payload for a UDP datagram over ethernet is 1472 bytes, I leave some headroom and divide by 4 to get the number of floats

// One outgoing UDP audio stream, double-buffered: the audio thread fills
// buffers[currentBuffer] while the auxiliary task sends the other buffer.
struct networkAudio{
	int timestamp;      // value of gCount when the buffer was handed off
	int currentBuffer;  // 0/1: which of the two buffers the audio thread is filling
	int index;          // write position within the buffer being filled
	float buffers[2][NETWORK_AUDIO_BUFFER_SIZE];
	int doneOnTime;     // cleared at hand-off, set after send; still 0 at next hand-off => underrun
	bool toBeSent;      // hand-off flag, audio thread -> aux task
	                    // NOTE(review): not atomic; cross-thread flag — confirm ordering guarantees
	UdpClient udpClient;
};

float gFrequency;         // sine frequency in Hz, read from userData in initialise_render()
float gPhase;             // running oscillator phase (radians)
float gInverseSampleRate; // 1 / audio sample rate, cached for the phase increment
int gCount=0;             // running audio-frame counter, also used as packet timestamp
//networkData networkObject;
#define numNetAudio 3     // number of independent UDP streams
networkAudio netAudio[numNetAudio];
extern PRU *gPRU;
AuxiliaryTask printIntervalTask;        // declared but not used in this file
AuxiliaryTask transmitReceiveAudioTask; // runs transmitReceiveAudio() below audio priority

// Placeholder for asynchronous control-message exchange; the previous
// socket-based implementation is kept commented out for reference.
void transmitReceiveData(){ //transmit and receive asynchronous messages
//	printf("transmitReceiveData auxiliary task has started\n");
//	while(!gShouldStop){
//		sendMessage(&networkObject);
//		receiveMessage(networkObject);
//		usleep(1000);
//	}
//	closeSockets();
}

// Auxiliary-task body: for every stream flagged by render(), transmit the
// buffer NOT currently being filled (i.e. the one just completed), then
// mark the stream as serviced so render() can detect underruns.
void transmitReceiveAudio(){ //transmit and receive audio buffers
	for(int n=0;n<numNetAudio; n++){
		if(netAudio[n].toBeSent){
			netAudio[n].toBeSent=false;
			netAudio[n].udpClient.send(netAudio[n].buffers[!netAudio[n].currentBuffer],NETWORK_AUDIO_BUFFER_SIZE*sizeof(float));
			netAudio[n].doneOnTime=1;
		}
	}
}

// initialise_render() is called once before the audio rendering starts.
// Use it to perform any initialisation and allocation which is dependent
// on the period size or sample rate.
//
// userData holds an opaque pointer to a data structure that was passed
// in from the call to initAudio().
//
// Return true on success; returning false halts the program.
bool initialise_render(int numMatrixChannels, int numDigitalChannels, int numAudioChannels, int numMatrixFramesPerPeriod, int numAudioFramesPerPeriod, float matrixSampleRate, float audioSampleRate, void *userData, RTAudioSettings *settings) { // Retrieve a parameter passed in from the initAudio() call gFrequency = *(float *)userData; gInverseSampleRate = 1.0 / audioSampleRate; gPhase = 0.0; // networkObject.counter=&gCount; // networkObject.variables[0]=&gFrequency; // networkObject.variables[1]=&gPhase; // networkObject.numVariables=2; for(int n=0; n<numNetAudio; n++){ netAudio[n].doneOnTime=1; netAudio[n].index=0; netAudio[n].currentBuffer=0; netAudio[n].toBeSent=false; netAudio[n].udpClient.setPort(settings->transmitPort+n); netAudio[n].udpClient.setServer(settings->serverName); } // setupSockets(settings->receivePort, settings->transmitPort, settings->serverName); // transmitReceiveDataTask=createAuxiliaryTaskLoop(*transmitReceiveData, 10, "transmit-receive-data"); // scheduleAuxiliaryTask(transmitReceiveDataTask); //here it does not work transmitReceiveAudioTask=createAuxiliaryTaskLoop(*transmitReceiveAudio, 98, "transmit-receive-audio"); return true; } // render() is called regularly at the highest priority by the audio engine. // Input and output are given from the audio hardware and the other // ADCs and DACs (if available). If only audio is available, numMatrixFrames // will be 0. 
// Called once per audio period at the highest priority. Per frame it:
//   - generates a sine sample, writes it to audio output channel 1,
//   - passes input channel 0 straight through to output channel 0,
//   - appends samples to the outgoing UDP streams:
//       stream 0: 0.5*(sine + out ch 0), every frame;
//       streams 1,2: analog inputs 0/1 (half the audio rate) + out ch 0.
// When a stream's buffer fills, it is handed off to the auxiliary task.
void render(int numAnalogFrames, int numAudioFrames, int numDigitalFrames, float *audioIn, float *audioOut, float *analogIn, float *analogOut, uint32_t *digital)
{
	for(int n = 0; n < numAudioFrames; n++) {
		// Sine oscillator; keep the phase bounded by wrapping at 2*pi.
		float out = 0.7f * sinf(gPhase);
		gPhase += 2.0 * M_PI * gFrequency * gInverseSampleRate;
		if(gPhase > 2.0 * M_PI)
			gPhase -= 2.0 * M_PI;

//		for(int channel = 0; channel < gNumAudioChannels; channel++)
//			audioOut[n * gNumAudioChannels + channel] = audioIn[n * gNumAudioChannels + 0]+audioIn[n * gNumAudioChannels + 1];
		audioOut[n * gNumAudioChannels] = audioIn[n*gNumAudioChannels+0]; // ch 0: input pass-through
		audioOut[n * gNumAudioChannels+1]=out;                           // ch 1: sine
		if(0==gCount){
//			scheduleAuxiliaryTask(transmitReceiveDataTask);
		}
		// Hand off any full buffer BEFORE writing this frame's samples:
		// flag it for the sender, swap to the other buffer, and wake the
		// auxiliary task. doneOnTime still 0 here means the previous
		// packet was never sent in time.
		for(int j=0; j<numNetAudio; j++){
			if(netAudio[j].index==(NETWORK_AUDIO_BUFFER_SIZE)){ // when the buffer is ready ...
				netAudio[j].toBeSent=true;
				netAudio[j].index=0; //reset the counter
				if(netAudio[j].doneOnTime==0)
					rt_printf("Network buffer underrun :-{\n");
				netAudio[j].timestamp=gCount;
				netAudio[j].currentBuffer=!netAudio[j].currentBuffer; //switch buffer
				netAudio[j].doneOnTime=0;
				scheduleAuxiliaryTask(transmitReceiveAudioTask); //send the buffer
			}
		}
		// Analog runs at half the audio rate, so streams 1 and 2 only take
		// a sample on even frames, reading analog frame n/2.
		// NOTE(review): assumes analogRead(channel, frame) returns the
		// matrix input for this period — confirm against its definition.
		if((gCount&1)==0){
			netAudio[1].buffers[netAudio[1].currentBuffer][netAudio[1].index++]=analogRead(0,n/2)+audioOut[n*gNumAudioChannels + 0];
			netAudio[2].buffers[netAudio[2].currentBuffer][netAudio[2].index++]=analogRead(1,n/2)+audioOut[n*gNumAudioChannels + 0];
		}
		netAudio[0].buffers[netAudio[0].currentBuffer][netAudio[0].index++]=0.5*(out+audioOut[n*gNumAudioChannels + 0]);//copy channel 0 to the buffer
//		netAudio[1].buffers[netAudio[1].currentBuffer][netAudio[1].index++]=0.5*(out+audioOut[n*gNumAudioChannels + 0]);
//		netAudio[2].buffers[netAudio[2].currentBuffer][netAudio[2].index++]=0.5*(out+audioOut[n*gNumAudioChannels + 0]);
		gCount++;
	}
}

// cleanup_render() is called once at the end, after the audio has stopped.
// Release any resources that were allocated in initialise_render().
// Nothing to release here: the socket teardown belonged to the
// commented-out messaging code.
void cleanup_render()
{
	// closeSockets();
}