annotate projects/basic_network/render.cpp @ 54:d3f869b98147 ultra-staging

Udp transmission working (enough) for demo in Nottingham
author Giulio Moro <giuliomoro@yahoo.it>
date Mon, 08 Jun 2015 01:07:48 +0100
parents 4255ecbb9bec
children 3068421c0737
rev   line source
giuliomoro@24 1 /*
giuliomoro@24 2 * render.cpp
giuliomoro@24 3 *
giuliomoro@24 4 * Created on: Oct 24, 2014
giuliomoro@24 5 * Author: parallels
giuliomoro@24 6 */
giuliomoro@24 7
giuliomoro@24 8 #include "../../include/RTAudioSettings.h"
giuliomoro@24 9 #include "../../include/render.h"
giuliomoro@24 10 #include <cmath>
giuliomoro@54 11 //#include "../../include/client.h"
giuliomoro@24 12 #include "../../include/RTAudio.h" // to schedule lower prio parallel process
giuliomoro@24 13 #include <rtdk.h>
giuliomoro@41 14 #include <native/timer.h>
giuliomoro@41 15 #include "../../include/PRU.h"
giuliomoro@54 16 #include "../../include/UdpClient.h"
giuliomoro@54 17
giuliomoro@54 18 #define NETWORK_AUDIO_BUFFER_SIZE 400 // floats per buffer. The maximum payload of a UDP datagram over Ethernet is 1472 bytes; dividing by 4 (sizeof(float)) with some headroom gives e.g. 1400/4 = 350 floats. Note that 400 floats (1600 bytes) exceeds that limit, so each datagram will be fragmented.
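// Audio destined for the network is double-buffered: render() fills
// buffers[currentBuffer] one sample at a time; when a buffer is full it is
// flagged with toBeSent and the auxiliary task transmits it over udpClient
// while render() carries on filling the other buffer.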
giuliomoro@54 19 struct networkAudio{
giuliomoro@54 20 int timestamp; // value of gCount when the buffer was handed over for sending
giuliomoro@54 21 int currentBuffer; // which of the two buffers render() is currently filling
giuliomoro@54 22 int index; // write position within the current buffer
giuliomoro@54 23 float buffers[2][NETWORK_AUDIO_BUFFER_SIZE];
giuliomoro@54 24 int doneOnTime; // cleared when a buffer is handed over, set again once it has been sent; used to detect underruns
giuliomoro@54 25 bool toBeSent; // true while a full buffer is waiting to be transmitted
giuliomoro@54 26 UdpClient udpClient; // one UDP socket per network channel
giuliomoro@54 27 };
giuliomoro@54 28
giuliomoro@24 29 float gFrequency;
giuliomoro@24 30 float gPhase;
giuliomoro@24 31 float gInverseSampleRate;
giuliomoro@24 32 int gCount=0;
giuliomoro@54 33 //networkData networkObject;
giuliomoro@54 34 #define numNetAudio 3 // number of network audio channels, each sent as its own UDP stream
giuliomoro@54 35 networkAudio netAudio[numNetAudio];
giuliomoro@41 36 extern PRU *gPRU;
giuliomoro@41 37 AuxiliaryTask printIntervalTask;
giuliomoro@41 38 AuxiliaryTask transmitReceiveAudioTask;
giuliomoro@41 39 void transmitReceiveData(){ //transmit and receive asynchronous control messages (currently disabled: the body below is commented out)
giuliomoro@41 40 // printf("transmitReceiveData auxiliary task has started\n");
giuliomoro@41 41 // while(!gShouldStop){
giuliomoro@41 42 // sendMessage(&networkObject);
giuliomoro@41 43 // receiveMessage(networkObject);
giuliomoro@41 44 // usleep(1000);
giuliomoro@41 45 // }
giuliomoro@41 46 // closeSockets();
giuliomoro@41 47 }
giuliomoro@24 48
giuliomoro@41 49 void transmitReceiveAudio(){ //transmit and receive audio buffers (only transmission is implemented here): runs as an auxiliary task and sends every buffer that render() has flagged as ready
giuliomoro@54 50 for(int n=0;n<numNetAudio; n++){
giuliomoro@54 51 if(netAudio[n].toBeSent){
giuliomoro@54 52 netAudio[n].toBeSent=false;
giuliomoro@54 53 netAudio[n].udpClient.send(netAudio[n].buffers[!netAudio[n].currentBuffer],NETWORK_AUDIO_BUFFER_SIZE*sizeof(float));
giuliomoro@54 54 netAudio[n].doneOnTime=1;
giuliomoro@54 55 }
giuliomoro@54 56 }
giuliomoro@24 57 }
giuliomoro@24 58
giuliomoro@41 59
giuliomoro@24 60 // initialise_render() is called once before the audio rendering starts.
giuliomoro@24 61 // Use it to perform any initialisation and allocation which is dependent
giuliomoro@24 62 // on the period size or sample rate.
giuliomoro@24 63 //
giuliomoro@24 64 // userData holds an opaque pointer to a data structure that was passed
giuliomoro@24 65 // in from the call to initAudio().
giuliomoro@24 66 //
giuliomoro@24 67 // Return true on success; returning false halts the program.
giuliomoro@24 68 bool initialise_render(int numMatrixChannels, int numDigitalChannels, int numAudioChannels,
giuliomoro@24 69 int numMatrixFramesPerPeriod,
giuliomoro@24 70 int numAudioFramesPerPeriod,
giuliomoro@24 71 float matrixSampleRate, float audioSampleRate,
giuliomoro@24 72 void *userData, RTAudioSettings *settings)
giuliomoro@24 73 {
giuliomoro@24 74 // Retrieve a parameter passed in from the initAudio() call
giuliomoro@24 75 gFrequency = *(float *)userData;
giuliomoro@24 76
giuliomoro@24 77 gInverseSampleRate = 1.0 / audioSampleRate;
giuliomoro@24 78 gPhase = 0.0;
giuliomoro@24 79
giuliomoro@41 80 // networkObject.counter=&gCount;
giuliomoro@41 81 // networkObject.variables[0]=&gFrequency;
giuliomoro@41 82 // networkObject.variables[1]=&gPhase;
giuliomoro@41 83 // networkObject.numVariables=2;
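// Each network audio channel transmits to its own UDP port (transmitPort, transmitPort+1, ...) on the same server.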
giuliomoro@54 84 for(int n=0; n<numNetAudio; n++){
giuliomoro@54 85 netAudio[n].doneOnTime=1;
giuliomoro@54 86 netAudio[n].index=0;
giuliomoro@54 87 netAudio[n].currentBuffer=0;
giuliomoro@54 88 netAudio[n].toBeSent=false;
giuliomoro@54 89 netAudio[n].udpClient.setPort(settings->transmitPort+n);
giuliomoro@54 90 netAudio[n].udpClient.setServer(settings->serverName);
giuliomoro@54 91 }
giuliomoro@41 92 // setupSockets(settings->receivePort, settings->transmitPort, settings->serverName);
giuliomoro@54 93
giuliomoro@54 94 // transmitReceiveDataTask=createAuxiliaryTaskLoop(*transmitReceiveData, 10, "transmit-receive-data");
giuliomoro@54 95 // scheduleAuxiliaryTask(transmitReceiveDataTask); //here it does not work
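// Create the auxiliary task that transmits the audio buffers outside of the audio callback; it is scheduled from render() whenever a buffer becomes ready.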
giuliomoro@54 96 transmitReceiveAudioTask=createAuxiliaryTaskLoop(*transmitReceiveAudio, 98, "transmit-receive-audio");
giuliomoro@24 97 return true;
giuliomoro@24 98 }
giuliomoro@24 99
giuliomoro@24 100 // render() is called regularly at the highest priority by the audio engine.
giuliomoro@24 101 // Input and output are given from the audio hardware and the other
giuliomoro@24 102 // ADCs and DACs (if available). If only audio is available, numMatrixFrames
giuliomoro@24 103 // will be 0.
giuliomoro@24 104
giuliomoro@24 105 void render(int numAnalogFrames, int numAudioFrames, int numDigitalFrames, float *audioIn, float *audioOut,
giuliomoro@24 106 float *analogIn, float *analogOut, uint32_t *digital)
giuliomoro@24 107 {
giuliomoro@24 108 for(int n = 0; n < numAudioFrames; n++) {
giuliomoro@24 109 float out = 0.7f * sinf(gPhase);
giuliomoro@24 110 gPhase += 2.0 * M_PI * gFrequency * gInverseSampleRate;
giuliomoro@24 111 if(gPhase > 2.0 * M_PI)
giuliomoro@24 112 gPhase -= 2.0 * M_PI;
giuliomoro@24 113
giuliomoro@41 114 // for(int channel = 0; channel < gNumAudioChannels; channel++)
giuliomoro@41 115 // audioOut[n * gNumAudioChannels + channel] = audioIn[n * gNumAudioChannels + 0]+audioIn[n * gNumAudioChannels + 1];
giuliomoro@54 116 audioOut[n * gNumAudioChannels] = audioIn[n*gNumAudioChannels+0]; // channel 0: pass audio input 0 through
giuliomoro@41 117 audioOut[n * gNumAudioChannels+1]=out; // channel 1: the sine tone
giuliomoro@41 118 if(0==gCount){
giuliomoro@41 119 // scheduleAuxiliaryTask(transmitReceiveDataTask);
giuliomoro@24 120 }
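// When a network buffer fills up: flag it for sending, swap to the other buffer and
// wake the auxiliary task. If doneOnTime is still 0 at this point, the previous
// buffer was not sent in time, i.e. a network buffer underrun.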
giuliomoro@54 121 for(int j=0; j<numNetAudio; j++){
giuliomoro@54 122 if(netAudio[j].index==(NETWORK_AUDIO_BUFFER_SIZE)){ // when the buffer is ready ...
giuliomoro@54 123 netAudio[j].toBeSent=true;
giuliomoro@54 124 netAudio[j].index=0; //reset the counter
giuliomoro@54 125 if(netAudio[j].doneOnTime==0)
giuliomoro@54 126 rt_printf("Network buffer underrun :-{\n");
giuliomoro@54 127 netAudio[j].timestamp=gCount;
giuliomoro@54 128 netAudio[j].currentBuffer=!netAudio[j].currentBuffer; //switch buffer
giuliomoro@54 129 netAudio[j].doneOnTime=0;
giuliomoro@54 130 scheduleAuxiliaryTask(transmitReceiveAudioTask); //send the buffer
giuliomoro@54 131 }
giuliomoro@54 132 }
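// Channels 1 and 2 carry the analog inputs (mixed with audio output 0); they are written
// only on every other audio frame, presumably because the analog I/O runs at half the audio sample rate.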
giuliomoro@54 133 if((gCount&1)==0){
giuliomoro@54 134 netAudio[1].buffers[netAudio[1].currentBuffer][netAudio[1].index++]=analogRead(0,n/2)+audioOut[n*gNumAudioChannels + 0];
giuliomoro@54 135 netAudio[2].buffers[netAudio[2].currentBuffer][netAudio[2].index++]=analogRead(1,n/2)+audioOut[n*gNumAudioChannels + 0];
giuliomoro@54 136 }
giuliomoro@54 137 netAudio[0].buffers[netAudio[0].currentBuffer][netAudio[0].index++]=0.5*(out+audioOut[n*gNumAudioChannels + 0]);//channel 0: mix of the sine and audio output channel 0
giuliomoro@54 138 // netAudio[1].buffers[netAudio[1].currentBuffer][netAudio[1].index++]=0.5*(out+audioOut[n*gNumAudioChannels + 0]);
giuliomoro@54 139 // netAudio[2].buffers[netAudio[2].currentBuffer][netAudio[2].index++]=0.5*(out+audioOut[n*gNumAudioChannels + 0]);
giuliomoro@24 140 gCount++;
giuliomoro@24 141 }
giuliomoro@24 142 }
giuliomoro@24 143
giuliomoro@24 144 // cleanup_render() is called once at the end, after the audio has stopped.
giuliomoro@24 145 // Release any resources that were allocated in initialise_render().
giuliomoro@24 146
giuliomoro@24 147 void cleanup_render()
giuliomoro@24 148 {
giuliomoro@41 149 // closeSockets();
giuliomoro@24 150 }