#include "testApp.h"
#include "stdio.h"
//#include "aubio.h"
#include <iostream>
#include <cstring>
#include <string>
#include <cstdlib>


const double samplingFrequency = 44100.0;

//--------------------------------------------------------------
void testApp::setup(){

    // 2 output channels,
    // 0 input channels
    // 44100 samples per second
    // 256 samples per buffer
    // 4 num buffers (latency)

    // NB: this sound stream setup would be useful if we ever wanted real-time input via ofSoundStream

    receiver.setup( PORT );

    sampleRate = 44100;
    ofSoundStreamSetup(2, 0, this, sampleRate, 256, 4);

    ofSetFrameRate(20);

    eventMatcher.loadAudioFiles();

    eventMatcher.setWindowDimensions();
    //audioFilePlayer.loadAudioFile(infilename);
}


//--------------------------------------------------------------
void testApp::update(){
    eventMatcher.updatePosition();

    checkForOSCmessages();

    outputWriter.writeOutput(eventMatcher.synchroniser.recordedPositionTimeSent, eventMatcher.synchroniser.recordedPositionMillis, eventMatcher.synchroniser.playingPositionMillis);

}
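
// checkForOSCmessages() handles the incoming OSC addresses:
//   /aubioPitch, /kick, /snare, /elec (chroma), /start, /stop,
//   /accompanimentRatio and /rescue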
void testApp::checkForOSCmessages(){
    // check for waiting messages
    while( receiver.hasWaitingMessages() )
    {
        // get the next message
        ofxOscMessage m;
        receiver.getNextMessage( &m );

        // pitch event from the aubio pitch tracker
        if ( m.getAddress() == "/aubioPitch" ){
            int testChannel = m.getArgAsInt32(0);
            float pitchIn = m.getArgAsFloat(1);
            int timeIn = m.getArgAsInt32(2);
            printf("\nAUBIO PITCH RECEIVED %f at time %i\n", pitchIn, timeIn);
            eventMatcher.newPitchEvent(testChannel, pitchIn, timeIn);
        }

        if ( m.getAddress() == "/kick" ){
            // float pitchIn = m.getArgAsFloat(0);
            int testChannel = m.getArgAsInt32(0);
            double timeIn = m.getArgAsInt32(1);
            printf("\nKICK RECEIVED at time %f\n", timeIn);

            eventMatcher.newKickEvent(testChannel, timeIn);

        }

        if ( m.getAddress() == "/snare" ){
            int testChannel = m.getArgAsInt32(0);
            double timeIn = m.getArgAsInt32(1);
            printf("\nSNARE RECEIVED at time %f\n", timeIn);

            eventMatcher.newSnareEvent(testChannel, timeIn);
        }

        // chroma event: a 12-bin chroma vector from the harmonic channel
        if ( m.getAddress() == "/elec" ){
            int testChannel = m.getArgAsInt32(0);
            double timeIn = m.getArgAsFloat(1);
            float chromaIn[12];

            printf("CHROMA received at time %f\n", timeIn);
            for (int i = 0; i < 12; i++){
                chromaIn[i] = m.getArgAsFloat(i+2);
                // printf("chroma[%i]: %f\n", i, chromaIn[i]);
            }
            eventMatcher.newChromaEvent(testChannel, chromaIn, timeIn);
        }


        if ( m.getAddress() == "/start" ){
            printf("start!\n");
            printf("START TIME IN %i\n", ofGetElapsedTimeMillis());
            eventMatcher.startPlaying();
            outputWriter.openFile();
            printf("TIME OUT %i\n", ofGetElapsedTimeMillis());
        }

        if ( m.getAddress() == "/stop" ){
            printf("stop!\n");
            eventMatcher.stopPlaying();
            outputWriter.closeFile();
        }

        if ( m.getAddress() == "/accompanimentRatio" ){
            double time = m.getArgAsFloat(0);
            double ratio = m.getArgAsFloat(1);
            eventMatcher.synchroniser.setPlayingRatio(ratio, time);
        }

        if ( m.getAddress() == "/rescue" ){
            printf("rescue!\n");
            eventMatcher.rescue();
        }

    }
}

//--------------------------------------------------------------
void testApp::draw(){

    eventMatcher.draw();

    // audioFilePlayer.draw();

}

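// keyboard controls:
//   '-' / '+'   volume down / up
//   'q'         switch screens
//   'd'         toggle chroma dot product matching
//   space       toggle play of the recorded tracks
//   'j'         print the posterior scalar
//   return      stop the recorded tracks
//   'o'         open a new audio file via a dialog box
//   'p'         print the posterior array
//   up / down   zoom out / zoom in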
//--------------------------------------------------------------
void testApp::keyPressed (int key){
    if (key == '-'){
        volume -= 0.05;
        volume = MAX(volume, 0);
    } else if (key == '+'){
        volume += 0.05;
        volume = MIN(volume, 1);
    }

    if (key == 'q'){
        eventMatcher.recordedTracks.switchScreens();
    }

    if (key == OF_KEY_RIGHT){
        // audioFilePlayer.loadedAudio.setPosition(min(1.0, audioFilePlayer.loadedAudio.getPosition() + (audioFilePlayer.fileLoader.audioHolder.audioScaleSamples/(4.0*audioFilePlayer.fileLoader.audioHolder.audioVector.size()))) );
    }

    if (key == OF_KEY_LEFT){
        // audioFilePlayer.loadedAudio.setPosition(max(0.0, audioFilePlayer.loadedAudio.getPosition() - (audioFilePlayer.fileLoader.audioHolder.audioScaleSamples/(4.0*audioFilePlayer.fileLoader.audioHolder.audioVector.size()))));
    }

    if (key == 'd'){
        eventMatcher.useChromaDotProduct = !eventMatcher.useChromaDotProduct;
        printf("Use dot product is %i\n", eventMatcher.useChromaDotProduct);
    }

    if (key == ' '){
        eventMatcher.recordedTracks.togglePlay();
    }

    if (key == 'j'){
        printf("dynamic vector scalar is %f\n", eventMatcher.bayesianStruct.posterior.scalar);
    }

    if (key == OF_KEY_RETURN){
        eventMatcher.recordedTracks.stop();
    }

    if (key == 'o'){
        openNewAudioFileWithdialogBox();
    }

    if (key == 'p'){
        eventMatcher.bayesianStruct.posterior.printArray();
    }

    if (key == OF_KEY_UP){
        eventMatcher.recordedTracks.zoomOut();
    }

    if (key == OF_KEY_DOWN){
        eventMatcher.recordedTracks.zoomIn();
    }

}

//--------------------------------------------------------------
void testApp::keyReleased (int key){

}


//--------------------------------------------------------------
void testApp::mouseMoved(int x, int y ){

}

//--------------------------------------------------------------
void testApp::mouseDragged(int x, int y, int button){

}

//--------------------------------------------------------------
void testApp::mousePressed(int x, int y, int button){

}


//--------------------------------------------------------------
void testApp::mouseReleased(int x, int y, int button){

}

//--------------------------------------------------------------
void testApp::windowResized(int w, int h){

    eventMatcher.windowResized(w, h);

}
//--------------------------------------------------------------
void testApp::audioRequested (float * output, int bufferSize, int nChannels){
    //pan = 0.5f;
    float leftScale = 1 - pan;
    float rightScale = pan;
    // note: nothing is written to the output buffer here
}


//--------------------------------------------------------------
void testApp::openNewAudioFileWithdialogBox(){
    std::string filename;
    // only load if the dialog box returned a filename (i.e. the user did not cancel)
    if (getFilenameFromDialogBox(&filename))
        loadNewAudio(filename);
}


void testApp::loadNewAudio(string soundFileName){

    eventMatcher.recordedTracks.loadedAudioFiles[0].loadAudioFile(soundFileName);

    // for (int i = 0; i < numberOfAudioTracks; i++)
    //     loadedAudioFiles[i].loadAudioFile(soundFileName);

    // audioFilePlayer.loadAudioFile(soundFileName);

}


bool testApp::getFilenameFromDialogBox(std::string* fileNameToSave){
    // writes the chosen filename into *fileNameToSave and returns true if the dialogue box was used successfully

    // first, create a string that will hold the URL
    string URL;

    // openFile(string& URL) returns 1 if a file was picked
    // returns 0 when something went wrong or the user pressed 'cancel'
    int response = ofxFileDialogOSX::openFile(URL);
    if(response){
        // now you can use the URL
        *fileNameToSave = URL;
        //printf("\n filename is %s \n", soundFileName.c_str());
        return true;
    }
    else {
        // soundFileName = "OPEN canceled. ";
        printf("\n open file cancelled \n");
        return false;
    }

}