b@1607
|
/**
 * core.js
 *
 * Main script to run, calls all other core functions and manages loading/store to backend.
 * Also contains all global variables.
 */

/* create the web audio API context and store in audioContext*/
var audioContext; // Hold the browser web audio API (AudioContext instance, created in window.onload)
var projectXML; // Hold the parsed setup XML (jQuery-wrapped document, set in loadProjectSpecCallback)

var testXMLSetups = []; // Hold the parsed test instances
var testResultsHolders =[]; // Hold the results from each test for publishing to XML
var currentTrackOrder = []; // Hold the current XML tracks in their (randomised) order
var currentTestHolder; // Hold any intermediate results during test - metrics
var audioEngineContext; // The custome AudioEngine object (constructed in window.onload)
var projectReturn; // Hold the URL for the return
var preTestQuestions = document.createElement('PreTest'); // Store any pre-test question response
var postTestQuestions = document.createElement('PostTest'); // Store any post-test question response

// Add a prototype to the bufferSourceNode to reference to the audioObject holding it
// NOTE(review): nothing in this file assigns `owner`; presumably the interface
// script (e.g. ape.js) or audioObject.play sets it — verify against callers.
AudioBufferSourceNode.prototype.owner = undefined;
|
b@1607
|
23
|
b@1607
|
window.onload = function() {
	// Function called once the browser has loaded all files.
	// This should perform any initial commands such as structure / loading documents.

	// Create a web audio API context.
	// window.webkitAudioContext is the vendor-prefixed constructor for older WebKit browsers.
	var AudioContext = window.AudioContext || window.webkitAudioContext;
	// FIX: invoke the constructor with parentheses (the original `new AudioContext;`
	// works but omits the call parentheses, which is unidiomatic and error-prone).
	audioContext = new AudioContext();

	// Create the global audio engine object
	audioEngineContext = new AudioEngine();
};
|
b@1607
|
36
|
b@1607
|
function loadProjectSpec(url) {
	// Load the project document from the given URL, decode the XML and instruct
	// audioEngine to get audio data.
	// NOTE(review): the original comment promised "If url is null, request client
	// to upload project XML document", but that path is not implemented here.
	var r = new XMLHttpRequest();
	r.open('GET',url,true);
	r.onload = function() {
		loadProjectSpecCallback(r.response);
	};
	// FIX: report network-level failures instead of failing silently.
	r.onerror = function() {
		console.log('ERR: Could not load project specification from '+url);
	};
	r.send();
}
|
b@1607
|
47
|
b@1607
|
function loadProjectSpecCallback(response) {
	// Function called after asynchronous download of XML project specification.
	// Parses the XML, detects the requested interface and injects the matching
	// script (and stylesheet) into the document head.
	var decode = $.parseXML(response);
	projectXML = $(decode);

	// Now extract the setup tag
	var xmlSetup = projectXML.find('setup');
	// Detect the interface to use and load the relevant javascripts.
	var interfaceType = xmlSetup[0].attributes['interface'];
	// FIX: guard against a missing "interface" attribute, which previously
	// caused a TypeError on `interfaceType.value` below.
	if (interfaceType == undefined) {
		console.log('ERR: Project setup node has no interface attribute');
		return;
	}
	var interfaceJS = document.createElement('script');
	interfaceJS.setAttribute("type","text/javascript");
	if (interfaceType.value == 'APE') {
		interfaceJS.setAttribute("src","ape.js");

		// APE comes with a css file
		var css = document.createElement('link');
		css.rel = 'stylesheet';
		css.type = 'text/css';
		css.href = 'ape.css';

		document.getElementsByTagName("head")[0].appendChild(css);
		// FIX: only append the script element once it has a src; previously an
		// empty <script> was appended for unrecognised interface types.
		document.getElementsByTagName("head")[0].appendChild(interfaceJS);
	} else {
		console.log('ERR: Unknown interface type "'+interfaceType.value+'"');
	}
}
|
b@1607
|
72
|
b@1607
|
function createProjectSave(destURL) {
	// Save the data from interface into XML and send to destURL.
	// If destURL is null/undefined then the XML is offered as a client-side download.
	// NOTE(review): sending to destURL is not implemented here — only the
	// local-download branch exists; confirm upload handling lives elsewhere.
	// Returns the download-point <div> in the local-download case, otherwise undefined.
	var xmlDoc = interfaceXMLSave();
	// FIX: declare submitDiv at function scope. The original declared it inside
	// the if-block and returned it outside, relying on `var` hoisting and
	// silently returning undefined on the destURL path.
	var submitDiv;
	if (destURL == "null" || destURL == undefined) {
		// Serialise the XML node by rendering it inside a detached <div>
		var parent = document.createElement("div");
		parent.appendChild(xmlDoc);
		var file = [parent.innerHTML];
		var bb = new Blob(file,{type : 'application/xml'});
		var dnlk = window.URL.createObjectURL(bb);
		var a = document.createElement("a");
		a.hidden = '';
		a.href = dnlk;
		a.download = "save.xml";
		a.textContent = "Save File";

		submitDiv = document.getElementById('download-point');
		submitDiv.appendChild(a);
	}
	return submitDiv;
}
|
b@1607
|
95
|
b@1607
|
function AudioEngine() {

	// Create two output paths, the main outputGain and fooGain.
	// Output gain is default to 1 and any items for playback route here.
	// Foo gain is used for analysis to ensure paths get processed, but are not heard,
	// because web audio will optimise and any route which does not go to the
	// destination gets ignored.
	this.outputGain = audioContext.createGain();
	this.fooGain = audioContext.createGain();
	// FIX: `gain` is an AudioParam; the original `this.fooGain.gain = 0` does not
	// change the gain (the property is read-only) and the node stayed audible.
	// The numeric value must be assigned via `.value`.
	this.fooGain.gain.value = 0;

	// Use this to detect playback state: 0 - stopped, 1 - playing
	this.status = 0;

	// Connect both gains to output
	this.outputGain.connect(audioContext.destination);
	this.fooGain.connect(audioContext.destination);

	// Create the timer Object
	this.timer = new timer();
	// Create session metrics
	this.metric = new sessionMetrics(this);

	this.loopPlayback = false;

	// Create store for new audioObjects
	this.audioObjects = [];

	// Transport controls: placeholders, populated by the interface layer.
	this.play = function(){};

	this.stop = function(){};


	this.newTrack = function(url) {
		// Pull data from given URL into new audio buffer.
		// URLs must either be from the same source OR be setup to 'Access-Control-Allow-Origin'.

		// Create the audioObject with ID of the new track length.
		// FIX: declare with var — the original leaked audioObjectId as an implicit global.
		var audioObjectId = this.audioObjects.length;
		this.audioObjects[audioObjectId] = new audioObject(audioObjectId);

		// AudioObject will get track itself.
		this.audioObjects[audioObjectId].constructTrack(url);
	};

}
|
b@1607
|
141
|
b@1607
|
function audioObject(id) {
	// The main buffer object with common control nodes to the AudioEngine.

	this.id = id;
	this.state = 0; // 0 - no data, 1 - buffer decoded and ready
	this.url = null; // Hold the URL given for the output back to the results.
	this.metric = new metricTracker();

	// Create a buffer and external gain control to allow internal patching of
	// effects and volume leveling.
	this.bufferNode = undefined;
	this.outputGain = audioContext.createGain();

	// Default output gain to be zero (muted until the interface raises it)
	this.outputGain.gain.value = 0.0;

	// Connect buffer to the audio graph
	this.outputGain.connect(audioEngineContext.outputGain);

	// The AudioBufferSourceNode is not designed for multi-start playback.
	// When stopped, the buffer node is deleted and recreated with the stored buffer.
	// FIX: the original `this.buffer;` was a no-op expression statement;
	// assign explicitly so the decoded-data slot is a real, documented property.
	this.buffer = undefined;

	this.play = function(startTime) {
		this.bufferNode = audioContext.createBufferSource();
		// Back-reference to this audioObject, per the `owner` prototype declared
		// at the top of the file (presumably used by interface callbacks — verify).
		this.bufferNode.owner = this;
		this.bufferNode.connect(this.outputGain);
		this.bufferNode.buffer = this.buffer;
		this.bufferNode.loop = audioEngineContext.loopPlayback;
		this.bufferNode.start(startTime);
	};

	this.stop = function() {
		// Guard: stop() may be called before any playback has begun.
		if (this.bufferNode != undefined)
		{
			this.bufferNode.stop(0);
			this.bufferNode = undefined;
		}
	};

	this.constructTrack = function(url) {
		// Fetch the audio file at url as an ArrayBuffer and decode it asynchronously.
		var request = new XMLHttpRequest();
		this.url = url;
		request.open('GET',url,true);
		request.responseType = 'arraybuffer';

		var audioObj = this; // capture for the callbacks below

		// Create callback to decode the data asynchronously
		request.onloadend = function() {
			audioContext.decodeAudioData(request.response, function(decodedData) {
				audioObj.buffer = decodedData;
				audioObj.state = 1;
			}, function(){
				// Should only be called if there was an error, but sometimes gets
				// called continuously; check here if the error is genuine.
				if (audioObj.state == 0 || audioObj.buffer == undefined) {
					// Genuine error
					console.log('FATAL - Error loading buffer on '+audioObj.id);
				}
			});
		};
		request.send();
	};

}
|
b@1607
|
206
|
b@1607
|
function timer()
{
	/* Timer object used in audioEngine to keep track of session timings.
	 * Backed by the Web Audio API clock, so sample-accurate resolution.
	 */
	this.testStarted = false;
	this.testStartTime = 0;
	this.testDuration = 0;
	this.minimumTestTime = 0; // No minimum test time enforced

	// Begin timing the test; a second call while running is a no-op.
	this.startTest = function()
	{
		if (this.testStarted) {
			return;
		}
		this.testStartTime = audioContext.currentTime;
		this.testStarted = true;
		this.updateTestTime();
		audioEngineContext.metric.initialiseTest();
	};

	// Finish timing, freezing testDuration; logs an error if never started.
	this.stopTest = function()
	{
		if (!this.testStarted) {
			console.log('ERR: Test tried to end before beginning');
			return;
		}
		this.testDuration = this.getTestTime();
		this.testStarted = false;
	};

	// Refresh testDuration from the audio clock while the test is running.
	this.updateTestTime = function()
	{
		if (this.testStarted) {
			this.testDuration = audioContext.currentTime - this.testStartTime;
		}
	};

	// Return the elapsed test time in seconds (updated if still running).
	this.getTestTime = function()
	{
		this.updateTestTime();
		return this.testDuration;
	};
}
|
b@1607
|
249
|
b@1607
|
function sessionMetrics(engine)
{
	/* Used by audioEngine to link to audioObjects to minimise the timer call timers. */
	this.engine = engine;       // back-reference to the owning AudioEngine
	this.lastClicked = -1;      // id of the most recently clicked track, -1 if none
	this.data = -1;             // placeholder metric payload
	// Hook invoked when the session timer starts; populated by the interface layer.
	this.initialiseTest = function(){};
}
|
b@1607
|
259
|
b@1607
|
function metricTracker()
{
	/* Custom object to track and collect metric data.
	 * Used only inside the audioObjects object.
	 */

	this.listenedTimer = 0;     // accumulated listening time
	this.listenStart = 0;       // timestamp of the current listen, 0 when idle
	this.initialPosition = -1;  // first recorded position, -1 until initialised
	this.movementTracker = [];  // history of [time, position] pairs
	this.wasListenedTo = false;
	this.wasMoved = false;
	this.hasComments = false;

	// Record the starting position exactly once; later calls are ignored.
	this.initialised = function(position)
	{
		if (this.initialPosition == -1) {
			this.initialPosition = position;
		}
	};

	// Log a movement event as a [time, position] pair.
	this.moved = function(time,position)
	{
		this.wasMoved = true;
		this.movementTracker.push([time, position]);
	};

	// Toggle-style listen tracking: the first call marks the start time and
	// the matching second call accumulates the elapsed listening interval.
	this.listening = function(time)
	{
		if (this.listenStart == 0)
		{
			this.wasListenedTo = true;
			this.listenStart = time;
		} else {
			this.listenedTimer += (time - this.listenStart);
			this.listenStart = 0;
		}
	};
}
|
b@1607
|
299
|
b@1607
|
function randomiseOrder(input)
{
	// This takes an array of information and randomises the order.
	// Returns a NEW array; note the input array is consumed (emptied) by the
	// splice calls below — callers that reuse the input must copy it first.
	var N = input.length;
	// FIX: removed the unused variable `K` present in the original.
	var holdArr = [];
	for (var n=0; n<N; n++)
	{
		// Pick a uniform random index over the elements still remaining in input
		var r = Math.floor(Math.random()*input.length);
		// Remove that element from input and append it to the output
		holdArr.push(input.splice(r,1)[0]);
	}
	return holdArr;
}