annotate core.js @ 50:f1a189a102f0 Dev_main

Updated metric functions. Completed Metric Exporting.
author Nicholas Jillings <n.g.r.jillings@se14.qmul.ac.uk>
date Mon, 13 Apr 2015 09:58:16 +0100
parents b5cd02cb262f
children cada56696a15
rev   line source
nicholas@1 1 /**
nicholas@1 2 * core.js
nicholas@1 3 *
nicholas@1 4 * Main script to run, calls all other core functions and manages loading/store to backend.
nicholas@1 5 * Also contains all global variables.
nicholas@1 6 */
nicholas@1 7
n@32 8
n@32 9 /*
n@32 10 *
n@32 11 * WARNING!!!
n@32 12 *
n@32 13 * YOU ARE VIEWING THE DEV VERSION. THERE IS NO GUARANTEE THIS WILL BE FULLY FUNCTIONAL
n@32 14 *
n@32 15 * WARNING!!!
n@32 16 *
n@32 17 */
n@32 18
n@32 19
n@32 20
n@32 21
nicholas@1 22 /* create the web audio API context and store in audioContext*/
n@33 23 var audioContext; // Hold the browser web audio API
n@33 24 var projectXML; // Hold the parsed setup XML
n@38 25
n@44 26 var testXMLSetups = []; // Hold the parsed test instances
n@44 27 var testResultsHolders = []; // Hold the results from each test for publishing to XML
n@45 28 var currentTrackOrder = []; // Hold the current XML tracks in their (randomised) order
n@46 29 var currentTestHolder; // Hold any intermediate results (metrics) during the test
n@33 30 var audioEngineContext; // The custom AudioEngine object
n@33 31 var projectReturn; // Hold the URL for returning the results
n@33 32 var preTestQuestions = document.createElement('PreTest'); // Store any pre-test question response
n@33 33 var postTestQuestions = document.createElement('PostTest'); // Store any post-test question response
nicholas@1 34
nicholas@1 35 window.onload = function() {
nicholas@1 36 // Function called once the browser has loaded all files.
nicholas@1 37 // This should perform any initial commands, such as building the structure or loading documents
nicholas@1 38
nicholas@1 39 // Create a web audio API context
nicholas@21 40 // Fixed for cross-browser support
nicholas@21 41 var AudioContext = window.AudioContext || window.webkitAudioContext;
nicholas@7 42 audioContext = new AudioContext();
nicholas@1 43
nicholas@1 44 // Create the audio engine object
nicholas@1 45 audioEngineContext = new AudioEngine();
n@16 46 };
nicholas@1 47
nicholas@1 48 function loadProjectSpec(url) {
nicholas@1 49 // Load the project document from the given URL, decode the XML and instruct audioEngine to get audio data
nicholas@1 50 // If url is null, request client to upload project XML document
nicholas@2 51 var r = new XMLHttpRequest();
nicholas@2 52 r.open('GET',url,true);
nicholas@2 53 r.onload = function() {
nicholas@2 54 loadProjectSpecCallback(r.response);
n@16 55 };
nicholas@2 56 r.send();
n@16 57 }
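
// Usage sketch (illustrative only, not part of core.js): the hosting page is
// expected to call loadProjectSpec with the URL of its project specification
// document once loading has finished. The filename below is a hypothetical example.
//
//     loadProjectSpec('example-project.xml');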
nicholas@2 58
nicholas@2 59 function loadProjectSpecCallback(response) {
nicholas@2 60 // Function called after asynchronous download of XML project specification
nicholas@2 61 var decode = $.parseXML(response);
nicholas@2 62 projectXML = $(decode);
nicholas@2 63
nicholas@2 64 // Now extract the setup tag
nicholas@2 65 var xmlSetup = projectXML.find('setup');
n@16 66 // Detect the interface to use and load the relevant javascripts.
nicholas@2 67 var interfaceType = xmlSetup[0].attributes['interface'];
nicholas@2 68 var interfaceJS = document.createElement('script');
nicholas@2 69 interfaceJS.setAttribute("type","text/javascript");
nicholas@2 70 if (interfaceType.value == 'APE') {
nicholas@2 71 interfaceJS.setAttribute("src","ape.js");
n@33 72
n@33 73 // APE comes with a css file
n@33 74 var css = document.createElement('link');
n@33 75 css.rel = 'stylesheet';
n@33 76 css.type = 'text/css';
n@33 77 css.href = 'ape.css';
n@33 78
n@33 79 document.getElementsByTagName("head")[0].appendChild(css);
nicholas@2 80 }
nicholas@2 81 document.getElementsByTagName("head")[0].appendChild(interfaceJS);
nicholas@1 82 }
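
// For reference, loadProjectSpecCallback expects the project XML to contain a
// <setup> element whose 'interface' attribute names the interface to load. A
// minimal sketch of the relevant fragment (child content elided):
//
//     <setup interface="APE">
//         ...
//     </setup>
//
// When the value is 'APE', ape.js and its accompanying ape.css are appended to
// the document head; only the 'APE' value is handled in this revision.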
nicholas@1 83
nicholas@1 84 function createProjectSave(destURL) {
nicholas@1 85 // Save the data from interface into XML and send to destURL
nicholas@1 86 // If destURL is null then download XML in client
nicholas@7 87 // Now time to render file locally
nicholas@7 88 var xmlDoc = interfaceXMLSave();
nicholas@7 89 if (destURL == "null" || destURL == undefined) {
nicholas@7 90 var parent = document.createElement("div");
nicholas@7 91 parent.appendChild(xmlDoc);
nicholas@7 92 var file = [parent.innerHTML];
nicholas@7 93 var bb = new Blob(file,{type : 'application/xml'});
nicholas@7 94 var dnlk = window.URL.createObjectURL(bb);
nicholas@7 95 var a = document.createElement("a");
nicholas@7 96 a.hidden = '';
nicholas@7 97 a.href = dnlk;
nicholas@7 98 a.download = "save.xml";
nicholas@7 99 a.textContent = "Save File";
nicholas@7 100
nicholas@7 101 var submitDiv = document.getElementById('download-point');
nicholas@7 102 submitDiv.appendChild(a);
nicholas@7 103 }
n@43 104 return submitDiv; // Undefined unless the local download branch above was taken
nicholas@1 105 }
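
// Usage sketch (illustrative only): calling createProjectSave with no destination
// takes the local-download branch, rendering the results XML (as returned by
// interfaceXMLSave, which the interface script is assumed to provide) and appending
// a "Save File" link to the element with id 'download-point'.
//
//     createProjectSave();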
nicholas@1 106
nicholas@1 107 function AudioEngine() {
nicholas@1 108
nicholas@1 109 // Create two output paths, the main outputGain and fooGain.
nicholas@1 110 // outputGain defaults to 1 and anything intended for playback routes here.
nicholas@1 111 // fooGain is used for analysis paths which must be processed but not heard,
nicholas@1 112 // because Web Audio optimises away any route that does not reach the destination.
nicholas@1 113 this.outputGain = audioContext.createGain();
nicholas@1 114 this.fooGain = audioContext.createGain();
nicholas@1 115 this.fooGain.gain.value = 0;
nicholas@1 116
nicholas@7 117 // Use this to detect playback state: 0 - stopped, 1 - playing
nicholas@7 118 this.status = 0;
nicholas@7 119
nicholas@1 120 // Connect both gains to output
nicholas@1 121 this.outputGain.connect(audioContext.destination);
nicholas@1 122 this.fooGain.connect(audioContext.destination);
nicholas@1 123
n@49 124 // Create the timer Object
n@49 125 this.timer = new timer();
n@49 126 // Create session metrics
n@49 127 this.metric = new sessionMetrics(this);
n@49 128
nicholas@1 129 // Create store for new audioObjects
nicholas@1 130 this.audioObjects = [];
nicholas@1 131
nicholas@1 132 this.play = function() {
nicholas@1 133 // Send play command to all playback buffers for synchronised start
nicholas@1 134 // Also start timer callbacks to detect if playback has finished
nicholas@7 135 if (this.status == 0) {
n@49 136 this.timer.startTest();
nicholas@7 137 // Get the current clock time
nicholas@7 138 var startTime = audioContext.currentTime;
nicholas@7 139 // Add 3 seconds of lead-in before the synchronised start
nicholas@7 140 startTime += 3.0;
nicholas@7 141
nicholas@7 142 // Send play to all tracks
nicholas@7 143 for (var i=0; i<this.audioObjects.length; i++)
nicholas@7 144 {
nicholas@7 145 this.audioObjects[i].play(startTime);
nicholas@7 146 }
nicholas@7 147 this.status = 1;
nicholas@7 148 }
n@16 149 };
nicholas@1 150
nicholas@1 151 this.stop = function() {
nicholas@1 152 // Send stop and reset command to all playback buffers
nicholas@7 153 if (this.status == 1) {
nicholas@7 154 for (var i=0; i<this.audioObjects.length; i++)
nicholas@7 155 {
nicholas@7 156 this.audioObjects[i].stop();
nicholas@7 157 }
nicholas@7 158 this.status = 0;
nicholas@7 159 }
n@16 160 };
nicholas@1 161
nicholas@8 162 this.selectedTrack = function(id) {
nicholas@8 163 for (var i=0; i<this.audioObjects.length; i++)
nicholas@8 164 {
nicholas@8 165 if (id == i) {
nicholas@8 166 this.audioObjects[i].outputGain.gain.value = 1.0;
nicholas@8 167 } else {
nicholas@8 168 this.audioObjects[i].outputGain.gain.value = 0.0;
nicholas@8 169 }
nicholas@8 170 }
n@16 171 };
nicholas@8 172
nicholas@8 173
nicholas@1 174 this.newTrack = function(url) {
nicholas@1 175 // Pull data from given URL into new audio buffer
nicholas@1 176 // URLs must either be from the same origin OR the server must send 'Access-Control-Allow-Origin' (see the note after audioObject below)
nicholas@7 177
nicholas@1 178 // Create the audioObject with an ID equal to the current number of tracks
n@49 179 var audioObjectId = this.audioObjects.length;
nicholas@1 180 this.audioObjects[audioObjectId] = new audioObject(audioObjectId);
nicholas@7 181
nicholas@7 182 // AudioObject will get track itself.
nicholas@7 183 this.audioObjects[audioObjectId].constructTrack(url);
n@16 184 };
nicholas@1 185
nicholas@1 186 }
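
// Usage sketch (illustrative only; the file URLs are hypothetical): the interface
// script is expected to drive the engine roughly as follows.
//
//     audioEngineContext.newTrack('audio/fragment-0.wav'); // fetch and decode a track
//     audioEngineContext.newTrack('audio/fragment-1.wav');
//     audioEngineContext.play();           // synchronised start ~3 seconds from now
//     audioEngineContext.selectedTrack(1); // unmute track 1, mute all others
//     audioEngineContext.stop();           // stop and reset all buffer nodes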
nicholas@1 187
nicholas@1 188 function audioObject(id) {
nicholas@1 189 // The main buffer object with common control nodes to the AudioEngine
nicholas@1 190
nicholas@1 191 this.id = id;
nicholas@1 192 this.state = 0; // 0 - no data, 1 - ready
n@24 193 this.url = null; // Hold the URL given for the output back to the results.
n@49 194 this.metric = new metricTracker();
nicholas@1 195
nicholas@1 196 // Create a buffer and external gain control to allow internal patching of effects and volume leveling.
nicholas@1 197 this.bufferNode = audioContext.createBufferSource();
nicholas@1 198 this.outputGain = audioContext.createGain();
nicholas@1 199
nicholas@8 200 // Default the output gain to zero so unselected tracks are silent
nicholas@8 201 this.outputGain.gain.value = 0.0;
nicholas@8 202
nicholas@1 203 // Connect buffer to the audio graph
nicholas@1 204 this.bufferNode.connect(this.outputGain);
nicholas@1 205 this.outputGain.connect(audioEngineContext.outputGain);
nicholas@1 206
nicholas@1 207 // The buffer source node is not designed for multi-start playback.
nicholas@1 208 // When stopped, the buffer node is deleted and recreated with the stored buffer.
nicholas@1 209 this.buffer; // Holds the decoded AudioBuffer once constructTrack completes
nicholas@1 210
nicholas@1 211 this.play = function(startTime) {
nicholas@1 212 this.bufferNode.start(startTime);
n@16 213 };
nicholas@1 214
nicholas@1 215 this.stop = function() {
nicholas@1 216 this.bufferNode.stop(0);
nicholas@1 217 this.bufferNode = audioContext.createBufferSource();
nicholas@1 218 this.bufferNode.connect(this.outputGain);
nicholas@1 219 this.bufferNode.buffer = this.buffer;
nicholas@7 220 this.bufferNode.loop = true;
n@16 221 };
nicholas@8 222
nicholas@7 223 this.constructTrack = function(url) {
nicholas@7 224 var request = new XMLHttpRequest();
n@24 225 this.url = url;
nicholas@7 226 request.open('GET',url,true);
nicholas@7 227 request.responseType = 'arraybuffer';
nicholas@7 228
nicholas@7 229 var audioObj = this;
nicholas@7 230
nicholas@7 231 // Create callback to decode the data asynchronously
nicholas@7 232 request.onloadend = function() {
nicholas@7 233 audioContext.decodeAudioData(request.response, function(decodedData) {
nicholas@7 234 audioObj.buffer = decodedData;
nicholas@7 235 audioObj.bufferNode.buffer = audioObj.buffer;
nicholas@7 236 audioObj.bufferNode.loop = true;
nicholas@7 237 audioObj.state = 1;
nicholas@7 238 }, function(){
nicholas@7 239 // This error callback should only fire on a decode error, but it sometimes fires spuriously,
nicholas@7 240 // so check here whether the error is genuine
nicholas@7 241 if (audioObj.state == 0 || audioObj.buffer == undefined) {
nicholas@7 242 // Genuine error
nicholas@7 243 console.log('FATAL - Error loading buffer on '+audioObj.id);
nicholas@7 244 }
nicholas@7 245 });
n@16 246 };
nicholas@7 247 request.send();
n@16 248 };
nicholas@7 249
n@49 250 }
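
// Note on constructTrack (see newTrack above): audio requested from a different
// origin must be served with a CORS header that permits this page, for example
// (server response header, shown here for illustration):
//
//     Access-Control-Allow-Origin: *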
n@49 251
n@49 252 function timer()
n@49 253 {
n@49 254 /* Timer object used in audioEngine to keep track of session timings.
n@49 255 * Uses the Web Audio API clock, so timings have sample-level resolution.
n@49 256 */
n@49 257 this.testStarted = false;
n@49 258 this.testStartTime = 0;
n@49 259 this.testDuration = 0;
n@49 260 this.minimumTestTime = 0; // No minimum test time
n@49 261 this.startTest = function()
n@49 262 {
n@49 263 if (this.testStarted == false)
n@49 264 {
n@49 265 this.testStartTime = audioContext.currentTime;
n@49 266 this.testStarted = true;
n@49 267 this.updateTestTime();
n@49 268 }
n@49 269 };
n@49 270 this.stopTest = function()
n@49 271 {
n@49 272 if (this.testStarted)
n@49 273 {
n@49 274 this.testDuration = this.getTestTime();
n@49 275 this.testStarted = false;
n@49 276 } else {
n@49 277 console.log('ERR: Test tried to end before beginning');
n@49 278 }
n@49 279 };
n@49 280 this.updateTestTime = function()
n@49 281 {
n@49 282 if (this.testStarted)
n@49 283 {
n@49 284 this.testDuration = audioContext.currentTime - this.testStartTime;
n@49 285 }
n@49 286 };
n@49 287 this.getTestTime = function()
n@49 288 {
n@49 289 this.updateTestTime();
n@49 290 return this.testDuration;
n@49 291 };
n@49 292 }
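
// Usage sketch (illustrative only): the timer is started by AudioEngine.play(), so
// interface code normally only needs to read from it.
//
//     audioEngineContext.timer.startTest();                 // begins timing (called from play())
//     var elapsed = audioEngineContext.timer.getTestTime(); // seconds since the test began
//     audioEngineContext.timer.stopTest();                  // freezes testDuration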
n@49 293
n@49 294 function sessionMetrics(engine)
n@49 295 {
n@49 296 /* Used by the audioEngine to link to the audioObjects and minimise the number of timer calls.
n@49 297 */
n@49 298 this.engine = engine;
n@49 299 this.lastClicked = -1;
n@49 300 this.data = -1;
n@49 301
n@49 302 this.sliderMoveStart = function(id)
n@49 303 {
n@49 304 if (this.data == -1)
n@49 305 {
n@49 306 this.data = id;
n@49 307 } else {
n@49 308 console.log('ERROR: Metric tracker detected two simultaneous moves!');
n@49 309 this.data = -1;
n@49 310 }
n@49 311 };
n@49 312 this.sliderMoved = function()
n@49 313 {
n@49 314 var time = engine.timer.getTestTime();
n@49 315 var id = this.data;
n@49 316 this.data = -1;
n@49 317 var sliderObj = document.getElementsByClassName('track-slider')[id];
n@49 318 var position = Number(sliderObj.style.left.substr(0,sliderObj.style.left.length-2)); // Strip the trailing 'px'
n@49 319 if (engine.timer.testStarted)
n@49 320 {
n@49 321 engine.audioObjects[id].metric.moved(time,position);
n@49 322 } else {
n@49 323 engine.audioObjects[id].metric.initialised(position);
n@49 324 }
n@49 325 };
n@49 326
n@49 327 this.sliderPlayed = function(id)
n@49 328 {
n@49 329 var time = engine.timer.getTestTime();
n@49 330 if (engine.timer.testStarted)
n@49 331 {
n@49 332 if (this.lastClicked >= 0)
n@49 333 {
n@49 334 engine.audioObjects[this.lastClicked].metric.listening(time);
n@49 335 }
n@49 336 this.lastClicked = id;
n@49 337 engine.audioObjects[id].metric.listening(time);
n@49 338 }
n@49 339 };
n@49 340 }
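
// Usage sketch (illustrative only; it is assumed that the interface script, e.g.
// ape.js, makes these calls): slider drags are reported as a start/finish pair and
// playback clicks are reported by track id.
//
//     audioEngineContext.metric.sliderMoveStart(2); // user grabbed the slider for track 2
//     audioEngineContext.metric.sliderMoved();      // user released it; position is read from the DOM
//     audioEngineContext.metric.sliderPlayed(2);    // user auditioned track 2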
n@49 341
n@49 342 function metricTracker()
n@49 343 {
n@49 344 /* Custom object to track and collect metric data
n@49 345 * Used only inside the audioObjects object.
n@49 346 */
n@49 347
n@49 348 this.listenedTimer = 0;
n@49 349 this.listenStart = 0;
n@49 350 this.initialPosition = 0;
n@49 351 this.movementTracker = [];
n@49 352 this.wasListenedTo = false;
n@49 353 this.wasMoved = false;
n@49 354 this.hasComments = false;
n@49 355
n@49 356 this.initialised = function(position)
n@49 357 {
n@49 358 this.initialPosition = position;
n@49 359 };
n@49 360
n@49 361 this.moved = function(time,position)
n@49 362 {
n@49 363 this.wasMoved = true;
n@49 364 this.movementTracker[this.movementTracker.length] = [time, position];
n@49 365 };
n@49 366
n@49 367 this.listening = function(time)
n@49 368 {
n@49 369 if (this.listenStart == 0)
n@49 370 {
n@49 371 this.wasListenedTo = true;
n@49 372 this.listenStart = time;
n@49 373 } else {
n@49 374 this.listenedTimer += (time - this.listenStart);
n@49 375 this.listenStart = 0;
n@49 376 }
n@49 377 };
n@50 378 }
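
// Note on metricTracker.listening: calls arrive in start/stop pairs. The first call
// records the listen start time; the second accumulates the elapsed interval into
// listenedTimer. A brief sketch with arbitrary example times:
//
//     var m = new metricTracker();
//     m.listening(10.0); // listening to this fragment began at 10 s
//     m.listening(14.5); // attention moved away at 14.5 s, so listenedTimer += 4.5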