annotate core.js @ 674:436db2f29f73

Updated metric functions. Completed Metric Exporting.
author Nicholas Jillings <n.g.r.jillings@se14.qmul.ac.uk>
date Mon, 13 Apr 2015 09:58:16 +0100
parents d4e55184f776
children 7e73d1cdcff8
rev   line source
n@656 1 /**
n@656 2 * core.js
n@656 3 *
n@656 4 * Main script to run, calls all other core functions and manages loading/store to backend.
n@656 5 * Also contains all global variables.
n@656 6 */
n@656 7
n@656 8
n@656 9 /*
n@656 10 *
n@656 11 * WARNING!!!
n@656 12 *
n@656 13 * YOU ARE VIEWING THE DEV VERSION. THERE IS NO GUARANTEE THIS WILL BE FULLY FUNCTIONAL
n@656 14 *
n@656 15 * WARNING!!!
n@656 16 *
n@656 17 */
n@656 18
n@656 19
n@656 20
n@656 21
/* Create the web audio API context and store in audioContext */
var audioContext; // Holds the browser Web Audio API context (constructed in window.onload)
var projectXML; // Holds the parsed project setup XML (jQuery-wrapped document)

var testXMLSetups = []; // Holds the parsed test instances
var testResultsHolders =[]; // Holds the results from each test for publishing to XML
var currentTrackOrder = []; // Holds the current XML tracks in their (randomised) order
var currentTestHolder; // Holds any intermediate results during a test - metrics
var audioEngineContext; // The custom AudioEngine object (constructed in window.onload)
var projectReturn; // Holds the URL for returning results
var preTestQuestions = document.createElement('PreTest'); // Stores any pre-test question responses
var postTestQuestions = document.createElement('PostTest'); // Stores any post-test question responses
n@656 34
window.onload = function() {
	// Called once the browser has finished loading all resources.
	// Performs initial setup: audio context creation and engine construction.

	// Cross-browser Web Audio API context (webkit prefix for older Safari/Chrome)
	var ContextConstructor = window.AudioContext || window.webkitAudioContext;
	audioContext = new ContextConstructor();

	// Build the global audio engine object
	audioEngineContext = new AudioEngine();
};
n@656 47
function loadProjectSpec(url) {
	// Load the project document from the given URL, decode the XML and instruct audioEngine to get audio data.
	// If url is null, request client to upload project XML document.
	// NOTE(review): the null/upload path is not implemented here - TODO confirm intended handling.
	var r = new XMLHttpRequest();
	r.open('GET', url, true);
	r.onload = function() {
		loadProjectSpecCallback(r.response);
	};
	// BUG FIX: surface network failures instead of failing silently
	r.onerror = function() {
		console.log('ERROR: Could not load project specification from ' + url);
	};
	r.send();
}
n@656 58
function loadProjectSpecCallback(response) {
	// Called once the asynchronous download of the XML project specification completes.
	// Parses the XML, stores it globally and injects the interface-specific script (and CSS).
	var parsedDoc = $.parseXML(response);
	projectXML = $(parsedDoc);

	// Extract the <setup> node to determine which test interface to load
	var setupNode = projectXML.find('setup');
	var interfaceAttr = setupNode[0].attributes['interface'];

	// Build a <script> element pointing at the interface implementation
	var scriptElem = document.createElement('script');
	scriptElem.setAttribute("type", "text/javascript");
	var headElem = document.getElementsByTagName("head")[0];
	if (interfaceAttr.value === 'APE') {
		scriptElem.setAttribute("src", "ape.js");

		// The APE interface ships with a companion stylesheet
		var cssLink = document.createElement('link');
		cssLink.rel = 'stylesheet';
		cssLink.type = 'text/css';
		cssLink.href = 'ape.css';

		headElem.appendChild(cssLink);
	}
	headElem.appendChild(scriptElem);
}
n@656 83
function createProjectSave(destURL) {
	// Save the data from the interface into XML and send to destURL.
	// If destURL is null/undefined, render the XML locally and attach it as a
	// downloadable file to the 'download-point' element.
	// Returns the download container element, or null when nothing was rendered locally.
	var xmlDoc = interfaceXMLSave();
	// BUG FIX: submitDiv was previously declared inside the if-branch but
	// returned unconditionally, yielding undefined for remote saves.
	var submitDiv = null;
	if (destURL == "null" || destURL == undefined) {
		// Serialise the XML node by parenting it to a detached div
		var parent = document.createElement("div");
		parent.appendChild(xmlDoc);
		var file = [parent.innerHTML];
		var bb = new Blob(file, {type: 'application/xml'});
		var dnlk = window.URL.createObjectURL(bb);
		var a = document.createElement("a");
		a.hidden = '';
		a.href = dnlk;
		a.download = "save.xml";
		a.textContent = "Save File";

		submitDiv = document.getElementById('download-point');
		submitDiv.appendChild(a);
	}
	// NOTE(review): posting the XML to a real destURL is not implemented - TODO confirm
	return submitDiv;
}
n@656 106
function AudioEngine() {

	// Create two output paths, the main outputGain and fooGain.
	// Output gain defaults to 1 and any items for playback route here.
	// Foo gain is used for analysis to ensure paths get processed, but are not heard,
	// because web audio will optimise away any route which does not reach the destination.
	this.outputGain = audioContext.createGain();
	this.fooGain = audioContext.createGain();
	// BUG FIX: gain is an AudioParam; set its .value rather than assigning over
	// the (read-only) property, which silently had no effect.
	this.fooGain.gain.value = 0;

	// Playback state: 0 - stopped, 1 - playing
	this.status = 0;

	// Connect both gains to the physical output
	this.outputGain.connect(audioContext.destination);
	this.fooGain.connect(audioContext.destination);

	// Session timer object (driven by the audio clock)
	this.timer = new timer();
	// Session-wide metric collector
	this.metric = new sessionMetrics(this);

	// Store for audioObject instances, indexed by their id
	this.audioObjects = [];

	this.play = function() {
		// Send play command to all playback buffers for a synchronised start.
		// Also starts the session timer on first playback.
		if (this.status == 0) {
			this.timer.startTest();
			// Schedule the synchronised start 3 seconds from the current audio clock.
			// (Renamed from 'timer' to avoid shadowing the global timer constructor.)
			var startTime = audioContext.currentTime + 3.0;

			// Send play to all tracks
			for (var i = 0; i < this.audioObjects.length; i++) {
				this.audioObjects[i].play(startTime);
			}
			this.status = 1;
		}
	};

	this.stop = function() {
		// Send stop-and-reset command to all playback buffers
		if (this.status == 1) {
			for (var i = 0; i < this.audioObjects.length; i++) {
				this.audioObjects[i].stop();
			}
			this.status = 0;
		}
	};

	this.selectedTrack = function(id) {
		// Solo the track with the given id: unity gain for it, silence for the rest
		for (var i = 0; i < this.audioObjects.length; i++) {
			this.audioObjects[i].outputGain.gain.value = (id == i) ? 1.0 : 0.0;
		}
	};

	this.newTrack = function(url) {
		// Pull data from the given URL into a new audio buffer.
		// URLs must either be same-origin OR served with 'Access-Control-Allow-Origin'.

		// BUG FIX: declare the id locally; it previously leaked as an implicit global.
		var audioObjectId = this.audioObjects.length;
		this.audioObjects[audioObjectId] = new audioObject(audioObjectId);

		// The audioObject fetches and decodes the track itself.
		this.audioObjects[audioObjectId].constructTrack(url);
	};

}
n@656 187
function audioObject(id) {
	// The main buffer holder with common control nodes linking into the AudioEngine

	this.id = id;
	this.state = 0; // 0 - no data, 1 - ready
	this.url = null; // Keeps the source URL for reporting back in the results
	this.metric = new metricTracker();

	// Buffer source plus an external gain node so effects / volume levelling
	// can be patched in without touching the engine-side graph.
	this.bufferNode = audioContext.createBufferSource();
	this.outputGain = audioContext.createGain();

	// Tracks start muted
	this.outputGain.gain.value = 0.0;

	// Wire the source through the local gain into the engine's output
	this.bufferNode.connect(this.outputGain);
	this.outputGain.connect(audioEngineContext.outputGain);

	// An AudioBufferSourceNode is single-shot; the decoded data is cached here
	// so the node can be rebuilt after each stop().
	this.buffer;

	this.play = function(startTime) {
		// Begin playback at the given audio-clock time
		this.bufferNode.start(startTime);
	};

	this.stop = function() {
		// Halt playback immediately, then rebuild the one-shot source node
		// from the cached buffer so the track can be started again.
		this.bufferNode.stop(0);
		this.bufferNode = audioContext.createBufferSource();
		this.bufferNode.connect(this.outputGain);
		this.bufferNode.buffer = this.buffer;
		this.bufferNode.loop = true;
	};

	this.constructTrack = function(url) {
		// Asynchronously fetch the audio file from url and decode it into this.buffer
		var request = new XMLHttpRequest();
		this.url = url;
		request.open('GET', url, true);
		request.responseType = 'arraybuffer';

		var self = this; // capture for the callbacks below

		// Decode the downloaded data asynchronously once the request settles
		request.onloadend = function() {
			audioContext.decodeAudioData(request.response, function(decodedData) {
				self.buffer = decodedData;
				self.bufferNode.buffer = self.buffer;
				self.bufferNode.loop = true;
				self.state = 1;
			}, function() {
				// Should only fire on error, but sometimes fires spuriously;
				// verify the failure is genuine before reporting it.
				if (self.state == 0 || self.buffer == undefined) {
					console.log('FATAL - Error loading buffer on ' + self.id);
				}
			});
		};
		request.send();
	};

}
n@673 251
function timer()
{
	/* Timer object used by the AudioEngine to track session timings.
	 * Driven by the Web Audio API clock, so sample-accurate.
	 */
	this.testStarted = false;
	this.testStartTime = 0;
	this.testDuration = 0;
	this.minimumTestTime = 0; // No minimum test time enforced

	// Begin timing; a second call while already running is ignored.
	this.startTest = function() {
		if (!this.testStarted) {
			this.testStartTime = audioContext.currentTime;
			this.testStarted = true;
			this.updateTestTime();
		}
	};

	// Freeze the duration and mark the test as finished.
	this.stopTest = function() {
		if (!this.testStarted) {
			console.log('ERR: Test tried to end before beginning');
			return;
		}
		this.testDuration = this.getTestTime();
		this.testStarted = false;
	};

	// Refresh testDuration from the audio clock while the test is running.
	this.updateTestTime = function() {
		if (this.testStarted) {
			this.testDuration = audioContext.currentTime - this.testStartTime;
		}
	};

	// Current elapsed time (refreshes from the clock, then returns).
	this.getTestTime = function() {
		this.updateTestTime();
		return this.testDuration;
	};
}
n@673 293
function sessionMetrics(engine)
{
	/* Links the AudioEngine to the per-track metricTrackers so that all
	 * metric events share a single session timer.
	 */
	this.engine = engine;
	this.lastClicked = -1; // id of the last track played, -1 when none yet
	this.data = -1;        // id of the slider currently being dragged, -1 when idle

	// Record which slider a drag started on; flags overlapping drags.
	this.sliderMoveStart = function(id) {
		if (this.data != -1) {
			console.log('ERROR: Metric tracker detecting two moves!');
			this.data = -1;
			return;
		}
		this.data = id;
	};

	// Drag finished: read the slider position back from the DOM and record it.
	this.sliderMoved = function() {
		var time = engine.timer.getTestTime();
		var id = this.data;
		this.data = -1;
		var sliderObj = document.getElementsByClassName('track-slider')[id];
		// The slider position is encoded in its CSS 'left' style, e.g. "123px"
		var position = Number(sliderObj.style.left.substr(0, sliderObj.style.left.length - 2));
		if (engine.timer.testStarted) {
			engine.audioObjects[id].metric.moved(time, position);
		} else {
			// Before the test starts, moves only set the initial position
			engine.audioObjects[id].metric.initialised(position);
		}
	};

	// A track was clicked for playback: close the previous track's listen
	// interval and open one on the newly selected track.
	this.sliderPlayed = function(id) {
		var time = engine.timer.getTestTime();
		if (engine.timer.testStarted) {
			if (this.lastClicked >= 0) {
				engine.audioObjects[this.lastClicked].metric.listening(time);
			}
			this.lastClicked = id;
			engine.audioObjects[id].metric.listening(time);
		}
	};
}
n@673 341
function metricTracker()
{
	/* Collects per-track metric data.
	 * Used only inside the audioObject object.
	 */

	this.listenedTimer = 0;    // accumulated listening time
	this.listenStart = 0;      // session time the current listen interval began
	this.initialPosition = 0;  // slider position recorded before the test started
	this.movementTracker = []; // [time, position] pairs, one per slider move
	this.wasListenedTo = false;
	this.wasMoved = false;
	this.hasComments = false;
	// BUG FIX: track the open/closed listen state explicitly instead of using
	// listenStart == 0 as a sentinel, which broke for a listen genuinely
	// starting at session time 0.
	this.isListening = false;

	// Record the slider position set before the test began.
	this.initialised = function(position)
	{
		this.initialPosition = position;
	};

	// Record a slider movement at the given session time.
	this.moved = function(time, position)
	{
		this.wasMoved = true;
		this.movementTracker.push([time, position]);
	};

	// Toggle the listen interval: the first call opens it at 'time', the next
	// call closes it and accumulates the elapsed duration.
	this.listening = function(time)
	{
		if (!this.isListening)
		{
			this.wasListenedTo = true;
			this.listenStart = time;
			this.isListening = true;
		} else {
			this.listenedTimer += (time - this.listenStart);
			this.listenStart = 0;
			this.isListening = false;
		}
	};
}