annotate core.js @ 681:ab56aa2fe064

Added Loop
author Nicholas Jillings <n.g.r.jillings@se14.qmul.ac.uk>
date Sat, 18 Apr 2015 15:36:10 +0100
parents 1e736dc124ab
children 06fbaccf3b58
rev   line source
n@656 1 /**
n@656 2 * core.js
n@656 3 *
n@656 4 * Main script to run, calls all other core functions and manages loading/store to backend.
n@656 5 * Also contains all global variables.
n@656 6 */
n@656 7
n@656 8
n@656 9 /*
n@656 10 *
n@656 11 * WARNING!!!
n@656 12 *
n@656 13 * YOU ARE VIEWING THE DEV VERSION. THERE IS NO GUARANTEE THIS WILL BE FULLY FUNCTIONAL
n@656 14 *
n@656 15 * WARNING!!!
n@656 16 *
n@656 17 */
n@656 18
n@656 19
n@656 20
n@656 21
/* create the web audio API context and store in audioContext*/
var audioContext; // Hold the browser web audio API context (constructed in window.onload)
var projectXML; // Hold the parsed setup XML (jQuery-wrapped document)

var testXMLSetups = []; // Hold the parsed test instances
var testResultsHolders =[]; // Hold the results from each test for publishing to XML
var currentTrackOrder = []; // Hold the current XML tracks in their (randomised) order
var currentTestHolder; // Hold any intermediate results during test - metrics
var audioEngineContext; // The custome AudioEngine object (constructed in window.onload)
var projectReturn; // Hold the URL for the return
var preTestQuestions = document.createElement('PreTest'); // Store any pre-test question response
var postTestQuestions = document.createElement('PostTest'); // Store any post-test question response

// Add a prototype to the bufferSourceNode to reference to the audioObject holding it.
// NOTE(review): extending a native prototype (AudioBufferSourceNode) is usually
// discouraged; presumably interface scripts rely on 'owner' — confirm before removing.
AudioBufferSourceNode.prototype.owner = undefined;
n@681 37
window.onload = function() {
	// Function called once the browser has loaded all files.
	// This should perform any initial commands such as structure / loading documents.

	// Create a web audio API context.
	// Fixed for cross-browser support (Safari exposes the webkit-prefixed constructor).
	var AudioContext = window.AudioContext || window.webkitAudioContext;
	// FIX: invoke the constructor with parentheses rather than relying on
	// JavaScript's bare `new AudioContext` form.
	audioContext = new AudioContext();

	// Create the audio engine object
	audioEngineContext = new AudioEngine();
};
n@656 50
function loadProjectSpec(url) {
	// Load the project document from the given URL, decode the XML and instruct audioEngine to get audio data.
	// If url is null, request client to upload project XML document.
	var r = new XMLHttpRequest();
	r.open('GET', url, true);
	r.onload = function() {
		loadProjectSpecCallback(r.response);
	};
	// FIX: surface network failures instead of failing silently.
	r.onerror = function() {
		console.log('ERR: Could not load project specification from ' + url);
	};
	r.send();
}
n@656 61
function loadProjectSpecCallback(response) {
	// Function called after asynchronous download of the XML project specification.
	// Parses the XML, stores it globally and injects the interface script it names.
	var parsedDoc = $.parseXML(response);
	projectXML = $(parsedDoc);

	// Extract the setup tag and read which interface to load
	var setupNode = projectXML.find('setup');
	var interfaceAttr = setupNode[0].attributes['interface'];

	// Build a <script> element pointing at the requested interface javascript
	var scriptElem = document.createElement('script');
	scriptElem.setAttribute("type", "text/javascript");
	var headElem = document.getElementsByTagName("head")[0];
	if (interfaceAttr.value == 'APE') {
		scriptElem.setAttribute("src", "ape.js");

		// APE comes with a css file
		var cssElem = document.createElement('link');
		cssElem.rel = 'stylesheet';
		cssElem.type = 'text/css';
		cssElem.href = 'ape.css';
		headElem.appendChild(cssElem);
	}
	headElem.appendChild(scriptElem);
}
n@656 86
function createProjectSave(destURL) {
	// Save the data from interface into XML and send to destURL.
	// If destURL is null/undefined then offer the XML as a download in the client.
	// Returns the 'download-point' element when a local download link was created,
	// otherwise undefined (matching the original hoisted-var behaviour).
	var xmlDoc = interfaceXMLSave();
	// FIX: declare submitDiv at function scope; the original declared it inside
	// the if-block and returned it outside, working only via var hoisting.
	var submitDiv;
	if (destURL == "null" || destURL == undefined) {
		// Render the XML locally and attach a download anchor
		var parent = document.createElement("div");
		parent.appendChild(xmlDoc);
		var file = [parent.innerHTML];
		var bb = new Blob(file, {type: 'application/xml'});
		var dnlk = window.URL.createObjectURL(bb);
		var a = document.createElement("a");
		a.hidden = '';
		a.href = dnlk;
		a.download = "save.xml";
		a.textContent = "Save File";

		submitDiv = document.getElementById('download-point');
		submitDiv.appendChild(a);
	}
	return submitDiv;
}
n@656 109
function AudioEngine() {
	// Core audio controller: owns the output gain paths, playback state,
	// session timer/metrics and the collection of audioObject tracks.

	// Create two output paths, the main outputGain and fooGain.
	// Output gain is default to 1 and any items for playback route here.
	// Foo gain is used for analysis to ensure paths get processed, but are not heard
	// because web audio will optimise and any route which does not go to the destination gets ignored.
	this.outputGain = audioContext.createGain();
	this.fooGain = audioContext.createGain();
	// BUG FIX: gain is an AudioParam; its level is set via .value.
	// Assigning to .gain directly is silently ignored by the Web Audio API.
	this.fooGain.gain.value = 0;

	// Use this to detect playback state: 0 - stopped, 1 - playing
	this.status = 0;

	// Connect both gains to output
	this.outputGain.connect(audioContext.destination);
	this.fooGain.connect(audioContext.destination);

	// Create the timer Object
	this.timer = new timer();
	// Create session metrics
	this.metric = new sessionMetrics(this);

	// When true, newly started buffer sources loop their buffer
	this.loopPlayback = false;

	// Create store for new audioObjects
	this.audioObjects = [];

	// Transport placeholders; presumably filled in by the interface script — confirm.
	this.play = function() {};

	this.stop = function() {};

	this.newTrack = function(url) {
		// Pull data from given URL into new audio buffer.
		// URLs must either be from the same source OR be setup to 'Access-Control-Allow-Origin'.

		// Create the audioObject with ID of the new track length.
		// BUG FIX: declare with var to avoid creating an implicit global.
		var audioObjectId = this.audioObjects.length;
		this.audioObjects[audioObjectId] = new audioObject(audioObjectId);

		// AudioObject will get track itself.
		this.audioObjects[audioObjectId].constructTrack(url);
	};

}
n@656 155
function audioObject(id) {
	// The main buffer object with common control nodes to the AudioEngine

	this.id = id;
	this.state = 0; // 0 - no data, 1 - ready
	this.url = null; // Hold the URL given for the output back to the results.
	this.metric = new metricTracker();

	// Create a buffer and external gain control to allow internal patching of effects and volume leveling.
	this.bufferNode = undefined;
	this.outputGain = audioContext.createGain();

	// Default output gain to be zero
	this.outputGain.gain.value = 0.0;

	// Connect buffer to the audio graph
	this.outputGain.connect(audioEngineContext.outputGain);

	// The audiobuffer is not designed for multi-start playback.
	// When stopped, the buffer node is deleted and recreated with the stored buffer.
	this.buffer;

	this.play = function(startTime) {
		// A fresh AudioBufferSourceNode is required for every start()
		this.bufferNode = audioContext.createBufferSource();
		this.bufferNode.connect(this.outputGain);
		this.bufferNode.buffer = this.buffer;
		this.bufferNode.loop = audioEngineContext.loopPlayback;
		this.bufferNode.start(startTime);
	};

	this.stop = function() {
		// BUG FIX: guard against stop() before play() (or a double stop),
		// which would throw when bufferNode is undefined.
		if (this.bufferNode != undefined) {
			this.bufferNode.stop(0);
			this.bufferNode = undefined;
		}
	};

	this.constructTrack = function(url) {
		// Fetch the audio file at url as an arraybuffer and decode it into this.buffer.
		var request = new XMLHttpRequest();
		this.url = url;
		request.open('GET', url, true);
		request.responseType = 'arraybuffer';

		var audioObj = this;

		// Create callback to decode the data asynchronously
		request.onloadend = function() {
			audioContext.decodeAudioData(request.response, function(decodedData) {
				audioObj.buffer = decodedData;
				audioObj.state = 1;
			}, function() {
				// Should only be called if there was an error, but sometimes gets called continuously.
				// Check here if the error is genuine.
				if (audioObj.state == 0 || audioObj.buffer == undefined) {
					// Genuine error
					console.log('FATAL - Error loading buffer on ' + audioObj.id);
				}
			});
		};
		request.send();
	};

}
n@673 217
function timer() {
	/* Timer object used in audioEngine to keep track of session timings.
	 * Uses the timer of the web audio API, so sample resolution.
	 */
	this.testStarted = false;
	this.testStartTime = 0;
	this.testDuration = 0;
	this.minimumTestTime = 0; // No minimum test time

	// Begin timing; a no-op if a test is already running.
	this.startTest = function() {
		if (this.testStarted) {
			return;
		}
		this.testStartTime = audioContext.currentTime;
		this.testStarted = true;
		this.updateTestTime();
		audioEngineContext.metric.initialiseTest();
	};

	// Freeze the duration and mark the test as over.
	this.stopTest = function() {
		if (!this.testStarted) {
			console.log('ERR: Test tried to end before beginning');
			return;
		}
		this.testDuration = this.getTestTime();
		this.testStarted = false;
	};

	// Refresh testDuration while the test is running.
	this.updateTestTime = function() {
		if (this.testStarted) {
			this.testDuration = audioContext.currentTime - this.testStartTime;
		}
	};

	// Return the up-to-date test duration.
	this.getTestTime = function() {
		this.updateTestTime();
		return this.testDuration;
	};
}
n@673 260
function sessionMetrics(engine) {
	/* Used by audioEngine to link to audioObjects to minimise the timer call timers. */
	this.engine = engine;
	this.lastClicked = -1;
	this.data = -1;
	// Hook invoked when the session timer starts; interfaces may replace it.
	this.initialiseTest = function() {};
}
n@673 270
function metricTracker() {
	/* Custom object to track and collect metric data.
	 * Used only inside the audioObjects object.
	 */

	this.listenedTimer = 0; // Accumulated listening time
	this.listenStart = 0; // Start of the currently-open listen interval (0 = none open)
	this.initialPosition = -1; // -1 until initialised() records the first position
	this.movementTracker = []; // Collected [time, position] pairs
	this.wasListenedTo = false;
	this.wasMoved = false;
	this.hasComments = false;

	// Record the starting position once; subsequent calls are ignored.
	this.initialised = function(position) {
		if (this.initialPosition == -1) {
			this.initialPosition = position;
		}
	};

	// Log a movement event with its timestamp and new position.
	this.moved = function(time, position) {
		this.wasMoved = true;
		this.movementTracker.push([time, position]);
	};

	// Toggle-style listen tracking: the first call opens an interval,
	// the next closes it and accumulates the elapsed time.
	this.listening = function(time) {
		if (this.listenStart == 0) {
			this.wasListenedTo = true;
			this.listenStart = time;
		} else {
			this.listenedTimer += time - this.listenStart;
			this.listenStart = 0;
		}
	};
}
n@678 310
function randomiseOrder(input) {
	// Returns a new array holding the elements of `input` in random order,
	// picked by repeated random removal.
	// NOTE: the input array is consumed (emptied by the splice calls),
	// matching the original behaviour callers may rely on.
	// FIX: removed the unused variable K from the original implementation.
	var N = input.length;
	var holdArr = [];
	for (var n = 0; n < N; n++) {
		// Pick a random index among the elements still left in input
		var r = Math.floor(Math.random() * input.length);
		// Remove that element and append it to the output
		holdArr.push(input.splice(r, 1)[0]);
	}
	return holdArr;
}