/**
 * core.js
 *
 * Main script to run. Calls all other core functions and manages loading/storing to the backend.
 * Also contains all global variables.
 */


/*
 *
 * WARNING!!!
 *
 * YOU ARE VIEWING THE DEV VERSION. THERE IS NO GUARANTEE THIS WILL BE FULLY FUNCTIONAL
 *
 * WARNING!!!
 *
 */


/* Create the web audio API context and store in audioContext */
var audioContext; // Hold the browser web audio API
var projectXML; // Hold the parsed setup XML

var testXMLSetups = []; // Hold the parsed test instances
var testResultsHolders = []; // Hold the results from each test for publishing to XML
var currentTrackOrder = []; // Hold the current XML tracks in their (randomised) order
var currentTestHolder; // Hold any intermediate results during the test - metrics
var audioEngineContext; // The custom AudioEngine object
var projectReturn; // Hold the URL for the return
var preTestQuestions = document.createElement('PreTest'); // Store any pre-test question responses
var postTestQuestions = document.createElement('PostTest'); // Store any post-test question responses

// Add a property to the AudioBufferSourceNode prototype to reference the audioObject holding it
AudioBufferSourceNode.prototype.owner = undefined;

window.onload = function() {
	// Function called once the browser has loaded all files.
	// This should perform any initial commands such as structuring / loading documents.

	// Create a web audio API context
	// Fixed for cross-browser support
	var AudioContext = window.AudioContext || window.webkitAudioContext;
	audioContext = new AudioContext();

	// Create the audio engine object
	audioEngineContext = new AudioEngine();
};

function loadProjectSpec(url) {
	// Load the project document from the given URL, decode the XML and instruct audioEngine to get audio data.
	// If url is null, request the client to upload a project XML document.
	var r = new XMLHttpRequest();
	r.open('GET', url, true);
	r.onload = function() {
		loadProjectSpecCallback(r.response);
	};
	r.send();
}
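/*
 * loadProjectSpec() notes that a null url should prompt the client to upload the
 * project XML, but that branch is not implemented in this dev version. The sketch
 * below shows one possible approach using a temporary file <input> and a FileReader.
 * It is illustrative only: the helper name requestProjectSpecUpload and the way the
 * picker is created are assumptions, not part of the existing code.
 */
function requestProjectSpecUpload() {
	// Create a temporary file picker and hand the chosen XML to the existing callback
	var picker = document.createElement('input');
	picker.type = 'file';
	picker.accept = '.xml,application/xml,text/xml';
	picker.onchange = function() {
		if (picker.files.length == 0) {return;}
		var reader = new FileReader();
		reader.onload = function(event) {
			// Reuse the same parsing path as a downloaded specification
			loadProjectSpecCallback(event.target.result);
		};
		reader.readAsText(picker.files[0]);
	};
	picker.click();
}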
function loadProjectSpecCallback(response) {
	// Function called after asynchronous download of the XML project specification
	var decode = $.parseXML(response);
	projectXML = $(decode);

	// Now extract the setup tag
	var xmlSetup = projectXML.find('setup');
	// Detect the interface to use and load the relevant javascripts.
	var interfaceType = xmlSetup[0].attributes['interface'];
	var interfaceJS = document.createElement('script');
	interfaceJS.setAttribute("type", "text/javascript");
	if (interfaceType.value == 'APE') {
		interfaceJS.setAttribute("src", "ape.js");

		// APE comes with a css file
		var css = document.createElement('link');
		css.rel = 'stylesheet';
		css.type = 'text/css';
		css.href = 'ape.css';

		document.getElementsByTagName("head")[0].appendChild(css);
	}
	document.getElementsByTagName("head")[0].appendChild(interfaceJS);
}

function createProjectSave(destURL) {
	// Save the data from the interface into XML and send it to destURL.
	// If destURL is null, offer the XML as a download in the client instead.
	var xmlDoc = interfaceXMLSave();
	var submitDiv;
	if (destURL == "null" || destURL == undefined) {
		// Render the file locally and attach a download link
		var parent = document.createElement("div");
		parent.appendChild(xmlDoc);
		var file = [parent.innerHTML];
		var bb = new Blob(file, {type: 'application/xml'});
		var dnlk = window.URL.createObjectURL(bb);
		var a = document.createElement("a");
		a.hidden = '';
		a.href = dnlk;
		a.download = "save.xml";
		a.textContent = "Save File";

		submitDiv = document.getElementById('download-point');
		submitDiv.appendChild(a);
	}
	// Sending to a remote destURL is not yet implemented in this dev version.
	return submitDiv;
}

function AudioEngine() {

	// Create two output paths, the main outputGain and fooGain.
	// outputGain defaults to 1 and any items for playback route here.
	// fooGain is used for analysis, to ensure paths get processed but are not heard,
	// because the web audio API optimises away any route which does not reach the destination.
	this.outputGain = audioContext.createGain();
	this.fooGain = audioContext.createGain();
	this.fooGain.gain.value = 0;

	// Use this to detect playback state: 0 - stopped, 1 - playing
	this.status = 0;

	// Connect both gains to the output
	this.outputGain.connect(audioContext.destination);
	this.fooGain.connect(audioContext.destination);

	// Create the timer object
	this.timer = new timer();
	// Create session metrics
	this.metric = new sessionMetrics(this);

	this.loopPlayback = false;

	// Create store for new audioObjects
	this.audioObjects = [];

	// Playback control stubs, not yet implemented in this dev version
	this.play = function() {};

	this.stop = function() {};

	this.newTrack = function(url) {
		// Pull data from the given URL into a new audio buffer.
		// URLs must either be from the same origin OR be set up for 'Access-Control-Allow-Origin'.

		// Create the audioObject with an ID equal to the current track count
		var audioObjectId = this.audioObjects.length;
		this.audioObjects[audioObjectId] = new audioObject(audioObjectId);

		// The audioObject will fetch the track itself.
		this.audioObjects[audioObjectId].constructTrack(url);
	};

}
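/*
 * Usage sketch for the AudioEngine defined above: load a fragment into a new
 * audioObject, then start it once the buffer has decoded. The URL
 * 'example-fragment.wav', the fixed index 0 and the polling interval are
 * placeholders for illustration; a real test would take these from the parsed
 * project XML rather than hard-coding them.
 */
function demoPlayFirstTrack() {
	audioEngineContext.newTrack('example-fragment.wav');
	var fragment = audioEngineContext.audioObjects[0];
	// constructTrack() decodes asynchronously, so poll until the buffer is ready
	var poll = setInterval(function() {
		if (fragment.state == 1) {
			clearInterval(poll);
			fragment.outputGain.gain.value = 1.0; // unmute (audioObjects default to 0)
			fragment.play(audioContext.currentTime);
		}
	}, 100);
}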
function audioObject(id) {
	// The main buffer object, with common control nodes connecting to the AudioEngine

	this.id = id;
	this.state = 0; // 0 - no data, 1 - ready
	this.url = null; // Hold the URL given, for the output back to the results
	this.metric = new metricTracker();

	// Create a buffer and an external gain control to allow internal patching of effects and volume levelling
	this.bufferNode = undefined;
	this.outputGain = audioContext.createGain();

	// Default the output gain to zero
	this.outputGain.gain.value = 0.0;

	// Connect the buffer to the audio graph
	this.outputGain.connect(audioEngineContext.outputGain);

	// An AudioBufferSourceNode is not designed for multi-start playback.
	// When stopped, the buffer node is discarded and recreated from the stored buffer.
	this.buffer = undefined;

	this.play = function(startTime) {
		this.bufferNode = audioContext.createBufferSource();
		this.bufferNode.connect(this.outputGain);
		this.bufferNode.buffer = this.buffer;
		this.bufferNode.loop = audioEngineContext.loopPlayback;
		this.bufferNode.start(startTime);
	};

	this.stop = function() {
		if (this.bufferNode != undefined) {
			this.bufferNode.stop(0);
			this.bufferNode = undefined;
		}
	};

	this.constructTrack = function(url) {
		var request = new XMLHttpRequest();
		this.url = url;
		request.open('GET', url, true);
		request.responseType = 'arraybuffer';

		var audioObj = this;

		// Create callback to decode the data asynchronously
		request.onloadend = function() {
			audioContext.decodeAudioData(request.response, function(decodedData) {
				audioObj.buffer = decodedData;
				audioObj.state = 1;
			}, function() {
				// Should only be called if there was an error, but sometimes gets called continuously.
				// Check here if the error is genuine.
				if (audioObj.state == 0 || audioObj.buffer == undefined) {
					// Genuine error
					console.log('FATAL - Error loading buffer on ' + audioObj.id);
				}
			});
		};
		request.send();
	};

}

function timer()
{
	/* Timer object used in the audioEngine to keep track of session timings.
	 * Uses the clock of the web audio API, so it has sample-accurate resolution.
	 */
	this.testStarted = false;
	this.testStartTime = 0;
	this.testDuration = 0;
	this.minimumTestTime = 0; // No minimum test time
	this.startTest = function()
	{
		if (this.testStarted == false)
		{
			this.testStartTime = audioContext.currentTime;
			this.testStarted = true;
			this.updateTestTime();
			audioEngineContext.metric.initialiseTest();
		}
	};
	this.stopTest = function()
	{
		if (this.testStarted)
		{
			this.testDuration = this.getTestTime();
			this.testStarted = false;
		} else {
			console.log('ERR: Test tried to end before beginning');
		}
	};
	this.updateTestTime = function()
	{
		if (this.testStarted)
		{
			this.testDuration = audioContext.currentTime - this.testStartTime;
		}
	};
	this.getTestTime = function()
	{
		this.updateTestTime();
		return this.testDuration;
	};
}

function sessionMetrics(engine)
{
	/* Used by the audioEngine to link to the audioObjects, minimising the number of timer calls
	 */
	this.engine = engine;
	this.lastClicked = -1;
	this.data = -1;
	this.initialiseTest = function() {};
}

function metricTracker()
{
	/* Custom object to track and collect metric data.
	 * Used only inside the audioObject instances.
	 */

	this.listenedTimer = 0;
	this.listenStart = 0;
	this.initialPosition = -1;
	this.movementTracker = [];
	this.wasListenedTo = false;
	this.wasMoved = false;
	this.hasComments = false;

	this.initialised = function(position)
	{
		if (this.initialPosition == -1) {
			this.initialPosition = position;
		}
	};

	this.moved = function(time, position)
	{
		this.wasMoved = true;
		this.movementTracker.push([time, position]);
	};

	this.listening = function(time)
	{
		// Called in pairs: the first call marks the start of listening,
		// the second accumulates the elapsed interval.
		if (this.listenStart == 0)
		{
			this.wasListenedTo = true;
			this.listenStart = time;
		} else {
			this.listenedTimer += (time - this.listenStart);
			this.listenStart = 0;
		}
	};
}
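/*
 * Usage sketch for the timer and metricTracker objects above. timer.startTest() /
 * stopTest() bracket a whole session, while metricTracker.listening() is intended to
 * be called in start/stop pairs so listened time accumulates. The concrete times,
 * slider positions and the fragment argument below are placeholders for illustration
 * only, not values used anywhere in the existing code.
 */
function demoMetricCalls(fragment) {
	audioEngineContext.timer.startTest();

	// First call marks the start of listening, second call accumulates the interval
	fragment.metric.listening(audioContext.currentTime);
	fragment.metric.listening(audioContext.currentTime + 5.0);

	// Record an initial slider position and a later movement
	fragment.metric.initialised(0.25);
	fragment.metric.moved(audioEngineContext.timer.getTestTime(), 0.75);

	console.log('Session length so far: ' + audioEngineContext.timer.getTestTime() + 's');
	audioEngineContext.timer.stopTest();
}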
function randomiseOrder(input)
{
	// This takes an array of information and randomises the order
	var N = input.length;
	var K = N;
	var holdArr = [];
	for (var n=0; n