/**
 * core.js
 *
 * Main script to run; calls all other core functions and manages loading/storing to the backend.
 * Also contains all global variables.
 */

/*
 *
 * WARNING!!!
 *
 * YOU ARE VIEWING THE DEV VERSION. THERE IS NO GUARANTEE THIS WILL BE FULLY FUNCTIONAL
 *
 * WARNING!!!
 *
 */

/* Create the Web Audio API context and store in audioContext */
var audioContext; // Hold the browser Web Audio API context
var projectXML; // Hold the parsed setup XML

var testXMLSetups = []; // Hold the parsed test instances
var testResultsHolders = []; // Hold the results from each test for publishing to XML
var currentTrackOrder = []; // Hold the current XML tracks in their (randomised) order
var currentTestHolder; // Hold any intermediate results during the test - metrics
var audioEngineContext; // The custom AudioEngine object
var projectReturn; // Hold the URL for the return
var preTestQuestions = document.createElement('PreTest'); // Store any pre-test question responses
var postTestQuestions = document.createElement('PostTest'); // Store any post-test question responses

window.onload = function() {
    // Function called once the browser has loaded all files.
    // This should perform any initial commands such as structuring / loading documents.

    // Create a Web Audio API context
    // Fixed for cross-browser support
    var AudioContext = window.AudioContext || window.webkitAudioContext;
    audioContext = new AudioContext();

    // Create the audio engine object
    audioEngineContext = new AudioEngine();
};

function loadProjectSpec(url) {
    // Load the project document from the given URL, decode the XML and instruct the audioEngine to get audio data.
    // If url is null, request the client to upload a project XML document.
    var r = new XMLHttpRequest();
    r.open('GET', url, true);
    r.onload = function() {
        loadProjectSpecCallback(r.response);
    };
    r.send();
}

function loadProjectSpecCallback(response) {
    // Function called after asynchronous download of the XML project specification
    var decode = $.parseXML(response);
    projectXML = $(decode);

    // Now extract the setup tag
    var xmlSetup = projectXML.find('setup');
    // Detect the interface to use and load the relevant javascripts.
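    // Illustrative only - a minimal sketch of the setup node this code assumes.
    // The attribute name and the 'APE' value come from the checks below; any
    // other attributes or children are hypothetical:
    //
    //   <setup interface="APE">
    //       ...
    //   </setup>
    //
    // The value of the 'interface' attribute selects which interface script
    // (and stylesheet) is appended to the document head.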
    var interfaceType = xmlSetup[0].attributes['interface'];
    var interfaceJS = document.createElement('script');
    interfaceJS.setAttribute("type", "text/javascript");
    if (interfaceType.value == 'APE') {
        interfaceJS.setAttribute("src", "ape.js");

        // APE comes with a css file
        var css = document.createElement('link');
        css.rel = 'stylesheet';
        css.type = 'text/css';
        css.href = 'ape.css';

        document.getElementsByTagName("head")[0].appendChild(css);
    }
    document.getElementsByTagName("head")[0].appendChild(interfaceJS);
}

function createProjectSave(destURL) {
    // Save the data from the interface into XML and send it to destURL.
    // If destURL is null then download the XML in the client.
    // Now time to render the file locally.
    var xmlDoc = interfaceXMLSave();
    if (destURL == "null" || destURL == undefined) {
        var parent = document.createElement("div");
        parent.appendChild(xmlDoc);
        var file = [parent.innerHTML];
        var bb = new Blob(file, {type: 'application/xml'});
        var dnlk = window.URL.createObjectURL(bb);
        var a = document.createElement("a");
        a.hidden = '';
        a.href = dnlk;
        a.download = "save.xml";
        a.textContent = "Save File";

        var submitDiv = document.getElementById('download-point');
        submitDiv.appendChild(a);
        return submitDiv;
    }
}

function AudioEngine() {

    // Create two output paths, the main outputGain and fooGain.
    // outputGain defaults to 1 and any items for playback route here.
    // fooGain is used for analysis to ensure paths get processed but are not heard,
    // because Web Audio will optimise away any route which does not reach the destination.
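    //
    // A rough sketch of the intended routing, assumed from the comment above and
    // the connections made below:
    //
    //   audioObject --> outputGain (gain 1) --> destination   (audible path)
    //   audioObject --> fooGain    (gain 0) --> destination   (silent, keeps the analysis path alive)
    //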
    this.outputGain = audioContext.createGain();
    this.fooGain = audioContext.createGain();
    this.fooGain.gain.value = 0; // gain is an AudioParam, so set its .value

    // Use this to detect playback state: 0 - stopped, 1 - playing
    this.status = 0;

    // Connect both gains to the output
    this.outputGain.connect(audioContext.destination);
    this.fooGain.connect(audioContext.destination);

    // Create the timer object
    this.timer = new timer();
    // Create session metrics
    this.metric = new sessionMetrics(this);

    // Create store for new audioObjects
    this.audioObjects = [];

    this.play = function() {
        // Send the play command to all playback buffers for a synchronised start.
        // Also start timer callbacks to detect if playback has finished.
        if (this.status == 0) {
            this.timer.startTest();
            // First get the current clock
            var timer = audioContext.currentTime;
            // Add 3 seconds
            timer += 3.0;

            // Send play to all tracks
            for (var i = 0; i < this.audioObjects.length; i++) {