// ============================================================================
// NOTE(review): this file is NOT clean JavaScript as stored. Every logical
// source line carries a "djmoffat@693:" / "djmoffat@694:" prefix and the
// newlines have been lost -- this looks like version-control annotate/blame
// output (e.g. `hg annotate -u -r`) that was saved over the real core.js.
// Before this can run in a browser, strip each "user@rev: " marker and restore
// one statement per marker (the @693 vs @694 numbers distinguish the revision
// that last touched each line; @694 lines are the cross-browser AudioContext
// fix and the PreTest/PostTest element globals).
//
// Logical contents of the chunk below, in order:
//   * file header + globals: audioContext, projectXML, audioEngineContext,
//     projectReturn, preTestQuestions, postTestQuestions
//   * window.onload      -- creates the (webkit-prefixed if needed)
//                           AudioContext and the AudioEngine instance
//   * loadProjectSpec    -- async GET of the project XML; NOTE(review): the
//                           comment promises "If url is null, request client
//                           to upload" but no null branch exists -- TODO; the
//                           XHR also has no onerror handler, so a failed
//                           download fails silently
//   * loadProjectSpecCallback -- parses the XML via jQuery ($.parseXML),
//                           reads setup@interface and injects ape.js
//   * createProjectSave  -- renders the interface XML to a Blob download
//                           link; NOTE(review): destURL == "null" compares
//                           against the literal STRING "null" (presumably a
//                           templated-URL sentinel -- verify against caller),
//                           and only the local-download branch is
//                           implemented; no send-to-destURL path is visible
//   * AudioEngine        -- constructor; TRUNCATED in this chunk (see note at
//                           the end of the file)
// ============================================================================
djmoffat@693: /** djmoffat@693: * core.js djmoffat@693: * djmoffat@693: * Main script to run, calls all other core functions and manages loading/store to backend. djmoffat@693: * Also contains all global variables. djmoffat@693: */ djmoffat@693: djmoffat@693: /* create the web audio API context and store in audioContext*/ djmoffat@693: var audioContext; djmoffat@693: var projectXML; djmoffat@693: var audioEngineContext; djmoffat@693: var projectReturn; djmoffat@694: var preTestQuestions = document.createElement('PreTest'); djmoffat@694: var postTestQuestions = document.createElement('PostTest'); djmoffat@693: djmoffat@693: window.onload = function() { djmoffat@693: // Function called once the browser has loaded all files. djmoffat@693: // This should perform any initial commands such as structure / loading documents djmoffat@693: djmoffat@693: // Create a web audio API context djmoffat@694: // Fixed for cross-browser support djmoffat@694: var AudioContext = window.AudioContext || window.webkitAudioContext; djmoffat@693: audioContext = new AudioContext; djmoffat@693: djmoffat@693: // Create the audio engine object djmoffat@693: audioEngineContext = new AudioEngine(); djmoffat@693: }; djmoffat@693: djmoffat@693: function loadProjectSpec(url) { djmoffat@693: // Load the project document from the given URL, decode the XML and instruct audioEngine to get audio data djmoffat@693: // If url is null, request client to upload project XML document djmoffat@693: var r = new XMLHttpRequest(); djmoffat@693: r.open('GET',url,true); djmoffat@693: r.onload = function() { djmoffat@693: loadProjectSpecCallback(r.response); djmoffat@693: }; djmoffat@693: r.send(); djmoffat@693: }; djmoffat@693: djmoffat@693: function loadProjectSpecCallback(response) { djmoffat@693: // Function called after asynchronous download of XML project specification djmoffat@693: var decode = $.parseXML(response); djmoffat@693: projectXML = $(decode); djmoffat@693: djmoffat@693: // Now extract the setup tag 
// NOTE(review): the next physical line continues loadProjectSpecCallback.
// interfaceType is an Attr node (xmlSetup[0].attributes['interface']); its
// .value is read unguarded -- a spec document missing the "interface"
// attribute would throw here. TODO: guard / report a useful error. Also note
// that only the 'APE' interface is wired up; other values silently inject a
// <script> with no src.
djmoffat@693: var xmlSetup = projectXML.find('setup'); djmoffat@693: // Detect the interface to use and load the relevant javascripts. djmoffat@693: var interfaceType = xmlSetup[0].attributes['interface']; djmoffat@693: var interfaceJS = document.createElement('script'); djmoffat@693: interfaceJS.setAttribute("type","text/javascript"); djmoffat@693: if (interfaceType.value == 'APE') { djmoffat@693: interfaceJS.setAttribute("src","ape.js"); djmoffat@693: } djmoffat@693: document.getElementsByTagName("head")[0].appendChild(interfaceJS); djmoffat@693: } djmoffat@693: djmoffat@693: function createProjectSave(destURL) { djmoffat@693: // Save the data from interface into XML and send to destURL djmoffat@693: // If destURL is null then download XML in client djmoffat@693: // Now time to render file locally djmoffat@693: var xmlDoc = interfaceXMLSave(); djmoffat@693: if (destURL == "null" || destURL == undefined) { djmoffat@693: var parent = document.createElement("div"); djmoffat@693: parent.appendChild(xmlDoc); djmoffat@693: var file = [parent.innerHTML]; djmoffat@693: var bb = new Blob(file,{type : 'application/xml'}); djmoffat@693: var dnlk = window.URL.createObjectURL(bb); djmoffat@693: var a = document.createElement("a"); djmoffat@693: a.hidden = ''; djmoffat@693: a.href = dnlk; djmoffat@693: a.download = "save.xml"; djmoffat@693: a.textContent = "Save File"; djmoffat@693: djmoffat@693: var submitDiv = document.getElementById('download-point'); djmoffat@693: submitDiv.appendChild(a); djmoffat@693: } djmoffat@693: } djmoffat@693: djmoffat@693: function AudioEngine() { djmoffat@693: djmoffat@693: // Create two output paths, the main outputGain and fooGain. djmoffat@693: // Output gain is default to 1 and any items for playback route here djmoffat@693: // Foo gain is used for analysis to ensure paths get processed, but are not heard djmoffat@693: // because web audio will optimise and any route which does not go to the destination gets ignored. 
// NOTE(review): the next physical line is the AudioEngine constructor body.
// "this.fooGain.gain = 0;" looks like a bug once de-mangled: on a Web Audio
// GainNode, .gain is an AudioParam object, so this assignment replaces the
// param reference instead of muting -- muting should be
// "this.fooGain.gain.value = 0;". Confirm against the Web Audio API spec
// before changing.
djmoffat@693: this.outputGain = audioContext.createGain(); djmoffat@693: this.fooGain = audioContext.createGain(); djmoffat@693: this.fooGain.gain = 0; djmoffat@693: djmoffat@693: // Use this to detect playback state: 0 - stopped, 1 - playing djmoffat@693: this.status = 0; djmoffat@693: djmoffat@693: // Connect both gains to output djmoffat@693: this.outputGain.connect(audioContext.destination); djmoffat@693: this.fooGain.connect(audioContext.destination); djmoffat@693: djmoffat@693: // Create store for new audioObjects djmoffat@693: this.audioObjects = []; djmoffat@693: djmoffat@693: this.play = function() { djmoffat@693: // Send play command to all playback buffers for synchronised start djmoffat@693: // Also start timer callbacks to detect if playback has finished djmoffat@693: if (this.status == 0) { djmoffat@693: // First get current clock djmoffat@693: var timer = audioContext.currentTime; djmoffat@693: // Add 3 seconds djmoffat@693: timer += 3.0; djmoffat@693: djmoffat@693: // Send play to all tracks djmoffat@693: for (var i=0; i
// NOTE(review): this chunk ends TRUNCATED mid-statement, inside the for-loop
// header of AudioEngine.prototype play ("for (var i=0; i"). The remainder of
// play() and the rest of the AudioEngine constructor are outside this view --
// do not treat this as end-of-file, and do not reconstruct the loop by
// guessing; recover it from the repository history.