/**
 * core.js
 *
 * Main script to run, calls all other core functions and manages loading/storing to the backend.
 * Also contains all global variables.
 */

/* Create the Web Audio API context and store it in audioContext */
var audioContext;
var projectXML;
var audioEngineContext;

window.onload = function() {
    // Function called once the browser has loaded all files.
    // This should perform any initial commands such as structuring / loading documents.

    // Create a Web Audio API context
    // NOTE: Currently this will only work with webkit browsers (Chrome/Safari)!
    audioContext = new webkitAudioContext();

    // Create the audio engine object
    audioEngineContext = new AudioEngine();
};

function loadProjectSpec(url) {
    // Load the project document from the given URL, decode the XML and instruct audioEngine to get audio data.
    // If url is null, request the client to upload a project XML document (see the upload sketch further below).
    var r = new XMLHttpRequest();
    r.open('GET', url, true);
    r.onload = function() {
        loadProjectSpecCallback(r.response);
    };
    r.send();
}

function loadProjectSpecCallback(response) {
    // Function called after asynchronous download of the XML project specification.
    var decode = $.parseXML(response);
    projectXML = $(decode);

    // Now extract the setup tag and load the matching interface script.
    var xmlSetup = projectXML.find('setup');
    var interfaceType = xmlSetup[0].attributes['interface'];
    var interfaceJS = document.createElement('script');
    interfaceJS.setAttribute("type", "text/javascript");
    if (interfaceType.value == 'APE') {
        interfaceJS.setAttribute("src", "ape.js");
    }
    document.getElementsByTagName("head")[0].appendChild(interfaceJS);
}

function createProjectSave(destURL) {
    // Save the data from the interface into XML and send it to destURL.
    // If destURL is null, download the XML on the client instead (see the sketch below).
}
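// A minimal sketch of how createProjectSave might be completed; it is not part
// of the original implementation. The helper name, the resultsXML parameter
// (an XML Document assumed to be built by the interface script) and the
// POST / Blob-download mechanism are all assumptions.
function createProjectSaveSketch(destURL, resultsXML) {
    var xmlString = new XMLSerializer().serializeToString(resultsXML);
    if (destURL) {
        // Send the serialised XML to the backend.
        var post = new XMLHttpRequest();
        post.open('POST', destURL, true);
        post.setRequestHeader('Content-Type', 'text/xml');
        post.send(xmlString);
    } else {
        // No destination given: offer the XML as a download in the client
        // (relies on the HTML5 download attribute being supported).
        var link = document.createElement('a');
        link.href = window.URL.createObjectURL(new Blob([xmlString], {type: 'text/xml'}));
        link.download = 'results.xml';
        link.click();
    }
}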
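// Sketch of the client-upload path mentioned in loadProjectSpec (when url is
// null). The helper name and the dynamically created file input are
// assumptions, not part of the original code; the chosen file is read as text
// and handed to the existing callback.
function requestProjectUploadSketch() {
    var picker = document.createElement('input');
    picker.type = 'file';
    picker.onchange = function() {
        var reader = new FileReader();
        reader.onload = function() {
            loadProjectSpecCallback(reader.result);
        };
        reader.readAsText(picker.files[0]);
    };
    document.body.appendChild(picker);
}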
function AudioEngine() {

    // Create two output paths, the main outputGain and fooGain.
    // outputGain defaults to 1 and any items for playback route here.
    // fooGain is used for analysis to ensure paths get processed but are not heard,
    // because Web Audio will optimise away any route which does not reach the destination.
    this.outputGain = audioContext.createGain();
    this.fooGain = audioContext.createGain();
    this.fooGain.gain.value = 0;

    // Connect both gains to the output
    this.outputGain.connect(audioContext.destination);
    this.fooGain.connect(audioContext.destination);

    // Create store for new audioObjects
    this.audioObjects = [];

    this.play = function() {
        // Send play command to all playback buffers for synchronised start
        // (see playAllSketch after this constructor).
        // Also start timer callbacks to detect if playback has finished.
    };

    this.stop = function() {
        // Send stop and reset command to all playback buffers.
    };

    this.newTrack = function(url) {
        // Pull data from the given URL into a new audio buffer.
        // URLs must either be from the same origin OR the server must send 'Access-Control-Allow-Origin'.
        var request = new XMLHttpRequest();
        request.open('GET', url, true);
        request.responseType = 'arraybuffer';

        // Create the audioObject with an ID equal to the current track count.
        var audioObjectId = this.audioObjects.length;
        this.audioObjects[audioObjectId] = new audioObject(audioObjectId);

        // Create callback to decode the data asynchronously
        request.onload = function() {
            audioContext.decodeAudioData(request.response, function(decodedData) {
                var audioObj = audioEngineContext.audioObjects[audioObjectId];
                audioObj.buffer = decodedData;
                audioObj.bufferNode.buffer = audioObj.buffer;
                audioObj.state = 1;
            }, function() {
                console.log("Err - Buffer not added to " + audioObjectId);
            });
        };
        request.send();
    };

}
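// Sketch of how the play/stop stubs above might be completed: every ready
// buffer is given the same absolute start time so playback begins in sync.
// The helper names and the 0.1 s scheduling offset are assumptions, not part
// of the original code.
function playAllSketch(engine) {
    var startTime = audioContext.currentTime + 0.1; // small offset so every node is scheduled before it fires
    for (var i = 0; i < engine.audioObjects.length; i++) {
        if (engine.audioObjects[i].state == 1) {
            engine.audioObjects[i].play(startTime);
        }
    }
}

function stopAllSketch(engine) {
    for (var i = 0; i < engine.audioObjects.length; i++) {
        engine.audioObjects[i].stop();
    }
}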
function audioObject(id) {
    // The main buffer object with common control nodes to the AudioEngine.

    this.id = id;
    this.state = 0; // 0 - no data, 1 - ready

    // Create a buffer source and an external gain control to allow internal patching of effects and volume levelling.
    this.bufferNode = audioContext.createBufferSource();
    this.outputGain = audioContext.createGain();

    // Connect the buffer to the audio graph
    this.bufferNode.connect(this.outputGain);
    this.outputGain.connect(audioEngineContext.outputGain);

    // An AudioBufferSourceNode is not designed for multi-start playback.
    // When stopped, the buffer node is deleted and recreated with the stored buffer.
    this.buffer;

    this.play = function(startTime) {
        this.bufferNode.start(startTime);
    };

    this.stop = function() {
        this.bufferNode.stop(0);
        this.bufferNode = audioContext.createBufferSource();
        this.bufferNode.connect(this.outputGain);
        this.bufferNode.buffer = this.buffer;
    };

}
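// Illustrative usage (not part of the original file). The audio URL is a
// placeholder, and in the real application newTrack() is expected to be driven
// by the loaded interface script (e.g. ape.js) rather than called directly.
function demoSketch() {
    audioEngineContext.newTrack('audio/track0.wav'); // placeholder URL
    var obj = audioEngineContext.audioObjects[0];
    // Wait until the asynchronous decode has finished before starting playback.
    var poll = setInterval(function() {
        if (obj.state == 1) {
            clearInterval(poll);
            obj.play(audioContext.currentTime);
        }
    }, 100);
}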