/**
 * core.js
 *
 * Main script to run, calls all other core functions and manages loading/store to backend.
 * Also contains all global variables.
 */

/* create the web audio API context and store in audioContext */
var audioContext;
var projectXML;
var audioEngineContext;

window.onload = function() {
    // Function called once the browser has loaded all files.
    // This should perform any initial commands such as structure / loading documents.

    // Create a web audio API context.
    // NOTE: prefer the standard AudioContext, falling back to the
    // webkit-prefixed constructor for older Chrome/Safari builds.
    var AudioContextClass = window.AudioContext || window.webkitAudioContext;
    audioContext = new AudioContextClass();

    // Create the audio engine object
    audioEngineContext = new AudioEngine();
};

function loadProjectSpec(url) {
    // Load the project document from the given URL, decode the XML and
    // instruct audioEngine to get audio data.
    // If url is null, request client to upload project XML document.
    var r = new XMLHttpRequest();
    r.open('GET', url, true);
    r.onload = function() {
        loadProjectSpecCallback(r.response);
    };
    r.send();
}

function loadProjectSpecCallback(response) {
    // Function called after asynchronous download of XML project specification.
    // Parses the XML, stores it in the global projectXML (as a jQuery-wrapped
    // document), then injects the interface script named by the <setup
    // interface="..."> attribute.
    var decode = $.parseXML(response);
    projectXML = $(decode);

    // Now extract the setup tag
    var xmlSetup = projectXML.find('setup');
    // NOTE(review): assumes <setup> exists and carries an 'interface'
    // attribute — a malformed project document will throw here.
    var interfaceType = xmlSetup[0].attributes['interface'];
    var interfaceJS = document.createElement('script');
    interfaceJS.setAttribute("type", "text/javascript");
    if (interfaceType.value === 'APE') {
        interfaceJS.setAttribute("src", "ape.js");
    }
    document.getElementsByTagName("head")[0].appendChild(interfaceJS);
}

function createProjectSave(destURL) {
    // Save the data from interface into XML and send to destURL.
    // If destURL is null then download XML in client.
}

function AudioEngine() {

    // Create two output paths, the main outputGain and fooGain.
    // Output gain is default to 1 and any items for playback route here.
    // Foo gain is used for analysis to ensure paths get processed, but are not heard
    // because web audio will optimise and any route which does not go to the
    // destination gets ignored.
    this.outputGain = audioContext.createGain();
    this.fooGain = audioContext.createGain();
    // BUGFIX: gain is an AudioParam object — assigning a number directly
    // replaces the property instead of silencing the node. Set .value.
    this.fooGain.gain.value = 0;

    // Connect both gains to output
    this.outputGain.connect(audioContext.destination);
    this.fooGain.connect(audioContext.destination);

    // Create store for new audioObjects
    this.audioObjects = [];

    this.play = function() {
        // Send play command to all playback buffers for synchronised start.
        // Also start timer callbacks to detect if playback has finished.
    };

    this.stop = function() {
        // Send stop and reset command to all playback buffers.
    };

    this.newTrack = function(url) {
        // Pull data from given URL into new audio buffer.
        // URLs must either be from the same source OR be setup to
        // 'Access-Control-Allow-Origin'.
        var request = new XMLHttpRequest();
        request.open('GET', url, true);
        request.responseType = 'arraybuffer';

        // Create the audioObject with ID of the new track length.
        // BUGFIX: declare locally — was an implicit global, so concurrent
        // newTrack calls could stomp each other's id.
        var audioObjectId = this.audioObjects.length;
        this.audioObjects[audioObjectId] = new audioObject(audioObjectId);

        // Create callback to decode the data asynchronously
        request.onload = function() {
            audioContext.decodeAudioData(request.response, function(decodedData) {
                var audioObj = audioEngineContext.audioObjects[audioObjectId];
                audioObj.buffer = decodedData;
                audioObj.bufferNode.buffer = audioObj.buffer;
                audioObj.state = 1;
            }, function() {
                // BUGFIX: the error handler must be a function — the original
                // passed the *result* of console.log (undefined), logging the
                // error message unconditionally at call time.
                console.log("Err - Buffer not added to " + audioObjectId);
            });
        };
        request.send();
    };

}

function audioObject(id) {
    // The main buffer object with common control nodes to the AudioEngine.

    this.id = id;
    this.state = 0; // 0 - no data, 1 - ready

    // Create a buffer and external gain control to allow internal patching
    // of effects and volume leveling.
    this.bufferNode = audioContext.createBufferSource();
    this.outputGain = audioContext.createGain();

    // Connect buffer to the audio graph
    this.bufferNode.connect(this.outputGain);
    this.outputGain.connect(audioEngineContext.outputGain);

    // the audiobuffer is not designed for multi-start playback.
    // When stopped, the buffer node is deleted and recreated with the stored buffer.
    this.buffer;

    this.play = function(startTime) {
        this.bufferNode.start(startTime);
    };

    this.stop = function() {
        this.bufferNode.stop(0);
        this.bufferNode = audioContext.createBufferSource();
        this.bufferNode.connect(this.outputGain);
        this.bufferNode.buffer = this.buffer;
    };

}