webaudioevaluationtool
comparison core.js @ 1410:c0098cee84f2
Buffers loaded into a pool and picked when needed.
author | Nicholas Jillings <nickjillings@users.noreply.github.com> |
date | Mon, 07 Dec 2015 18:26:12 +0000 |
parents | 8e19255b85b3 |
children | 67c6048d920f |
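
The change described above replaces per-object audio loading with a shared pool: `AudioEngine` gains a `buffers` array and a `bufferObj` constructor that fetches and decodes each URL once, and callers look a URL up in the pool before creating a new entry. Below is a minimal sketch of that lookup-or-create step, using the field names from the diff (`buffers`, `bufferObj`, `url`); the helper name is illustrative and not part of the changeset.

```javascript
// Illustrative helper (not in the changeset): return the pooled buffer for a
// URL, creating and enqueueing a new bufferObj on a pool miss.
function getOrCreateBuffer(audioEngineContext, url) {
    // Pool entries are keyed by their full URL (hostURL + relative url)
    for (var i = 0; i < audioEngineContext.buffers.length; i++) {
        if (audioEngineContext.buffers[i].url == url) {
            return audioEngineContext.buffers[i];
        }
    }
    // Pool miss: start the asynchronous fetch/decode and remember the entry
    var buffer = new audioEngineContext.bufferObj(url);
    audioEngineContext.buffers.push(buffer);
    return buffer;
}
```

Keying on the full URL means a fragment referenced by several audio elements is fetched and decoded only once.
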
1409:6d5977ba2724 | 1410:c0098cee84f2 |
---|---|
38 // Create the specification object | 38 // Create the specification object |
39 specification = new Specification(); | 39 specification = new Specification(); |
40 | 40 |
41 // Create the interface object | 41 // Create the interface object |
42 interfaceContext = new Interface(specification); | 42 interfaceContext = new Interface(specification); |
43 // Define window callbacks for interface | |
44 window.onresize = function(event){interfaceContext.resizeWindow(event);}; | |
43 }; | 45 }; |
44 | 46 |
45 function loadProjectSpec(url) { | 47 function loadProjectSpec(url) { |
46 // Load the project document from the given URL, decode the XML and instruct audioEngine to get audio data | 48 // Load the project document from the given URL, decode the XML and instruct audioEngine to get audio data |
47 // If url is null, request client to upload project XML document | 49 // If url is null, request client to upload project XML document |
62 projectXML = parse.parseFromString(response,'text/xml'); | 64 projectXML = parse.parseFromString(response,'text/xml'); |
63 | 65 |
64 // Build the specification | 66 // Build the specification |
65 specification.decode(projectXML); | 67 specification.decode(projectXML); |
66 | 68 |
67 // Create the audio engine object | |
68 audioEngineContext = new AudioEngine(specification); | |
69 | |
70 testState.stateMap.push(specification.preTest); | |
71 | |
72 $(specification.audioHolders).each(function(index,elem){ | |
73 testState.stateMap.push(elem); | |
74 }); | |
75 | |
76 testState.stateMap.push(specification.postTest); | |
77 | |
78 | |
79 | |
80 // Detect the interface to use and load the relevant javascripts. | 69 // Detect the interface to use and load the relevant javascripts. |
81 var interfaceJS = document.createElement('script'); | 70 var interfaceJS = document.createElement('script'); |
82 interfaceJS.setAttribute("type","text/javascript"); | 71 interfaceJS.setAttribute("type","text/javascript"); |
83 if (specification.interfaceType == 'APE') { | 72 if (specification.interfaceType == 'APE') { |
84 interfaceJS.setAttribute("src","ape.js"); | 73 interfaceJS.setAttribute("src","ape.js"); |
102 | 91 |
103 document.getElementsByTagName("head")[0].appendChild(css); | 92 document.getElementsByTagName("head")[0].appendChild(css); |
104 } | 93 } |
105 document.getElementsByTagName("head")[0].appendChild(interfaceJS); | 94 document.getElementsByTagName("head")[0].appendChild(interfaceJS); |
106 | 95 |
107 // Define window callbacks for interface | 96 // Create the audio engine object |
108 window.onresize = function(event){interfaceContext.resizeWindow(event);}; | 97 audioEngineContext = new AudioEngine(specification); |
98 | |
99 testState.stateMap.push(specification.preTest); | |
100 | |
101 $(specification.audioHolders).each(function(index,elem){ | |
102 testState.stateMap.push(elem); | |
103 $(elem.audioElements).each(function(i,audioElem){ | |
104 var URL = audioElem.parent.hostURL + audioElem.url; | |
105 var buffer = null; | |
106 for (var i=0; i<audioEngineContext.buffers.length; i++) | |
107 { | |
108 if (URL == audioEngineContext.buffers[i].url) | |
109 { | |
110 buffer = audioEngineContext.buffers[i]; | |
111 break; | |
112 } | |
113 } | |
114 if (buffer == null) | |
115 { | |
116 buffer = new audioEngineContext.bufferObj(URL); | |
117 audioEngineContext.buffers.push(buffer); | |
118 } | |
119 }); | |
120 }); | |
121 | |
122 testState.stateMap.push(specification.postTest); | |
109 } | 123 } |
110 | 124 |
111 function createProjectSave(destURL) { | 125 function createProjectSave(destURL) { |
112 // Save the data from interface into XML and send to destURL | 126 // Save the data from interface into XML and send to destURL |
113 // If destURL is null then download XML in client | 127 // If destURL is null then download XML in client |
701 this.loopPlayback = false; | 715 this.loopPlayback = false; |
702 | 716 |
703 // Create store for new audioObjects | 717 // Create store for new audioObjects |
704 this.audioObjects = []; | 718 this.audioObjects = []; |
705 | 719 |
720 this.buffers = []; | |
721 this.bufferObj = function(url) | |
722 { | |
723 this.url = url; | |
724 this.buffer = null; | |
725 this.xmlRequest = new XMLHttpRequest(); | |
726 this.users = []; | |
727 this.xmlRequest.open('GET',this.url,true); | |
728 this.xmlRequest.responseType = 'arraybuffer'; | |
729 | |
730 var bufferObj = this; | |
731 | |
732 // Create callback to decode the data asynchronously | |
733 this.xmlRequest.onloadend = function() { | |
734 audioContext.decodeAudioData(bufferObj.xmlRequest.response, function(decodedData) { | |
735 bufferObj.buffer = decodedData; | |
736 for (var i=0; i<bufferObj.users.length; i++) | |
737 { | |
738 bufferObj.users[i].state = 1; | |
739 if (bufferObj.users[i].interfaceDOM != null) | |
740 { | |
741 bufferObj.users[i].interfaceDOM.enable(); | |
742 } | |
743 } | |
744 }, function(){ | |
745 // Should only be called if there was an error, but sometimes gets called continuously | |
746 // Check here if the error is genuine | |
747 if (bufferObj.buffer == undefined) { | |
748 // Genuine error | |
749 console.log('FATAL - Error loading buffer from '+bufferObj.url); | |
750 if (bufferObj.xmlRequest.status == 404) | |
751 { | |
752 console.log('FATAL - Fragment at '+bufferObj.url+' returned a 404 error'); | |
753 console.log('URL: '+bufferObj.url); | |
754 errorSessionDump('Fragment at '+bufferObj.url+' returned a 404 error'); | |
755 } | |
756 } | |
757 }); | |
758 }; | |
759 this.xmlRequest.send(); | |
760 }; | |
761 | |
706 this.play = function(id) { | 762 this.play = function(id) { |
707 // Start the timer and set the audioEngine state to playing (1) | 763 // Start the timer and set the audioEngine state to playing (1) |
708 if (this.status == 0 && this.loopPlayback) { | 764 if (this.status == 0 && this.loopPlayback) { |
709 // Check if all audioObjects are ready | 765 // Check if all audioObjects are ready |
710 if(this.checkAllReady()) | 766 if(this.checkAllReady()) |
770 | 826 |
771 // Create the audioObject with ID of the new track length; | 827 // Create the audioObject with ID of the new track length; |
772 audioObjectId = this.audioObjects.length; | 828 audioObjectId = this.audioObjects.length; |
773 this.audioObjects[audioObjectId] = new audioObject(audioObjectId); | 829 this.audioObjects[audioObjectId] = new audioObject(audioObjectId); |
774 | 830 |
775 // AudioObject will get track itself. | 831 // Check if audioObject buffer is currently stored by full URL |
832 var URL = element.parent.hostURL + element.url; | |
833 var buffer = null; | |
834 for (var i=0; i<this.buffers.length; i++) | |
835 { | |
836 if (URL == this.buffers[i].url) | |
837 { | |
838 buffer = this.buffers[i]; | |
839 break; | |
840 } | |
841 } | |
842 if (buffer == null) | |
843 { | |
844 console.log("[WARN]: Buffer was not loaded in pre-test!"); | |
845 buffer = new this.bufferObj(URL); | |
846 this.buffers.push(buffer); | |
847 } | |
776 this.audioObjects[audioObjectId].specification = element; | 848 this.audioObjects[audioObjectId].specification = element; |
777 this.audioObjects[audioObjectId].constructTrack(element.parent.hostURL + element.url); | 849 this.audioObjects[audioObjectId].buffer = buffer; |
850 if (buffer.buffer != null) | |
851 { | |
852 this.audioObjects[audioObjectId].state = 1; | |
853 } | |
854 buffer.users.push(this.audioObjects[audioObjectId]); | |
778 return this.audioObjects[audioObjectId]; | 855 return this.audioObjects[audioObjectId]; |
779 }; | 856 }; |
780 | 857 |
781 this.newTestPage = function() { | 858 this.newTestPage = function() { |
782 this.state = 0; | 859 this.state = 0; |
783 this.audioObjectsReady = false; | 860 this.audioObjectsReady = false; |
784 this.metric.reset(); | 861 this.metric.reset(); |
862 for (var i=0; i < this.buffers.length; i++) | |
863 { | |
864 this.buffers[i].users = []; | |
865 } | |
785 this.audioObjects = []; | 866 this.audioObjects = []; |
786 }; | 867 }; |
787 | 868 |
788 this.checkAllPlayed = function() { | 869 this.checkAllPlayed = function() { |
789 arr = []; | 870 arr = []; |
883 this.metric.stopListening(audioEngineContext.timer.getTestTime()); | 964 this.metric.stopListening(audioEngineContext.timer.getTestTime()); |
884 } | 965 } |
885 }; | 966 }; |
886 | 967 |
887 this.play = function(startTime) { | 968 this.play = function(startTime) { |
888 if (this.bufferNode == undefined) { | 969 if (this.bufferNode == undefined && this.buffer.buffer != undefined) { |
889 this.bufferNode = audioContext.createBufferSource(); | 970 this.bufferNode = audioContext.createBufferSource(); |
890 this.bufferNode.owner = this; | 971 this.bufferNode.owner = this; |
891 this.bufferNode.connect(this.outputGain); | 972 this.bufferNode.connect(this.outputGain); |
892 this.bufferNode.buffer = this.buffer; | 973 this.bufferNode.buffer = this.buffer.buffer; |
893 this.bufferNode.loop = audioEngineContext.loopPlayback; | 974 this.bufferNode.loop = audioEngineContext.loopPlayback; |
894 this.bufferNode.onended = function(event) { | 975 this.bufferNode.onended = function(event) { |
895 // Safari does not like using 'this' to reference the calling object! | 976 // Safari does not like using 'this' to reference the calling object! |
896 //event.currentTarget.owner.metric.stopListening(audioEngineContext.timer.getTestTime(),event.currentTarget.owner.getCurrentPosition()); | 977 //event.currentTarget.owner.metric.stopListening(audioEngineContext.timer.getTestTime(),event.currentTarget.owner.getCurrentPosition()); |
897 event.currentTarget.owner.stop(); | 978 event.currentTarget.owner.stop(); |
915 this.getCurrentPosition = function() { | 996 this.getCurrentPosition = function() { |
916 var time = audioEngineContext.timer.getTestTime(); | 997 var time = audioEngineContext.timer.getTestTime(); |
917 if (this.bufferNode != undefined) { | 998 if (this.bufferNode != undefined) { |
918 if (this.bufferNode.loop == true) { | 999 if (this.bufferNode.loop == true) { |
919 if (audioEngineContext.status == 1) { | 1000 if (audioEngineContext.status == 1) { |
920 return (time-this.metric.listenStart)%this.buffer.duration; | 1001 return (time-this.metric.listenStart)%this.buffer.buffer.duration; |
921 } else { | 1002 } else { |
922 return 0; | 1003 return 0; |
923 } | 1004 } |
924 } else { | 1005 } else { |
925 if (this.metric.listenHold) { | 1006 if (this.metric.listenHold) { |
2432 this.playbackObject; | 2513 this.playbackObject; |
2433 | 2514 |
2434 this.setTimePerPixel = function(audioObject) { | 2515 this.setTimePerPixel = function(audioObject) { |
2435 //maxTime must be in seconds | 2516 //maxTime must be in seconds |
2436 this.playbackObject = audioObject; | 2517 this.playbackObject = audioObject; |
2437 this.maxTime = audioObject.buffer.duration; | 2518 this.maxTime = audioObject.buffer.buffer.duration; |
2438 var width = 490; //500 - 10, 5 each side of the tracker head | 2519 var width = 490; //500 - 10, 5 each side of the tracker head |
2439 this.timePerPixel = this.maxTime/490; | 2520 this.timePerPixel = this.maxTime/490; |
2440 if (this.maxTime < 60) { | 2521 if (this.maxTime < 60) { |
2441 this.curTimeSpan.textContent = '0.00'; | 2522 this.curTimeSpan.textContent = '0.00'; |
2442 } else { | 2523 } else { |
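
Beyond the lookup itself, the diff shows how pooled buffers hand decoded audio back to their consumers: each `bufferObj` keeps a `users` list, `newTrack` registers the new `audioObject` there (marking it ready at once if the buffer is already decoded), and the `decodeAudioData` callback flips every registered user to the ready state and enables its interface element. A condensed sketch of that flow follows; the helper functions are illustrative, and only the field names (`buffer`, `users`, `state`, `interfaceDOM`) come from the changeset.

```javascript
// Illustrative helpers (not in the changeset) condensing the ready-notification flow.
function registerUser(bufferObj, audioObject) {
    audioObject.buffer = bufferObj;
    if (bufferObj.buffer != null) {
        // Already decoded (e.g. during the pre-test load): ready immediately
        audioObject.state = 1;
    }
    bufferObj.users.push(audioObject);
}

function markUsersReady(bufferObj, decodedData) {
    bufferObj.buffer = decodedData;
    for (var i = 0; i < bufferObj.users.length; i++) {
        bufferObj.users[i].state = 1;
        if (bufferObj.users[i].interfaceDOM != null) {
            bufferObj.users[i].interfaceDOM.enable();
        }
    }
}
```

Because `newTestPage` clears each buffer's `users` list rather than the pool itself, decoded audio persists between test pages while stale listeners are dropped.
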