webaudioevaluationtool: comparison js/core.js @ 2575:249a1152e525
Merge branch 'master' into Dev_main
# Conflicts:
# interfaces/AB.js
# js/core.js
# js/specification.js
author | Nicholas Jillings <nicholas.jillings@mail.bcu.ac.uk> |
---|---|
date | Tue, 18 Oct 2016 15:49:58 +0100 |
parents | 161d63a60b9e 9b536838a962 |
children | bf17cc19c1c0 |
2574:b6bc57a2a681 | 2575:249a1152e525 |
---|---|
1111 this.currentIndex++; | 1111 this.currentIndex++; |
1112 if (this.currentIndex < this.popupOptions.length) { | 1112 if (this.currentIndex < this.popupOptions.length) { |
1113 this.postNode(); | 1113 this.postNode(); |
1114 } else { | 1114 } else { |
1115 // Reached the end of the popupOptions | 1115 // Reached the end of the popupOptions |
1116 this.popupContent.innerHTML = ""; | 1116 this.popupTitle.textContent = ""; |
| 1117 this.popupResponse.innerHTML = ""; |
1117 this.hidePopup(); | 1118 this.hidePopup(); |
1118 for (var node of this.popupOptions) { | 1119 for (var node of this.popupOptions) { |
1119 this.store.postResult(node); | 1120 this.store.postResult(node); |
1120 } | 1121 } |
1121 this.store.complete(); | 1122 this.store.complete(); |
1385 // Output gain is default to 1 and any items for playback route here | 1386 // Output gain is default to 1 and any items for playback route here |
1386 // Foo gain is used for analysis to ensure paths get processed, but are not heard | 1387 // Foo gain is used for analysis to ensure paths get processed, but are not heard |
1387 // because web audio will optimise and any route which does not go to the destination gets ignored. | 1388 // because web audio will optimise and any route which does not go to the destination gets ignored. |
1388 this.outputGain = audioContext.createGain(); | 1389 this.outputGain = audioContext.createGain(); |
1389 this.fooGain = audioContext.createGain(); | 1390 this.fooGain = audioContext.createGain(); |
1390 this.fooGain.gain = 0; | 1391 this.fooGain.gain.value = 0; |
1391 | 1392 |
1392 // Use this to detect playback state: 0 - stopped, 1 - playing | 1393 // Use this to detect playback state: 0 - stopped, 1 - playing |
1393 this.status = 0; | 1394 this.status = 0; |
1394 | 1395 |
1395 // Connect both gains to output | 1396 // Connect both gains to output |
1404 this.loopPlayback = false; | 1405 this.loopPlayback = false; |
1405 this.synchPlayback = false; | 1406 this.synchPlayback = false; |
1406 this.pageSpecification = null; | 1407 this.pageSpecification = null; |
1407 | 1408 |
1408 this.pageStore = null; | 1409 this.pageStore = null; |
| 1410 |
| 1411 // Chrome 53+ Error solution |
| 1412 // Empty buffer for keep-alive |
| 1413 var nullBuffer = audioContext.createBuffer(1, audioContext.sampleRate, audioContext.sampleRate); |
| 1414 this.nullBufferSource = audioContext.createBufferSource(); |
| 1415 this.nullBufferSource.buffer = nullBuffer; |
| 1416 this.nullBufferSource.loop = true; |
| 1417 this.nullBufferSource.start(0); |
1409 | 1418 |
1410 // Create store for new audioObjects | 1419 // Create store for new audioObjects |
1411 this.audioObjects = []; | 1420 this.audioObjects = []; |
1412 | 1421 |
1413 this.buffers = []; | 1422 this.buffers = []; |
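Two related Web Audio techniques appear in this hunk: the zero-gain "foo" path that stops the browser from pruning analysis-only branches, and the silent looping buffer added in 2575 as the Chrome 53+ keep-alive. Below is a minimal sketch of that engine setup; `outputGain`, `fooGain` and `nullBufferSource` mirror the diff, while `EngineSketch` and everything else are illustrative assumptions rather than the project's actual constructor.

```js
// Minimal sketch of the engine graph implied by this hunk (not the project's constructor).
var audioContext = new (window.AudioContext || window.webkitAudioContext)();

function EngineSketch(context) {
    // Audible path: playback nodes route through here to the speakers.
    this.outputGain = context.createGain();
    this.outputGain.connect(context.destination);

    // Inaudible "foo" path: gain is 0 but the route still reaches the
    // destination, so analysis nodes hanging off it keep being processed
    // instead of being optimised away.
    this.fooGain = context.createGain();
    this.fooGain.gain.value = 0; // set .value; assigning to .gain itself is ignored
    this.fooGain.connect(context.destination);

    // Chrome 53+ keep-alive from the diff: a silent, looping one-second
    // buffer that is started once and never stops, so the output path is
    // never idle. (The diff connects it to each audioObject's outputGain.)
    var nullBuffer = context.createBuffer(1, context.sampleRate, context.sampleRate);
    this.nullBufferSource = context.createBufferSource();
    this.nullBufferSource.buffer = nullBuffer;
    this.nullBufferSource.loop = true;
    this.nullBufferSource.connect(this.outputGain);
    this.nullBufferSource.start(0);
}

var engine = new EngineSketch(audioContext);
```

This is also why the right-hand column's `this.fooGain.gain.value = 0;` is a real fix: the left column assigns to `gain` itself, a read-only AudioParam, so the assignment is silently ignored and the path stays at its default gain of 1.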
1557 if (typeof copybuffer.copyToChannel == "function") { | 1566 if (typeof copybuffer.copyToChannel == "function") { |
1558 copybuffer.copyToChannel(sub_frame, c); | 1567 copybuffer.copyToChannel(sub_frame, c); |
1559 } else { | 1568 } else { |
1560 var dst = copybuffer.getChannelData(c); | 1569 var dst = copybuffer.getChannelData(c); |
1561 for (var n = 0; n < newLength; n++) | 1570 for (var n = 0; n < newLength; n++) |
1562 dst[n] = src[n + start_sample]; | 1571 dst[n] = buffer[n + start_sample]; |
1563 } | 1572 } |
1564 } | 1573 } |
1565 return copybuffer; | 1574 return copybuffer; |
1566 } | 1575 } |
1567 }; | 1576 }; |
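The right-hand column keeps the feature test on `AudioBuffer.copyToChannel` with a per-sample fallback for engines that lack it. A hedged, self-contained version of that pattern could look like the helper below; `cropSketch` and its parameters are illustrative and not the project's `cropBuffer` signature.

```js
// Illustrative crop helper using copyToChannel when available, assuming
// `context` is an AudioContext and `source` an already-decoded AudioBuffer.
function cropSketch(context, source, startTime, stopTime) {
    var startSample = Math.floor(startTime * source.sampleRate);
    var stopSample = Math.floor(stopTime * source.sampleRate);
    var newLength = stopSample - startSample;
    var cropped = context.createBuffer(source.numberOfChannels, newLength, source.sampleRate);
    for (var c = 0; c < source.numberOfChannels; c++) {
        var src = source.getChannelData(c);
        // Frame of the original channel covering [startSample, stopSample).
        var subFrame = src.subarray(startSample, stopSample);
        if (typeof cropped.copyToChannel === "function") {
            // Modern path: bulk copy into channel c.
            cropped.copyToChannel(subFrame, c);
        } else {
            // Fallback for older implementations: copy sample by sample.
            var dst = cropped.getChannelData(c);
            for (var n = 0; n < newLength; n++) {
                dst[n] = src[n + startSample];
            }
        }
    }
    return cropped;
}
```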
1768 | 1777 |
1769 this.onplayGain = 1.0; | 1778 this.onplayGain = 1.0; |
1770 | 1779 |
1771 // Connect buffer to the audio graph | 1780 // Connect buffer to the audio graph |
1772 this.outputGain.connect(audioEngineContext.outputGain); | 1781 this.outputGain.connect(audioEngineContext.outputGain); |
| 1782 audioEngineContext.nullBufferSource.connect(this.outputGain); |
1773 | 1783 |
1774 // the audiobuffer is not designed for multi-start playback | 1784 // the audiobuffer is not designed for multi-start playback |
1775 // When stopped, the buffer node is deleted and recreated with the stored buffer. | 1785 // When stopped, the buffer node is deleted and recreated with the stored buffer. |
1776 this.buffer; | 1786 this.buffer; |
1777 | 1787 |
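The comment about multi-start playback points at a Web Audio constraint: an `AudioBufferSourceNode` can only be started once, so the object keeps the decoded `AudioBuffer` and rebuilds the source node for every playback. A minimal sketch of that pattern, with `PlayerSketch` and its parameters assumed for illustration:

```js
// Each playback gets a brand-new AudioBufferSourceNode; the decoded
// AudioBuffer itself is reusable and stays in `storedBuffer`.
function PlayerSketch(context, storedBuffer, destinationGain) {
    this.bufferNode = undefined;

    this.play = function (when) {
        this.bufferNode = context.createBufferSource();
        this.bufferNode.buffer = storedBuffer;
        this.bufferNode.connect(destinationGain);
        this.bufferNode.start(when);
    };

    this.stop = function (when) {
        if (this.bufferNode !== undefined) {
            this.bufferNode.stop(when);
            // Source nodes cannot be started twice; drop the reference and
            // recreate it on the next play() call.
            this.bufferNode = undefined;
        }
    };
}
```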
1791 var preSilenceTime = this.specification.preSilence || this.specification.parent.preSilence || specification.preSilence || 0.0; | 1801 var preSilenceTime = this.specification.preSilence || this.specification.parent.preSilence || specification.preSilence || 0.0; |
1792 var postSilenceTime = this.specification.postSilence || this.specification.parent.postSilence || specification.postSilence || 0.0; | 1802 var postSilenceTime = this.specification.postSilence || this.specification.parent.postSilence || specification.postSilence || 0.0; |
1793 var startTime = this.specification.startTime; | 1803 var startTime = this.specification.startTime; |
1794 var stopTime = this.specification.stopTime; | 1804 var stopTime = this.specification.stopTime; |
1795 var copybuffer = new callee.constructor(); | 1805 var copybuffer = new callee.constructor(); |
1796 if (isFinite(startTime) || isFinite(stopTime)) { | 1806 |
1797 copybuffer.buffer = callee.cropBuffer(startTime, stopTime); | 1807 copybuffer.buffer = callee.cropBuffer(startTime || 0, stopTime || callee.buffer.duration); |
1798 } | |
1799 if (preSilenceTime != 0 || postSilenceTime != 0) { | 1808 if (preSilenceTime != 0 || postSilenceTime != 0) { |
1800 if (copybuffer.buffer == undefined) { | 1809 copybuffer.buffer = copybuffer.copyBuffer(preSilenceTime, postSilenceTime); |
1801 copybuffer.buffer = callee.copyBuffer(preSilenceTime, postSilenceTime); | 1810 } |
1802 } else { | 1811 |
1803 copybuffer.buffer = copybuffer.copyBuffer(preSilenceTime, postSilenceTime); | 1812 copybuffer.lufs = callee.buffer.lufs; |
1804 } | 1813 this.buffer = copybuffer; |
1805 } | |
1806 | 1814 |
1807 var targetLUFS = this.specification.parent.loudness || specification.loudness; | 1815 var targetLUFS = this.specification.parent.loudness || specification.loudness; |
1808 if (typeof targetLUFS === "number" && isFinite(targetLUFS)) { | 1816 if (typeof targetLUFS === "number" && isFinite(targetLUFS)) { |
1809 this.buffer.buffer.playbackGain = decibelToLinear(targetLUFS - this.buffer.buffer.lufs); | 1817 this.buffer.buffer.playbackGain = decibelToLinear(targetLUFS - this.buffer.buffer.lufs); |
1810 } else { | 1818 } else { |
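The loudness branch scales playback by the difference between the target LUFS and the buffer's measured LUFS, converted from decibels to a linear gain. `decibelToLinear` is a project helper not shown in this hunk; a standard stand-in and a worked value are sketched below, with all surrounding names illustrative.

```js
// Standard dB-to-linear conversion (assumed to match the project's helper).
function decibelToLinearSketch(db) {
    return Math.pow(10, db / 20);
}

// Example: a clip measured at -23 LUFS, normalised to a -20 LUFS target,
// needs +3 dB, i.e. a linear playback gain of about 1.41.
var measuredLUFS = -23;
var targetLUFS = -20;
var playbackGain = decibelToLinearSketch(targetLUFS - measuredLUFS);
console.log(playbackGain.toFixed(2)); // ~1.41
```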
1858 //event.currentTarget.owner.metric.stopListening(audioEngineContext.timer.getTestTime(),event.currentTarget.owner.getCurrentPosition()); | 1866 //event.currentTarget.owner.metric.stopListening(audioEngineContext.timer.getTestTime(),event.currentTarget.owner.getCurrentPosition()); |
1859 if (event.currentTarget != null) { | 1867 if (event.currentTarget != null) { |
1860 event.currentTarget.owner.stop(audioContext.currentTime + 1); | 1868 event.currentTarget.owner.stop(audioContext.currentTime + 1); |
1861 } | 1869 } |
1862 }; | 1870 }; |
| 1871 this.outputGain.gain.cancelScheduledValues(audioContext.currentTime); |
1863 if (!audioEngineContext.loopPlayback || !audioEngineContext.synchPlayback) { | 1872 if (!audioEngineContext.loopPlayback || !audioEngineContext.synchPlayback) { |
1864 this.metric.startListening(audioEngineContext.timer.getTestTime()); | 1873 this.metric.startListening(audioEngineContext.timer.getTestTime()); |
1865 this.outputGain.gain.setValueAtTime(this.onplayGain, 0.0); | 1874 this.outputGain.gain.setValueAtTime(this.onplayGain, startTime); |
1866 this.interfaceDOM.startPlayback(); | 1875 this.interfaceDOM.startPlayback(); |
1867 } else { | 1876 } else { |
1868 this.outputGain.gain.setValueAtTime(0.0, startTime); | 1877 this.outputGain.gain.setValueAtTime(0.0, startTime); |
1869 } | 1878 } |
1870 this.bufferNode.start(startTime, this.specification.startTime || 0, this.specification.stopTime - this.specification.startTime || this.buffer.buffer.duration); | 1879 if (audioEngineContext.loopPlayback) { |
| 1880 this.bufferNode.loopStart = this.specification.startTime || 0; |
| 1881 this.bufferNode.loopEnd = this.specification.stopTime - this.specification.startTime || this.buffer.buffer.duration; |
| 1882 this.bufferNode.start(startTime); |
| 1883 } else { |
| 1884 this.bufferNode.start(startTime, this.specification.startTime || 0, this.specification.stopTime - this.specification.startTime || this.buffer.buffer.duration); |
| 1885 } |
1871 this.bufferNode.playbackStartTime = audioEngineContext.timer.getTestTime(); | 1886 this.bufferNode.playbackStartTime = audioEngineContext.timer.getTestTime(); |
1872 } | 1887 } |
1873 }; | 1888 }; |
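The reworked `play` splits looped from one-shot playback: in loop mode the fragment boundaries go into `loopStart`/`loopEnd` and `start()` gets no duration, while the one-shot path still uses `start(when, offset, duration)`. The sketch below shows that general Web Audio pattern rather than the project's exact assignments; `startFragment` and its arguments are assumptions.

```js
// General pattern for the two start modes in this hunk; `node` is an
// AudioBufferSourceNode whose buffer is already assigned, and the times
// are illustrative fragment boundaries in seconds.
function startFragment(node, when, fragmentStart, fragmentStop, looped) {
    if (looped) {
        // Loop only over the fragment: loopStart/loopEnd are offsets into the
        // buffer, and start() is given just the offset so looping takes over.
        node.loop = true;
        node.loopStart = fragmentStart;
        node.loopEnd = fragmentStop;
        node.start(when, fragmentStart);
    } else {
        // One-shot: play the fragment once using start(when, offset, duration).
        node.start(when, fragmentStart, fragmentStop - fragmentStart);
    }
}
```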
1874 | 1889 |
1875 this.stop = function (stopTime) { | 1890 this.stop = function (stopTime) { |
1877 if (this.bufferNode != undefined) { | 1892 if (this.bufferNode != undefined) { |
1878 this.metric.stopListening(audioEngineContext.timer.getTestTime(), this.getCurrentPosition()); | 1893 this.metric.stopListening(audioEngineContext.timer.getTestTime(), this.getCurrentPosition()); |
1879 this.bufferNode.stop(stopTime); | 1894 this.bufferNode.stop(stopTime); |
1880 this.bufferNode = undefined; | 1895 this.bufferNode = undefined; |
1881 } | 1896 } |
1882 this.outputGain.gain.value = 0.0; | 1897 this.outputGain.gain.setValueAtTime(0.0, stopTime); |
1883 this.interfaceDOM.stopPlayback(); | 1898 this.interfaceDOM.stopPlayback(); |
1884 }; | 1899 }; |
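`stop` now mutes with `setValueAtTime(0.0, stopTime)` instead of writing `gain.value` directly, so the mute lands at the same scheduled time as `bufferNode.stop(stopTime)`; `play` likewise calls `cancelScheduledValues` first to clear stale automation. A minimal sketch of that scheduling pattern, assuming an already-connected gain node and source node:

```js
// Schedule the gain change and the source stop for the same context time,
// clearing any automation left over from a previous playback first.
function scheduleStop(context, gainNode, sourceNode, delaySeconds) {
    var stopTime = context.currentTime + delaySeconds;
    gainNode.gain.cancelScheduledValues(context.currentTime);
    gainNode.gain.setValueAtTime(0.0, stopTime); // mute exactly at stopTime
    sourceNode.stop(stopTime);                   // source ends at the same moment
    return stopTime;
}
```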
1885 | 1900 |
1886 this.getCurrentPosition = function () { | 1901 this.getCurrentPosition = function () { |
1887 var time = audioEngineContext.timer.getTestTime(); | 1902 var time = audioEngineContext.timer.getTestTime(); |