# HG changeset patch
# User Nicholas Jillings
# Date 1444937953 -3600
# Node ID 60fffb9e291c39a6db08008c82986c54a6c4f040
# Parent 1100a18eb3f501773ef0509bef62808edbef3088
# Parent 378726f0ac9111a3542eac04fe3d6eb6b332b6f0
Merged.

diff -r 1100a18eb3f5 -r 60fffb9e291c analyse.html
--- a/analyse.html Thu Oct 15 20:36:35 2015 +0100
+++ b/analyse.html Thu Oct 15 20:39:13 2015 +0100
@@ -18,32 +18,33 @@
google.load("visualization", "1", {packages:["corechart"]}); /************* - * SETUP * + * SETUP * *************/ // folder where to find the XML files - xmlFileFolder = "saves"; + xmlFileFolder = "analysis_test"; // array of XML files - var xmlFiles = ['McG-A-2014-03.xml','McG-B-2014-03.xml','McG-C-2014-03.xml']; + var xmlFiles = ['QM-1-1.xml','QM-2-1.xml','QM-2-2.xml','QM-2-3.xml','QM-3-1.xml','QM-3-2.xml','QM-4-1.xml','QM-5-1.xml','QM-5-2.xml','QM-6-1.xml','QM-6-2.xml','QM-7-1.xml','QM-7-2.xml','QM-8-1.xml','QM-9-1.xml','QM-10-1.xml','QM-11-1.xml','QM-12-1.xml','QM-12-2.xml','QM-13-1.xml','QM-14-1.xml','QM-15-1.xml','QM-16-1.xml','QM-17-1.xml','QM-18-1.xml','QM-18-2.xml','QM-18-3.xml','QM-19-1.xml','QM-20-1.xml','QM-20-2.xml','QM-20-3.xml','QM-21-1.xml','QM-21-2.xml']; + //['McG-A-2014-03.xml','McG-B-2014-03.xml','McG-C-2014-03.xml','McG-D-2014-03.xml','McG-E-2014-03.xml','McG-F-2014-03.xml','McG-G-2014-03.xml','McG-H-2014-03.xml']; //TODO: make retrieval of file names automatic / drag files on here /**************** - * VARIABLES * + * VARIABLES * ****************/ // Counters - // How many files, audioHolders, audioElementes and statements annotated (don't count current one) + // How many files, audioholders, audioelementes and statements annotated (don't count current one) var numberOfFiles = -1; - var numberOfaudioHolders = -1; - var numberOfaudioElementes = -1; + var numberOfaudioholders = -1; + var numberOfaudioelementes = -1; var numberOfStatements = -1; var numberOfSkippedComments = 0; // Object arrays var fileNameArray = []; var subjectArray = []; - var audioHolderArray = []; - var audioElementArray = []; + var audioholderArray = []; + var audioelementArray = []; // End of (file, audioholder, audioelement) flags var newFile = true;
@@ -51,8 +52,8 @@
var newAudioElement = true; var fileCounter = 0; // file index - var audioHolderCounter=0; // audioholder index (current XML file) - var audioElementCounter=0; // audioelement index (current audioholder) + var audioholderCounter=0; // audioholder index (current XML file) + var audioelementCounter=0; // audioelement index (current audioholder) var statementNumber=0; // total number of statements var root; // root of XML file
@@ -92,7 +93,7 @@
} } - function median(values) { + function median(values) { // TODO: replace code by '50th percentile' - should be the same?
values.sort( function(a,b) {return a - b;} ); var half = Math.floor(values.length/2); if(values.length % 2) @@ -100,9 +101,16 @@ else return (values[half-1] + values[half]) / 2.0; } + + function percentile(values, n) { + values.sort( function(a,b) {return a - b;} ); + // get ordinal rank + var rank = Math.min(Math.floor(values.length*n/100), values.length-1); + return values[rank]; + } /*********************** - * TIME MEASUREMENT * + * TIME MEASUREMENT * ************************/ // measure time since last time this function was called @@ -197,71 +205,71 @@ /******************************** - * PLAYBACK OF AUDIO * + * PLAYBACK OF AUDIO * ********************************/ - //PLAYaudioElement + //PLAYaudioelement // Keep track of whether audio should be played function playFlagChanged(){ playAudio = playFlag.checked; // global variable if (!playAudio){ // if audio needs to stop audio.pause(); // stop audio - if anything is playing - currently_playing = ''; // back to empty string so playaudioElement knows nothing's playing + currently_playing = ''; // back to empty string so playaudioelement knows nothing's playing } } - // audioHolder that's currently playing - var currently_playing_audioHolder = ''; // at first: empty string - var currently_playing_audioElement = ''; + // audioholder that's currently playing + var currently_playing_audioholder = ''; // at first: empty string + var currently_playing_audioelement = ''; var audio; - // Play audioElement of audioHolder if available, from start or from same position - function playaudioElement(audioHolderName, audioElementerName){ + // Play audioelement of audioholder if available, from start or from same position + function playaudioelement(audioholderName, audioelementerName){ if (playAudio) { // if enabled // get corresponding file from folder - var file_location = 'audio/'+audioHolderName + '/' + audioElementerName + '.mp3'; // fixed path and file name format + var file_location = 'audio/'+audioholderName + '/' + audioelementerName + '.mp3'; // fixed path and file name format // if not available, show error/warning message //TODO ... 
// if nothing playing yet, start playing - if (currently_playing_audioHolder == ''){ // signal that nothing is playing + if (currently_playing_audioholder == ''){ // signal that nothing is playing //playSound(audioBuffer); audio = new Audio(file_location); audio.loop = true; // loop when end is reached audio.play(); - currently_playing_audioHolder = audioHolderName; - currently_playing_audioElement = audioElementerName; - } else if (currently_playing_audioHolder != audioHolderName) { - // if different audioHolder playing, stop that and start playing + currently_playing_audioholder = audioholderName; + currently_playing_audioelement = audioelementerName; + } else if (currently_playing_audioholder != audioholderName) { + // if different audioholder playing, stop that and start playing audio.pause(); // stop audio audio = new Audio(file_location); // load new file audio.loop = true; // loop when end is reached audio.play(); // play audio from the start - currently_playing_audioHolder = audioHolderName; - currently_playing_audioElement = audioElementerName; - } else if (currently_playing_audioElement != audioElementerName) { - // if same audioHolder playing, start playing from where it left off + currently_playing_audioholder = audioholderName; + currently_playing_audioelement = audioelementerName; + } else if (currently_playing_audioelement != audioelementerName) { + // if same audioholder playing, start playing from where it left off skipTime = audio.currentTime; // time to skip to audio.pause(); // stop audio audio = new Audio(file_location); audio.addEventListener('loadedmetadata', function() { this.currentTime = skipTime; - console.log('Loaded '+audioHolderName+'-'+audioElementerName+', playing from '+skipTime); + console.log('Loaded '+audioholderName+'-'+audioelementerName+', playing from '+skipTime); }, false); // skip to same time when audio is loaded! audio.loop = true; // loop when end is reached audio.play(); // play from that time audio.currentTime = skipTime; - currently_playing_audioHolder = audioHolderName; - currently_playing_audioElement = audioElementerName; + currently_playing_audioholder = audioholderName; + currently_playing_audioelement = audioelementerName; } - // if same audioElement playing: keep on playing (i.e. do nothing) + // if same audioelement playing: keep on playing (i.e. 
do nothing) } } /******************** - * READING FILES * + * READING FILES * ********************/ // Read necessary data from XML file @@ -303,24 +311,24 @@ subjectArray.push(subjectID.textContent); // append to array } - // go over all audioHolders, add to array if not already there - audioHolderNodes = root.getElementsByTagName('audioholder'); - // go over audioHolderNodes and append audioHolder name when not present yet - for (audioHolderIndex = 0; audioHolderIndex < audioHolderNodes.length; audioHolderIndex++) { - audioHolderName = audioHolderNodes[audioHolderIndex].getAttribute('id'); - if (audioHolderArray.indexOf(audioHolderName) == -1) { // if not already in array - audioHolderArray.push(audioHolderName); // append to array + // go over all audioholders, add to array if not already there + audioholderNodes = root.getElementsByTagName('audioholder'); + // go over audioholderNodes and append audioholder name when not present yet + for (audioholderIndex = 0; audioholderIndex < audioholderNodes.length; audioholderIndex++) { + audioholderName = audioholderNodes[audioholderIndex].getAttribute('id'); + if (audioholderArray.indexOf(audioholderName) == -1) { // if not already in array + audioholderArray.push(audioholderName); // append to array } - // within each audioHolder, go over all audioElement IDs, add to array if not already there - audioElementNodes = audioHolderNodes[audioHolderIndex].getElementsByTagName('audioelement'); - for (audioElementIndex = 0; audioElementIndex < audioElementNodes.length; audioElementIndex++) { - audioElementName = audioElementNodes[audioElementIndex].getAttribute('id'); - if (audioElementArray.indexOf(audioElementName) == -1) { // if not already in array - audioElementArray.push(audioElementName); // append to array + // within each audioholder, go over all audioelement IDs, add to array if not already there + audioelementNodes = audioholderNodes[audioholderIndex].getElementsByTagName('audioelement'); + for (audioelementIndex = 0; audioelementIndex < audioelementNodes.length; audioelementIndex++) { + audioelementName = audioelementNodes[audioelementIndex].getAttribute('id'); + if (audioelementArray.indexOf(audioelementName) == -1) { // if not already in array + audioelementArray.push(audioelementName); // append to array } } } - // count occurrences of each audioHolder + // count occurrences of each audioholder // ... 
} else { @@ -331,8 +339,8 @@ // sort alphabetically fileNameArray.sort(); subjectArray.sort(); - audioHolderArray.sort(); - audioElementArray.sort(); + audioholderArray.sort(); + audioelementArray.sort(); // display all information in HTML // show XML file folder @@ -341,19 +349,22 @@ document.getElementById('numberOfFiles_span').innerHTML = fileNameArray.length; // show list of subject names document.getElementById('subjectArray_span').innerHTML = subjectArray.toString(); - // show list of audioHolders - document.getElementById('audioHolderArray_span').innerHTML = audioHolderArray.toString(); - // show list of audioElementes - document.getElementById('audioElementArray_span').innerHTML = audioElementArray.toString(); + // show list of audioholders + document.getElementById('audioholderArray_span').innerHTML = audioholderArray.toString(); + // show list of audioelementes + document.getElementById('audioelementArray_span').innerHTML = audioelementArray.toString(); } - function makePlots() { + function makePlots() { //TODO: split into different functions + // TEMPORARY + makeTimeline(xmlFileFolder+"/"+xmlFiles[7]); + // create value array - var ratings = []; // 3D matrix of ratings (audioHolder, audioElement, subject) - for (audioHolderIndex = 0; audioHolderIndex < audioHolderNodes.length; audioHolderIndex++) { + var ratings = []; // 3D matrix of ratings (audioholder, audioelement, subject) + for (audioholderIndex = 0; audioholderIndex < audioholderArray.length; audioholderIndex++) { ratings.push([]); - for (audioElementIndex = 0; audioElementIndex < audioElementNodes.length; audioElementIndex++) { - ratings[audioHolderIndex].push([]); + for (audioelementIndex = 0; audioelementIndex < audioelementArray.length; audioelementIndex++) { + ratings[audioholderIndex].push([]); } } @@ -364,49 +375,48 @@ if (xml != null) { // if file exists // get root of XML file root = xml.getElementsByTagName('browserevaluationresult')[0]; - // go over all audioHolders - audioHolderNodes = root.getElementsByTagName('audioholder'); - for (audioHolderIndex = 0; audioHolderIndex < audioHolderNodes.length; audioHolderIndex++) { - audioHolderName = audioHolderNodes[audioHolderIndex].getAttribute('id'); - audioElementNodes = audioHolderNodes[audioHolderIndex].getElementsByTagName('audioelement'); + // go over all audioholders + audioholderNodes = root.getElementsByTagName('audioholder'); + for (audioholderIndex = 0; audioholderIndex < audioholderNodes.length; audioholderIndex++) { + audioholderName = audioholderNodes[audioholderIndex].getAttribute('id'); + audioelementNodes = audioholderNodes[audioholderIndex].getElementsByTagName('audioelement'); // go over all audioelements - for (audioElementIndex = 0; audioElementIndex < audioElementNodes.length; audioElementIndex++) { - audioElementName = audioElementNodes[audioElementIndex].getAttribute('id'); + for (audioelementIndex = 0; audioelementIndex < audioelementNodes.length; audioelementIndex++) { + audioelementName = audioelementNodes[audioelementIndex].getAttribute('id'); // get value - var value = audioElementNodes[audioElementIndex].getElementsByTagName("value")[0].textContent; + var value = audioelementNodes[audioelementIndex].getElementsByTagName("value")[0].textContent; if (value) { // if not empty, null, undefined... 
ratingValue = parseFloat(value); - // add to matrix - ratings[audioHolderIndex][audioElementIndex].push(ratingValue) + // add to matrix at proper position + aHidx = audioholderArray.indexOf(audioholderName); + aEidx = audioelementArray.indexOf(audioelementName); + ratings[aHidx][aEidx].push(ratingValue); } } } - // go over all audioHolders + // go over all audioholders - // go over all audioElements within audioHolder, see if present in idMatrix, add if not + // go over all audioelements within audioholder, see if present in idMatrix, add if not // add corresponding rating to 'ratings', at position corresponding with position in idMatrix } } - for (audioHolderIndex = 0; audioHolderIndex < audioHolderArray.length; audioHolderIndex++) { - audioHolderName = audioHolderArray[audioHolderIndex]; // for this song + for (audioholderIndex = 0; audioholderIndex < audioholderArray.length; audioholderIndex++) { + audioholderName = audioholderArray[audioholderIndex]; // for this song tickArray = [] - medianOfAudioElement = [] raw_data = [['SubjectID', 'Rating']]; audioElIdx = 0; - for (audioElementIndex = 0; audioElementIndex<audioElementArray.length; audioElementIndex++) { if (ratings[audioHolderIndex][audioElementIndex].length>0) { + for (audioelementIndex = 0; audioelementIndex<audioelementArray.length; audioelementIndex++) { if (ratings[audioholderIndex][audioelementIndex].length>0) { audioElIdx++; // increase if not empty // make tick label - tickArray.push({v:audioElIdx, f: audioElementArray[audioElementIndex]}); - // add median - medianOfAudioElement.push(median(ratings[audioHolderIndex][audioElementIndex])); + tickArray.push({v:audioElIdx, f: audioelementArray[audioelementIndex]}); } - for (subject = 0; subject pctl75[pctl75.length-1]+1.5*IQR || + ratings[audioholderIndex][audioelementIndex][idx] < pctl25[pctl25.length-1]-1.5*IQR){ + outliers.push(ratings[audioholderIndex][audioelementIndex][idx]); + } + else { + rest.push(ratings[audioholderIndex][audioelementIndex][idx]); + } + } + outlierArray.push(outliers); + max_n_outliers = Math.max(max_n_outliers, outliers.length); // update max number + // max: maximum value which is not outlier + max.push(Math.max.apply(null, rest)); + // min: minimum value which is not outlier + min.push(Math.min.apply(null, rest)); + } + + // Build data array + boxplot_data = [['ID', 'Span', '', '', '', 'Median']]; + for (idx = 0; idx < max_n_outliers; idx++) { + boxplot_data[0].push('Outlier'); + } + for (audioelementIndex = 0; audioelementIndex<audioelementArray.length; audioelementIndex++) { if (ratings[audioholderIndex][audioelementIndex].length>0) { // if rating array not empty for this audioelement + data_array = [ + audioelementArray[audioelementIndex], // name + min[audioelementIndex], // minimum + pctl75[audioelementIndex], + pctl25[audioelementIndex], + max[audioelementIndex], // maximum + med[audioelementIndex] + ]; + for (idx = 0; idx < max_n_outliers; idx++) { + if (idxResult XML files:
- Audioholders in dataset: + Audioholders in dataset:
Subjects in dataset:
- Audioelements in dataset: + Audioelements in dataset:

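For context, the following stand-alone sketch (not part of the patch) shows the XML traversal that makePlots() above relies on when gathering ratings: each result file has a browserevaluationresult root, audioholder nodes with an id attribute, audioelement children with an id attribute, and a value tag holding the numerical rating. The inline example XML, the ratingsByElement object and the helper variable names are illustrative only.

// Illustrative sketch: collect ratings per audioholder/audioelement from one
// result XML, following the tag structure used by analyse.html's makePlots().
// The XML string below is a made-up, minimal stand-in for a real saves/*.xml file.
var exampleXML =
    '<browserevaluationresult>' +
    '<audioholder id="song1">' +
    '<audioelement id="mixA"><value>0.72</value></audioelement>' +
    '<audioelement id="mixB"><value>0.35</value></audioelement>' +
    '</audioholder>' +
    '</browserevaluationresult>';

var xml = new DOMParser().parseFromString(exampleXML, "text/xml");
var root = xml.getElementsByTagName('browserevaluationresult')[0];
var ratingsByElement = {}; // { audioholderId: { audioelementId: [ratings...] } }

var audioholderNodes = root.getElementsByTagName('audioholder');
for (var h = 0; h < audioholderNodes.length; h++) {
    var holderId = audioholderNodes[h].getAttribute('id');
    ratingsByElement[holderId] = ratingsByElement[holderId] || {};
    var audioelementNodes = audioholderNodes[h].getElementsByTagName('audioelement');
    for (var e = 0; e < audioelementNodes.length; e++) {
        var elementId = audioelementNodes[e].getAttribute('id');
        var value = audioelementNodes[e].getElementsByTagName('value')[0].textContent;
        if (value) { // skip empty/missing ratings
            ratingsByElement[holderId][elementId] = ratingsByElement[holderId][elementId] || [];
            ratingsByElement[holderId][elementId].push(parseFloat(value));
        }
    }
}
console.log(ratingsByElement); // { song1: { mixA: [0.72], mixB: [0.35] } }

In the patch itself the same traversal fills the 3D ratings matrix, indexed by the positions of the holder and element IDs in audioholderArray and audioelementArray.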
diff -r 1100a18eb3f5 -r 60fffb9e291c docs/WAC2016/WAC2016.pdf
Binary file docs/WAC2016/WAC2016.pdf has changed

diff -r 1100a18eb3f5 -r 60fffb9e291c docs/WAC2016/WAC2016.tex
--- a/docs/WAC2016/WAC2016.tex Thu Oct 15 20:36:35 2015 +0100
+++ b/docs/WAC2016/WAC2016.tex Thu Oct 15 20:39:13 2015 +0100
@@ -188,6 +188,13 @@
%[Talking about what we do in the various sections of this paper. Referring to \cite{waet}. ] To meet the need for a cross-platform, versatile and easy-to-use listening test tool, we previously developed the Web Audio Evaluation Tool \cite{waet} which at the time of its inception was capable of running a listening test in the browser from an XML configuration file, and storing an XML file as well, with one particular interface. We have now expanded this into a tool with which a wide range of listening test types can easily be constructed and set up remotely, without any need for manually altering code or configuration files, and which allows visualisation of the collected results in the browser. In this paper, we discuss these different aspects and explore which future improvements would be possible. Specifically, in Section \ref{sec:architecture} we cover the general implementation aspects, with a focus on the Web Audio API, followed by a discussion of the requirements for successful remote tests in Section \ref{sec:remote}. Section \ref{sec:interfaces} describes the various interfaces the tool supports, as well as how to keep this manageable. Finally, in Section \ref{sec:analysis} we provide an overview of the analysis capabilities in the browser, before summarising our findings and listing future research directions in Section \ref{sec:conclusion}. + \begin{figure}[tb] + \centering + \includegraphics[width=.5\textwidth]{interface.png} + \caption{A simple example of a multi-stimulus, single attribute, single rating scale test with a reference and comment fields.} + \label{fig:interface} + \end{figure} + \begin{comment} % MEETING 8 OCTOBER \subsection{Meeting 8 October}
@@ -393,18 +400,19 @@
\label{sec:analysis} % don't mention Python scripts There are several benefits to providing basic analysis tools in the browser: they allow diagnosing problems, with the interface or with the test subject; they may be sufficient for many researchers' purposes; and test subjects may enjoy seeing an overview of their own results and/or results thus far at the end of their tests. - % \begin{figure*}[bhf] - % \centering - % \includegraphics[width=.7\textwidth]{timeline.pdf} - % \caption{This timeline of a single subject's listening test shows playback of fragments (red segments) and marker movements on the rating axis in function of time. } - % \label{fig:timeline} - % \end{figure*} + \begin{figure}[bhf] + \centering + \includegraphics[width=.5\textwidth]{boxplot.png} + %\caption{This timeline of a single subject's listening test shows playback of fragments (red segments) and marker movements on the rating axis in function of time. } + \caption{Box and whisker plot showing the aggregated numerical ratings of six stimuli by a group of subjects.} + \label{fig:timeline} + \end{figure} For this reason, we include a proof-of-concept web page with: \begin{itemize}[noitemsep,nolistsep] \item All audioholder IDs, file names, subject IDs, audio element IDs, ... in the collected XMLs so far (\texttt{saves/*.xml}) \item Selection of subjects and/or test samples to zoom in on a subset of the data %Check/uncheck each of the above for analysis (e.g.
zoom in on a certain song, or exclude a subset of subjects) \item Embedded audio to hear corresponding test samples % (follow path in XML setup file, which is also embedded in the XML result file) - \item Box plot, confidence plot, and scatter plot of rating values + \item Scatter plot, confidence plot and box plot of rating values (see Figure ) \item Timeline for a specific subject %(see Figure \ref{fig:timeline})%, perhaps re-playing the experiment in X times realtime. (If actual realtime, you could replay the audio...) \item Distribution plots of any radio button and number questions in pre- and post-test survey %(drop-down menu with `pretest', `posttest', ...; then drop-down menu with question `IDs' like `gender', `age', ...; make pie chart/histogram of these values over selected range of XMLs) \item All `comments' on a specific audioelement
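To make the box plot item in the list above concrete: the statistics behind it, as computed per audio element in the new analyse.html code, are a nearest-rank percentile, the 1.5*IQR outlier rule, and whiskers at the smallest and largest non-outlier ratings. Below is a minimal stand-alone sketch of that logic; the boxplotStats helper and the sample ratings array are hypothetical, not code from the repository.

// Illustrative sketch of the per-audioelement box plot statistics:
// nearest-rank percentiles, 1.5*IQR outlier detection, whiskers from the rest.
function percentile(values, n) {
    values.sort(function (a, b) { return a - b; });
    // ordinal (nearest-rank) position, clamped to the last index
    var rank = Math.min(Math.floor(values.length * n / 100), values.length - 1);
    return values[rank];
}

function boxplotStats(values) {
    var pctl25 = percentile(values.slice(), 25);
    var pctl75 = percentile(values.slice(), 75);
    var IQR = pctl75 - pctl25;
    var outliers = [];
    var rest = [];
    values.forEach(function (v) {
        // a rating is an outlier if it lies more than 1.5*IQR outside the quartiles
        if (v > pctl75 + 1.5 * IQR || v < pctl25 - 1.5 * IQR) {
            outliers.push(v);
        } else {
            rest.push(v);
        }
    });
    return {
        median: percentile(values.slice(), 50),
        pctl25: pctl25,
        pctl75: pctl75,
        min: Math.min.apply(null, rest), // lower whisker: smallest non-outlier
        max: Math.max.apply(null, rest), // upper whisker: largest non-outlier
        outliers: outliers
    };
}

// hypothetical ratings for one audioelement across subjects
console.log(boxplotStats([12, 34, 35, 38, 41, 45, 47, 95]));
// -> median 41, quartiles 35/47, whiskers 34/47, outliers [12, 95]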