\documentclass{article}

\usepackage[margin=2cm]{geometry}
\usepackage{listings}

\begin{document}

{\large APE Browser Tool - Project Specification Document}

\section{Document}

An XML file containing all project information needed to load and execute the project on the client. Certain interfaces are optional, while others are mandatory. This guide should reflect any changes to the XML project format and keep track of its versions. Hopefully this can remain simple!

\section{Root}

The XML root must be \texttt{<BrowserEvalProjectDocument>}. This should be sufficiently identifiable both in itself and in the JavaScript decoding, as it will create an object named after the root.

There must also be a \texttt{<version>} tag with an \texttt{id} attribute containing a numerical representation of the version. Currently everything in this document can be assumed to be version 1. If future updates or corrections are made post delivery, this gives the flexibility to ensure past projects still work.

The root will also contain the following tags: setup and audioHolder.
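
For illustration, a minimal document skeleton consistent with the above might look as follows (placing \texttt{version} as a direct child of the root, as implied by this section):

\begin{lstlisting}
<?xml version="1.0" encoding="utf-8"?>
<BrowserEvalProjectDocument>
    <version id="1"/>
    <setup> ... </setup>
    <audioHolder> ... </audioHolder>
</BrowserEvalProjectDocument>
\end{lstlisting}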

\section{Setup tag}

The setup tag specifies certain global test settings, including the interface type to use, the project return location and any other setup instructions.
Any general pre/post test questions must be specified in the relevant child tags. Any enabled metrics must also be specified in the metric child node.

\subsection{Attributes}
\begin{itemize}
\item \texttt{interface} - Mandatory, String. Defaults to APE; otherwise used to load any of the available interfaces. Currently the only valid string is APE.
\item \texttt{projectReturn} - Mandatory, String. Specifies the URL to which the test results are returned. If null, the client will generate the XML locally and prompt the user to return the file.
\item \texttt{randomiseOrder} - Optional, Boolean. Defaults to false. Specifies whether the order of the tests should be randomised.
\item \texttt{collectMetrics} - Optional, Boolean. Defaults to false. Determines whether the test metrics, such as how long each test session took, should be collected. The full metrics list can be modified in the Metric tag.
\end{itemize}

\subsection{Elements}
None
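
As an illustration, a setup node using the attributes above might look like this (the return URL is a hypothetical placeholder):

\begin{lstlisting}
<setup interface="APE" projectReturn="http://example.com/save"
       randomiseOrder='false' collectMetrics='true'>
    <!-- PreTest, PostTest and Metric children go here -->
</setup>
\end{lstlisting}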

\section{AudioHolder tag}

There should be one audioHolder tag per test session, inside which the audioElements tags are specified as children. The audioHolder tag can help to generalise certain objects. Each audioHolder instance specifies a separate listening test to be paged, each with its own specific requirements.

\subsection{Attributes}
\begin{itemize}
\item \texttt{id} - Mandatory, String. Gives an ID string or number to identify the test in the results.
\item \texttt{hostURL} - Optional, String. If all tracks are hosted in the same folder on a server, you can put the leading part of the URL here. For instance, if loading http://test.com/tracks/track1.wav and http://test.com/tracks/track2.wav, this could equal http://test.com/tracks/ and the url attribute in the audioElements tag can be track1.wav or track2.wav. Equally, http://test.com/ together with tracks/track1.wav and tracks/track2.wav is valid.
\item \texttt{sampleRate} - Optional, Number. If your test requires a specific sample rate, this should be set to the desired sample rate in Hertz. This does not set the browser to the correct sample rate, but forces the browser to check that the sample rate matches. If this is undefined, no sample rate matching will occur.
\item \texttt{randomiseOrder} - Optional, Boolean String. Defaults to false. Determines whether the track order should be randomised. Must be true or false.
\item \texttt{repeatCount} - Optional, Number. Defaults to 0 (i.e.\ no repeats). The number of times a test should be repeated.
\item \texttt{loop} - Optional, Boolean String. Defaults to false. Determines whether audioElements should loop their playback or not.
\end{itemize}

\subsection{Elements}
Contains the audioElements tags and the interface tag.

\section{audioElements tag}

These must reside as children of the audioHolder tag. There must be one audioElements tag per sound sample to load into the test.

\subsection{Attributes}
\begin{itemize}
\item \texttt{id} - Mandatory, String. Must give a string or number to identify each audio element. This id is used in the output to identify each track once randomised.
\item \texttt{url} - Mandatory, String. Contains the full URL to the track. If the audioHolder tag's hostURL attribute is set, this is concatenated with the hostURL to obtain the full URL.
\end{itemize}
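
For example, with \texttt{hostURL} set on the audioHolder, the following audioElements would resolve to http://test.com/tracks/track1.wav and http://test.com/tracks/track2.wav (a sketch based on the description above; the URLs are placeholders):

\begin{lstlisting}
<audioHolder id='demo' hostURL="http://test.com/tracks/">
    <audioElements url="track1.wav" id="0"/>
    <audioElements url="track2.wav" id="1"/>
</audioHolder>
\end{lstlisting}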
nicholas@9 64
n@672 65 \section{interface tag}
n@711 66
n@672 67 This is contained within the audioHolder tag and outlines test instance specific requirements. These include the following children tags:
n@672 68 \begin{itemize}
n@677 69 \item 'title' - Contains the test title to be shown at the top of the page. Can only be one title node per interface.
n@677 70 \item 'scale' - Takes the attribute position to be a value between 0 and 100 indicating where on the scale to place the text contained inside. Can be multiple scale tags per interface.
n@672 71 \end{itemize}
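
A sketch of an interface node with one title and two scale markers, following the syntax of the full example at the end of this document:

\begin{lstlisting}
<interface>
    <title>Rate the perceived quality</title>
    <scale position="0">Poor</scale>
    <scale position="100">Excellent</scale>
</interface>
\end{lstlisting}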

\section{CommentQuestion tag}

This is a 1st level tag (same level as audioHolder and setup). It allows another question and comment box to be presented on the page. The results of these are passed back in the results XML with both the comment and the question. The id attribute is set to keep track of them in the results XML.
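
For instance (the id value is a hypothetical placeholder):

\begin{lstlisting}
<CommentQuestion id='listeningEnvironment'>Please describe your listening environment</CommentQuestion>
\end{lstlisting}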

\section{PreTest tag and PostTest tag}

These are 1st level tags. The PreTest tag allows for specifying pre-test instructions and questions. These appear as a pop-up style window with next buttons and other automatic GUI. The PostTest tag allows for specifying post-test instructions, questions and resources. These appear as a pop-up style window after the submit button is pressed.

\subsection{Attributes}
None.

\subsection{Elements}
Takes the \texttt{statement} and \texttt{question} tags. The order in which these are presented in the XML defines the order they appear on the screen.

\subsubsection{Statement}

The statement tag simply prints the included string verbatim on a 'pop-up' window with a next button.

\subsubsection{Question}

This allows for a question to be asked before or after the test. It is added to the response XML in the same location as the other common/global questions. The response includes both the question asked and the answer given. This takes two attributes, \texttt{id} and \texttt{mandatory}. \texttt{id} is a mandatory field; the same id will be used in the results, so it is important that it is entered properly. \texttt{mandatory} is optional; true means the field must be completed before continuing.

\subsubsection{Resource}

The resource tag is only available in the PostTest tag. It allows linking to some external resource via the \texttt{href} attribute.
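
Putting these elements together, a PreTest/PostTest pair might look as follows. The exact form of the resource tag is assumed from the description above (a self-closing tag with an \texttt{href} attribute), and the URL is a placeholder:

\begin{lstlisting}
<PreTest>
    <statement>Please listen to all mixes before rating.</statement>
    <question id="age" mandatory="true">Please enter your age</question>
</PreTest>
<PostTest>
    <statement>Thank you for taking part.</statement>
    <question id="feedback">Any further comments?</question>
    <resource href="http://example.com/debrief.pdf"/>
</PostTest>
\end{lstlisting}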

\section{Metric tag}
The Metric tag is declared inside the setup tag. It takes a set of \texttt{metricEnable} children to define which metrics to collect and present.

\subsection{metricEnable tag}
Each metricEnable tag names a single metric to enable for collection. Some of these are global, others are per track or per test instance.
\begin{itemize}
\item testTimer - Return the global test timer and test instance timers. Measures the time between the first start and the final submit.
\item elementTimer - Return the total time each audioElement in each test was listened to. Measures the time between successive clicks on the track changer.
\item elementTracker - Return the initial position of each track.
\item elementTrackerFull - Return an enumerated pair of time and position, tracking the entire movement of each element position. NOTE: this will override the elementTracker option above and log an error to the browser console.
\item elementFlagListenedTo - Return a boolean per element to see if the element was listened to.
\item elementFlagMoved - Return a boolean per element to see if the element slider was moved.
\item elementFlagComments - Return a boolean per element to see if the element has comments.
\end{itemize}
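
For example, to capture the full movement history of each element rather than only its initial position, enable elementTrackerFull instead of elementTracker (a sketch; as noted above, elementTrackerFull overrides elementTracker if both are present):

\begin{lstlisting}
<Metric>
    <metricEnable>testTimer</metricEnable>
    <metricEnable>elementTrackerFull</metricEnable>
</Metric>
\end{lstlisting}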

\section{Feature List}
\begin{itemize}
\item Paging listening tests - e.g.\ ask multiple questions in each experiment
\item Labels on X axis - scale
\item Input questions/comments at the top to guide towards the question being asked
\item Randomise track numbers (incl. comment boxes, relating back to the correct reference track)
\item Randomise order of individual tests
\item Save output XML file to remote server
\item Test metrics
\begin{itemize}
\item Duration of listening to each track
\item Time spent on each individual test
\item Start and end position of every track
\item Flags on each track, to check whether each track (though these flags may not restrict users from submitting):
\begin{itemize}
\item Has been listened to
\item Has been moved
\item Has comments about it
\end{itemize}
\end{itemize}
\end{itemize}

\subsection{Advanced feature list}
\begin{itemize}
\item Repeat each test a number of times (2 or 3?) to remove learning/experience bias and ensure that the order is consistent
\item Perform loudness equalisation on all tracks
\item Selection of test type
\begin{itemize}
\item MUSHRA (with a vertical slider per track)
\item APE (single horizontal slider)
\item AB test
\end{itemize}
\item Pre-test with some basic hearing test
\end{itemize}

\section{Example}

Here is an example XML structure:

\begin{lstlisting}
<?xml version="1.0" encoding="utf-8"?>
<BrowserEvalProjectDocument>
    <version id="1"/>
    <setup interface="APE" projectReturn="null" randomiseOrder='true' collectMetrics='true'>
        <PreTest>
            <statement>Please listen to all mixes</statement>
            <question id="location" mandatory="true">Please enter your listening location</question>
        </PreTest>
        <PostTest>
            <statement>Thank you for taking this listening test.</statement>
            <question id="SessionID">Please enter your name.</question>
        </PostTest>
        <Metric>
            <metricEnable>testTimer</metricEnable>
            <metricEnable>elementTimer</metricEnable>
            <metricEnable>elementTracker</metricEnable>
            <metricEnable>elementFlagListenedTo</metricEnable>
            <metricEnable>elementFlagMoved</metricEnable>
        </Metric>
    </setup>
    <audioHolder id='0' hostURL="example_eval/" sampleRate="44100" randomiseOrder='true' repeatCount='1'>
        <interface>
            <title>Example Test Question</title>
            <scale position="0">Min</scale>
            <scale position="100">Max</scale>
            <scale position="50">Middle</scale>
            <scale position="20">20</scale>
        </interface>
        <audioElements url="0.wav" id="0"/>
        <audioElements url="1.wav" id="1"/>
        <audioElements url="2.wav" id="2"/>
        <audioElements url="3.wav" id="3"/>
        <audioElements url="4.wav" id="4"/>
        <audioElements url="5.wav" id="5"/>
        <audioElements url="6.wav" id="6"/>
        <audioElements url="7.wav" id="7"/>
        <audioElements url="8.wav" id="8"/>
        <audioElements url="9.wav" id="9"/>
        <audioElements url="10.wav" id="10"/>
        <CommentQuestion id='mixingExperience'>What is your mixing experience?</CommentQuestion>
        <PreTest>
            <statement>Start the Test 3</statement>
        </PreTest>
        <PostTest>
            <statement>Please take a break before the next test</statement>
            <question id="testComment">How did you find the test?</question>
        </PostTest>
    </audioHolder>
</BrowserEvalProjectDocument>
\end{lstlisting}

\end{document}