\documentclass{article}

\usepackage[margin=2cm]{geometry}
\usepackage{listings}

\begin{document}

{\large APE Browser Tool - Project Specification Document}

\section{Document}

An XML file containing all the project information needed to load and execute the project on the client. Certain interfaces are optional, while others are mandatory. This guide should reflect changes to the XML project format and keep track of its versions. Hopefully this can remain simple!

\section{Root}

The XML root must be \texttt{<BrowserEvalProjectDocument>}. This name should be sufficiently identifiable both in the document itself and in the JavaScript decoding, since an object named after the root will be created.

There must also be a \texttt{<version>} tag which has the attribute \texttt{id} containing a numerical representation of the version. Currently everything in this document can be assumed to be version 1. If future updates or corrections are made post delivery, this should give the flexibility to ensure past projects still work.

The root will also contain the following tags: setup and audioHolder.

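As a rough sketch of the overall document shape (placing the version tag as a direct child of the root is an assumption here, since the full example at the end of this document omits it):

\begin{lstlisting}
<?xml version="1.0" encoding="utf-8"?>
<BrowserEvalProjectDocument>
  <version id="1"/>
  <setup> ... </setup>
  <audioHolder id="0"> ... </audioHolder>
</BrowserEvalProjectDocument>
\end{lstlisting}
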
\section{Setup tag}

The setup tag specifies certain global test settings, including the interface type to use, the project return location and any other setup instructions.
Any general pre/post-test questions must be specified in the relevant child tags. Any enabled metrics must also be specified in the Metric child node. A short sketch of a typical setup tag follows the attribute list below.

\subsection{Attributes}
\begin{itemize}
\item \texttt{interface} - Mandatory, String. Defaults to APE; otherwise use it to load any of the available interfaces. Currently the only valid string is APE.
\item \texttt{projectReturn} - Mandatory, String. Specify the URL to which the test results should be returned. If null, the client will generate the XML locally and prompt the user to return the file.
\item \texttt{randomiseOrder} - Optional, Boolean. Defaults to false. Specify whether the order of the tests can be randomised.
\item \texttt{collectMetrics} - Optional, Boolean. Defaults to false. Determine whether the test metrics should be collected, such as how long each test session took. The full metrics list can be modified in the Metric tag.
\end{itemize}

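A minimal sketch of a setup tag using the attributes above; the attribute values are illustrative only:

\begin{lstlisting}
<setup interface="APE" projectReturn="null" randomiseOrder="false" collectMetrics="true">
  <!-- PreTest, PostTest and Metric children go here -->
</setup>
\end{lstlisting}
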
\subsection{Elements}
The PreTest, PostTest and Metric tags (see the relevant sections below), as shown in the full example at the end of this document.

\section{AudioHolder tag}

There should be one audioHolder tag per test session, inside which each audioElement is specified as a child. The audioHolder tag can help to generalise certain objects. Each audioHolder instance specifies a separate listening test to be paged, each with its own specific requirements.

\subsection{Attributes}
\begin{itemize}
\item \texttt{id} - Mandatory, String. Give an ID string or number to identify the test in the results.
\item \texttt{hostURL} - Optional, String. If all tracks are hosted from the same folder on a server, you can specify the common URL prefix here. For instance, if loading http://test.com/tracks/track1.wav and http://test.com/tracks/track2.wav, this could equal http://test.com/tracks/ and the url attribute in the audioElements tag can be track1.wav or track2.wav. Equally, http://test.com/ and then using tracks/track1.wav and tracks/track2.wav is valid.
\item \texttt{sampleRate} - Optional, Number. If your test requires a specific sample rate, this should be set to the desired sample rate in Hertz. This does not set the browser to the correct sample rate, but forces the browser to check that the sample rate matches. If this is undefined, no sample rate matching will occur.
\item \texttt{randomiseOrder} - Optional, Boolean String. Defaults to false. Determine whether the track order should be randomised. Must be true or false.
\item \texttt{repeatCount} - Optional, Number. Defaults to 0 (i.e. no repeats). The number of times a test should be repeated.
\item \texttt{loop} - Optional, Boolean String. Defaults to false. Determine whether the audioElements should loop their playback.
\end{itemize}

\subsection{Elements}
Contains the audioElements tags and the interface tag (see the sketch below).

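An illustrative audioHolder sketch using the attributes above, including the loop attribute (which does not appear in the full example at the end); all attribute values here are placeholders:

\begin{lstlisting}
<audioHolder id="test1" hostURL="http://test.com/tracks/" sampleRate="44100"
             randomiseOrder="true" repeatCount="0" loop="true">
  <!-- interface and audioElements children go here -->
</audioHolder>
\end{lstlisting}
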
\section{audioElements tag}

These must reside as children of the audioHolder tag. There must be one audioElements tag per sound sample to load into the test.

\subsection{Attributes}
\begin{itemize}
\item \texttt{id} - Mandatory, String. Must give a string or number to identify each audio element. This id is used in the output to identify each track once randomised.
\item \texttt{url} - Mandatory, String. Contains the URL of the track. If the audioHolder tag's hostURL attribute is set, this value is concatenated with hostURL to obtain the full URL.
\end{itemize}

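To illustrate the hostURL concatenation described above, the following two declarations would both resolve to http://test.com/tracks/track1.wav (using the example URLs from the audioHolder section):

\begin{lstlisting}
<!-- audioHolder declared with hostURL="http://test.com/tracks/" -->
<audioElements url="track1.wav" id="0"/>

<!-- audioHolder declared without hostURL -->
<audioElements url="http://test.com/tracks/track1.wav" id="0"/>
\end{lstlisting}
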
\section{interface tag}

This is contained within the audioHolder tag and outlines test-instance-specific requirements. These include the following child tags (a short sketch follows the list):
\begin{itemize}
\item 'title' - Contains the test title to be shown at the top of the page. There can only be one title node per interface.
\item 'scale' - Takes a position attribute with a value between 0 and 100 indicating where on the scale to place the text contained inside. There can be multiple scale tags per interface.
\end{itemize}

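A short interface sketch, mirroring the one in the full example at the end of this document:

\begin{lstlisting}
<interface>
  <title>Example Test Question</title>
  <scale position="0">Min</scale>
  <scale position="100">Max</scale>
</interface>
\end{lstlisting}
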
\section{CommentQuestion tag}

This is a 1st level tag (same level as AudioHolder and setup). It allows another question and comment box to be presented on the page. The results of these are passed back in the results XML with both the comment and the question. The id attribute is used to identify the response in the results XML.

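A sketch of a CommentQuestion entry, matching the one used in the full example at the end of this document:

\begin{lstlisting}
<CommentQuestion id='mixingExperiance'>What is your mixing experience?</CommentQuestion>
\end{lstlisting}
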
\section{PreTest tag and PostTest tag}

These are 1st level tags. The PreTest tag allows pre-test instructions and questions to be specified. These appear as a pop-up style window with next buttons and other automatic GUI elements. The PostTest tag allows post-test instructions, questions and resources to be specified. These appear as a pop-up style window after the submit button is pressed.

\subsection{Attributes}
None.

\subsection{Elements}
Takes the \texttt{statement} and \texttt{question} tags. The order these are presented in the XML defines the order they appear on the screen.

\subsubsection{Statement}

The statement tag simply prints the included string verbatim in a 'pop-up' window with a next button.

\subsubsection{Question}

This allows for a question to be asked before or after the test. This is added to the response XML in the same location as the other common/global questions. The response includes both the question asked and the answer given. This takes two attributes, id and mandatory. id is a mandatory field; the same id will be used in the results, so it is important that it is entered correctly. mandatory is optional; when true, the field must be completed before continuing.

\subsubsection{Resource}

The resource tag is only available in the PostTest tag. It allows linking to an external resource via the href attribute.

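A sketch of a PostTest block combining the three element types above. The statement and question elements are adapted from the full example at the end of this document (with the optional mandatory attribute shown); the resource line is an assumption based on the description above (a tag carrying an href attribute), and its URL is a placeholder:

\begin{lstlisting}
<PostTest>
  <statement>Thank you for taking this listening test.</statement>
  <question id="SessionID" mandatory="true">Please enter your name.</question>
  <resource href="http://example.com/further-reading.html"/>
</PostTest>
\end{lstlisting}
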
\section{Metric tag}
The Metric node must be declared inside the setup tag. It takes a set of 'metricEnable' children to define which metrics to collect and present.

\subsection{metricEnable tag}
Each metricEnable node contains a single value determining which metric to enable for collection. Some of these are global, others are collected per track or per test instance.
\begin{itemize}
\item testTimer - Return the global test timer and test instance timers. Measures the time between the first start and the final submit.
\item elementTimer - Return the total time each audioElement in each test was listened to. Measures the time between successive clicks on the track changer.
\item elementTracker - Return the initial position of each track.
\item elementTrackerFull - Return an enumerated pair of time and position, tracking the entire movement of each element position. NOTE: this will override the elementTracker option above and throw an error into the browser console.
\item elementFlagListenedTo - Return a boolean per element to see if the element was listened to.
\item elementFlagMoved - Return a boolean per element to see if the element slider was moved.
\item elementFlagComments - Return a boolean per element to see if the element has comments.
\end{itemize}

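A sketch of the Metric node as it sits inside the setup tag; this variant enables elementTrackerFull, which, as noted above, overrides elementTracker:

\begin{lstlisting}
<Metric>
  <metricEnable>testTimer</metricEnable>
  <metricEnable>elementTimer</metricEnable>
  <metricEnable>elementTrackerFull</metricEnable>
</Metric>
\end{lstlisting}
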
\section{Feature List}
\begin{itemize}
\item Paging listening tests - e.g. ask multiple questions in each experiment
\item Labels on X axis - scale
\item Input questions/comments at the top to guide towards the question being asked
\item Randomise track numbers (including comment boxes, relating back to the correct reference track)
\item Randomise order of individual tests
\item Save output XML file to remote server
\item Test metrics
\begin{itemize}
\item Duration of listening to each track
\item Time spent on each individual test
\item Start and end position of every track
\item Flags on each track, to check that each track (though this may not restrict users from submitting)
\begin{itemize}
\item Has been listened to
\item Has been moved
\item Has comments about it
\end{itemize}
\end{itemize}
\end{itemize}

\subsection{Advanced feature list}
\begin{itemize}
\item Repeat each test a number of times (2 or 3?) to remove learning/experience bias and ensure that the order is consistent
\item Perform loudness equalisation on all tracks
\item Selection of test type:
\begin{itemize}
\item MUSHRA (with vertical slider per track)
\item APE (single horizontal slider)
\item AB Test
\end{itemize}
\item Pre-test of some basic hearing test
\end{itemize}

\section{Example}

Here is an example XML structure:

\begin{lstlisting}
<?xml version="1.0" encoding="utf-8"?>
<BrowserEvalProjectDocument>
  <setup interface="APE" projectReturn="null" randomiseOrder='true' collectMetrics='true'>
    <PreTest>
      <statement>Please listen to all mixes</statement>
      <question id="location" mandatory="true">Please enter your listening location</question>
    </PreTest>
    <PostTest>
      <statement>Thank you for taking this listening test.</statement>
      <question id="SessionID">Please enter your name.</question>
    </PostTest>
    <Metric>
      <metricEnable>testTimer</metricEnable>
      <metricEnable>elementTimer</metricEnable>
      <metricEnable>elementTracker</metricEnable>
      <metricEnable>elementFlagListenedTo</metricEnable>
      <metricEnable>elementFlagMoved</metricEnable>
    </Metric>
  </setup>
  <audioHolder id='0' hostURL="example_eval/" sampleRate="44100" randomiseOrder='true' repeatCount='1'>
    <interface>
      <title>Example Test Question</title>
      <scale position="0">Min</scale>
      <scale position="100">Max</scale>
      <scale position="50">Middle</scale>
      <scale position="20">20</scale>
    </interface>
    <audioElements url="0.wav" id="0"/>
    <audioElements url="1.wav" id="1"/>
    <audioElements url="2.wav" id="2"/>
    <audioElements url="3.wav" id="3"/>
    <audioElements url="4.wav" id="4"/>
    <audioElements url="5.wav" id="5"/>
    <audioElements url="6.wav" id="6"/>
    <audioElements url="7.wav" id="7"/>
    <audioElements url="8.wav" id="8"/>
    <audioElements url="9.wav" id="9"/>
    <audioElements url="10.wav" id="10"/>
    <CommentQuestion id='mixingExperiance'>What is your mixing experience?</CommentQuestion>
    <PreTest>
      <statement>Start the Test 3</statement>
    </PreTest>
    <PostTest>
      <statement>Please take a break before the next test</statement>
      <question id="testComment">How did you find the test?</question>
    </PostTest>
  </audioHolder>
</BrowserEvalProjectDocument>
\end{lstlisting}

\end{document}