webaudioevaluationtool: changeset 2281:185232d01324
Change all instances of 'metricResult' to 'metricresult'
author    Brecht De Man <b.deman@qmul.ac.uk>
date      Wed, 20 Apr 2016 21:41:49 +0200
parents   17b7ab4f93f2
children  ace330a89910 169f08dc9634
files     analysis/analyse.html docs/ResultsSpecificationDocument.tex js/core.js python/generate_report.py python/timeline_view.py python/timeline_view_movement.py
diffstat  6 files changed, 19 insertions(+), 19 deletions(-)
--- a/analysis/analyse.html Wed Apr 20 21:19:54 2016 +0200
+++ b/analysis/analyse.html Wed Apr 20 21:41:49 2016 +0200
@@ -613,13 +613,13 @@
         // subtract total audioholder length from subsequent audioholder event times
         audioholder_children = audioholderNodes[audioholderIndex].childNodes;
         foundIt = false;
-        console.log(audioholder_children[2].getElementsByTagName("metricResult")) // not working!
+        console.log(audioholder_children[2].getElementsByTagName("metricresult")) // not working!
         for (idx = 0; idx<audioholder_children.length; idx++) { // go over children
-            if (audioholder_children[idx].getElementsByTagName('metricResult').length) {
-                console.log(audioholder_children[idx].getElementsByTagName('metricResult')[0]);
-                if (audioholder_children[idx].getElementsByTagName('metricResult')[0].getAttribute('id') == "testTime"){
-                    audioholder_time = parseFloat(audioholder_children[idx].getElementsByTagName('metricResult')[0].textContent);
+            if (audioholder_children[idx].getElementsByTagName('metricresult').length) {
+                console.log(audioholder_children[idx].getElementsByTagName('metricresult')[0]);
+                if (audioholder_children[idx].getElementsByTagName('metricresult')[0].getAttribute('id') == "testTime"){
+                    audioholder_time = parseFloat(audioholder_children[idx].getElementsByTagName('metricresult')[0].textContent);
                     console.log(audioholder_time);
                     foundIt = true;
                 }
--- a/docs/ResultsSpecificationDocument.tex Wed Apr 20 21:19:54 2016 +0200
+++ b/docs/ResultsSpecificationDocument.tex Wed Apr 20 21:41:49 2016 +0200
@@ -44,12 +44,12 @@
 \end{itemize}
 
 \subsubsection{metrics}
-One of these holders per audioElement, containing the results from any of the enabled per element metrics in metricResult tags. The ID of each element represents the metricEnable tag element. The inner value contains the results.
+One of these holders per audioElement, containing the results from any of the enabled per element metrics in metricresult tags. The ID of each element represents the metricEnable tag element. The inner value contains the results.
 
 % Will list specific response structures per metric!
 
 \subsection{metrics}
-One of these holders per testResults tag, containing the results from any of the enabled per test metrics in metricResult tags. The ID of each element represents the metricEnable tag element. The inner value contains the results.
+One of these holders per testResults tag, containing the results from any of the enabled per test metrics in metricresult tags. The ID of each element represents the metricEnable tag element. The inner value contains the results.
 
 % Will list specific response structures per metric!
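The per-element structure described in the specification above (and written out by js/core.js in the next file) nests lowercase metricresult tags inside a metric holder, each identified by a name attribute. As a minimal sketch, here is how such a fragment could be read back with Python's ElementTree; the fragment itself is hypothetical and only uses names that appear elsewhere in this changeset.

    import xml.etree.ElementTree as ET

    # Hypothetical fragment for illustration; real result files are produced by js/core.js.
    fragment = """
    <audioelement ref="example">
      <metric>
        <metricresult name="elementFlagListenedTo">true</metricresult>
        <metricresult name="elementInitialPosition">0.5</metricresult>
      </metric>
    </audioelement>
    """

    audioelement = ET.fromstring(fragment)
    for result in audioelement.findall("./metric/metricresult"):
        # each metricresult carries its metric name and the stored value as text
        print(result.get("name"), result.text)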
--- a/js/core.js Wed Apr 20 21:19:54 2016 +0200
+++ b/js/core.js Wed Apr 20 21:41:49 2016 +0200
@@ -1816,7 +1816,7 @@
             storeDOM.push(mElementTimer);
         }
         if (audioEngineContext.metric.enableElementTracker) {
-            var elementTrackerFull = storage.document.createElement('metricResult');
+            var elementTrackerFull = storage.document.createElement('metricresult');
             elementTrackerFull.setAttribute('name','elementTrackerFull');
             for (var k=0; k<this.movementTracker.length; k++)
             {
@@ -1828,7 +1828,7 @@
             storeDOM.push(elementTrackerFull);
         }
         if (audioEngineContext.metric.enableElementListenTracker) {
-            var elementListenTracker = storage.document.createElement('metricResult');
+            var elementListenTracker = storage.document.createElement('metricresult');
             elementListenTracker.setAttribute('name','elementListenTracker');
             for (var k=0; k<this.listenTracker.length; k++) {
                 elementListenTracker.appendChild(this.listenTracker[k]);
@@ -1836,25 +1836,25 @@
             storeDOM.push(elementListenTracker);
         }
         if (audioEngineContext.metric.enableElementInitialPosition) {
-            var elementInitial = storage.document.createElement('metricResult');
+            var elementInitial = storage.document.createElement('metricresult');
             elementInitial.setAttribute('name','elementInitialPosition');
             elementInitial.textContent = this.initialPosition;
             storeDOM.push(elementInitial);
         }
         if (audioEngineContext.metric.enableFlagListenedTo) {
-            var flagListenedTo = storage.document.createElement('metricResult');
+            var flagListenedTo = storage.document.createElement('metricresult');
             flagListenedTo.setAttribute('name','elementFlagListenedTo');
             flagListenedTo.textContent = this.wasListenedTo;
             storeDOM.push(flagListenedTo);
         }
         if (audioEngineContext.metric.enableFlagMoved) {
-            var flagMoved = storage.document.createElement('metricResult');
+            var flagMoved = storage.document.createElement('metricresult');
             flagMoved.setAttribute('name','elementFlagMoved');
             flagMoved.textContent = this.wasMoved;
             storeDOM.push(flagMoved);
         }
         if (audioEngineContext.metric.enableFlagComments) {
-            var flagComments = storage.document.createElement('metricResult');
+            var flagComments = storage.document.createElement('metricresult');
             flagComments.setAttribute('name','elementFlagComments');
             if (this.parent.commentDOM == null)
                 {flag.textContent = 'false';}
--- a/python/generate_report.py Wed Apr 20 21:19:54 2016 +0200
+++ b/python/generate_report.py Wed Apr 20 21:41:49 2016 +0200
@@ -179,8 +179,8 @@
     # number of comments (interesting if comments not mandatory)
     for audioelement in audioelements:
         response = audioelement.find("./comment/response")
-        was_played = audioelement.find("./metric/metricResult/[@name='elementFlagListenedTo']")
-        was_moved = audioelement.find("./metric/metricResult/[@name='elementFlagMoved']")
+        was_played = audioelement.find("./metric/metricresult/[@name='elementFlagListenedTo']")
+        was_moved = audioelement.find("./metric/metricresult/[@name='elementFlagMoved']")
         if response is not None and response.text is not None and len(response.text) > 1:
             number_of_comments += 1
         else:
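After the rename, the report generator still matches the per-element flags by their name attribute, only against the lowercase tag. A minimal sketch of the same lookup pattern follows, assuming a result file named example_result.xml and flag text written as 'true'/'false' by js/core.js.

    import xml.etree.ElementTree as ET

    tree = ET.parse("example_result.xml")  # assumed file name
    for audioelement in tree.iter("audioelement"):
        # same ElementTree paths as in generate_report.py above
        was_played = audioelement.find("./metric/metricresult/[@name='elementFlagListenedTo']")
        was_moved = audioelement.find("./metric/metricresult/[@name='elementFlagMoved']")
        if was_played is not None and was_played.text == "false":
            print(audioelement.get("ref"), "was never played")
        if was_moved is not None and was_moved.text == "false":
            print(audioelement.get("ref"), "was never moved")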
--- a/python/timeline_view.py Wed Apr 20 21:19:54 2016 +0200
+++ b/python/timeline_view.py Wed Apr 20 21:41:49 2016 +0200
@@ -102,7 +102,7 @@
             audioelements_names.append(audio_id)
 
             # for this audioelement, loop over all listen events
-            listen_events = audioelement.findall("./metric/metricResult/[@name='elementListenTracker']/event")
+            listen_events = audioelement.findall("./metric/metricresult/[@name='elementListenTracker']/event")
             for event in listen_events:
                 # mark this plot as not empty
                 plot_empty = False
--- a/python/timeline_view_movement.py Wed Apr 20 21:19:54 2016 +0200
+++ b/python/timeline_view_movement.py Wed Apr 20 21:41:49 2016 +0200
@@ -109,20 +109,20 @@
             audio_id = str(audioelement.get('ref'))
 
             # break if no initial position or move events registered
-            initial_position_temp = audioelement.find("./metric/metricResult/[@name='elementInitialPosition']")
+            initial_position_temp = audioelement.find("./metric/metricresult/[@name='elementInitialPosition']")
             if initial_position_temp is None:
                 print("Skipping "+page_name+" from "+subject_id+": does not have initial positions specified.")
                 break
 
             # get move events, initial and eventual position
             initial_position = float(initial_position_temp.text)
-            move_events = audioelement.findall("./metric/metricResult/[@name='elementTrackerFull']/movement")
+            move_events = audioelement.findall("./metric/metricresult/[@name='elementTrackerFull']/movement")
             final_position = float(audioelement.find("./value").text)
 
             # get listen events
             start_times_global = []
             stop_times_global = []
-            listen_events = audioelement.findall("./metric/metricResult/[@name='elementListenTracker']/event")
+            listen_events = audioelement.findall("./metric/metricresult/[@name='elementListenTracker']/event")
             for event in listen_events:
                 # get testtime: start and stop
                 start_times_global.append(float(event.find('testtime').get('start'))-time_offset)
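For the timeline scripts, the same renamed paths yield the initial position, the movement events and the listen events. As a rough illustration only, the sketch below totals listening time per element from the elementListenTracker events; it assumes a result file named example_result.xml and that the testtime element carries a 'stop' attribute alongside the 'start' attribute read in the diff above.

    import xml.etree.ElementTree as ET

    tree = ET.parse("example_result.xml")  # assumed file name
    for audioelement in tree.iter("audioelement"):
        listen_events = audioelement.findall("./metric/metricresult/[@name='elementListenTracker']/event")
        listened = 0.0
        for event in listen_events:
            testtime = event.find("testtime")
            # 'start' is read this way in timeline_view_movement.py; 'stop' is assumed to mirror it
            listened += float(testtime.get("stop")) - float(testtime.get("start"))
        print(audioelement.get("ref"), round(listened, 2), "s listened")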