annotate cpack/dml/api/perspectives.pl @ 0:718306e29690 tip

committing public release
author Daniel Wolff
date Tue, 09 Feb 2016 21:05:06 +0100
/* Part of DML (Digital Music Laboratory)
   Copyright 2014-2015 Samer Abdallah, University of London

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License
   as published by the Free Software Foundation; either version 2
   of the License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/

:- module(perspectives, []).

/** <module> VIS API Perspectives

   Todo

   - Chord sequences
   - Standardise data structures
*/
:- use_module(library(http/http_dispatch), [http_link_to_id/3]).
:- use_module(library(semweb/rdf_db)).
:- use_module(library(semweb/rdf_label)).
:- use_module(library(dcg_core)).
:- use_module(library(insist)).
:- use_module(library(computations)).
:- use_module(library(backend_json)).
:- use_module(library(dataset)).
:- use_module(library(memo)).
:- use_module(library(async)).
:- use_module(library(mlserver)).
:- use_module(api(dmlvis)).
:- use_module(api(archive)).

% :- setting(memoise_failures,boolean,false,"Whether or not to record failed computations to avoid retrying").
:- setting(default_recompute_policy,oneof([none,failed,force]),none,'Default policy on recomputing memoised computations').
:- setting(default_vamp_on_demand,boolean,false,'Default policy on doing VAMP computations on demand').

% registry of perspectives.
dmlvis:perspective( getRecordingPerspective, Name,
                    [+uri(URI),vamp_on_demand(V)-false|Params],
                    cc(perspectives:rla(Pred,[vamp_on_demand(V)],URI))
                  ) :- rec_persp(Name, Params, Pred).

dmlvis:perspective( getCollectionPerspective, Name,
                    [+cid(CID),recompute(R)-none,vamp_on_demand(V)-false,coverage(C)-summary|Params],
                    cc(perspectives:cla(Pred,[recompute(R),vamp_on_demand(V),coverage(C)],CID))
                  ) :- coll_persp(Name, Params, Pred).
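
% Illustrative example only (derived from the two registry clauses above, not a
% separate API contract): for the recording-level perspective chord_histogram,
% whose rec_persp/3 entry below takes no extra parameters, the registry yields
% roughly
% ==
% ?- dmlvis:perspective(getRecordingPerspective, chord_histogram, Spec, Cont).
% Spec = [+uri(URI), vamp_on_demand(V)-false],
% Cont = cc(perspectives:rla(chord_histogram, [vamp_on_demand(V)], URI)).
% ==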

:- meta_predicate rla(2,+,+,-,-), cla(2,+,+,-,-).

rla(Pred,Opts,URI,Result,stable) :-
    option(vamp_on_demand(V), Opts, false),
    with_global(vamp_on_demand, V, call(Pred,URI,Result)).

cla(Pred,Opts,CID,Result,stable) :-
    check_collection(CID),
    call(Pred,Opts,CID,Result1),
    option(coverage(Cov),Opts,full),
    insist(filter_coverage(Cov,Result1,Result), invalid_coverage_parameter(Cov)).

:- op(1050,xfy,=>).
G1 => G2 --> (call_dcg(G1) -> call_dcg(G2); []).

filter_coverage(full) --> [].
filter_coverage(summary) --> dtrans(coverage,C1,C2) => {summarise_coverage(C1,C2)}.
summarise_coverage --> foldl(replace_list_with_length,[failed,errors],[failed_count,errors_count]).
replace_list_with_length(Key) --> dtrans(Key,List,Length) => {length(List,Length)}.
replace_list_with_length(Key1,Key2) --> ddel(Key1,List) => {length(List,Len)}, dput(Key2,Len).
dtrans(Key,Val1,Val2,D1,D2) :- get_dict(Key,D1,Val1,D2,Val2).
ddel(Key,Val,D1,D2) :- del_dict(Key,D1,Val,D2).
dput(Key,Val,D1,D2) :- put_dict(Key,D1,Val,D2).
% dget(Key,Val,D,D) :- get_dict(Key,D,Val).
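
% For illustration: with coverage(summary), the failed/errors lists inside the
% coverage sub-dict are replaced by their lengths, e.g.
% ==
% ?- filter_coverage(summary,
%        _{coverage:_{ok_count:5, failed:[f1,f2], errors:[]}}, R).
% R = _{coverage:_{ok_count:5, failed_count:2, errors_count:0}}.
% ==
% With coverage(full), the result dict is passed through unchanged.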

check_collection(CID) :-
    insist(dataset_size(CID,Size), unknown_collection(CID)),
    debug(dmlvis(perspective),'Doing collection level analysis on ~d items.',[Size]).

rec_persp( transcription, [], output_link(transcription(0))).
rec_persp( transcription_fine, [], output_link(transcription(1))).
rec_persp( chords, [], output_link(chords)).
rec_persp( chord_notes, [], output_link(chord_notes)).
rec_persp( beatroot, [], output_link(beats(beatroot))).
rec_persp( key, [], output_link(key)).
rec_persp( key_tonic, [], output_link(tonic)).
rec_persp( beats, [], output_link(beats(qm))).
rec_persp( tempo, [], output_link(tempo)).
rec_persp( chromagram, [], output_link(chromagram)).
rec_persp( mfcc, [], output_link(mfcc)).

rec_persp( spectrogram, [offset(O)-0,length(L)-60], spectrogram_link(O,L)).
rec_persp( tempo_nonuniform, [], nonuniform_tempo).
rec_persp( tempo_uniform, [period(DT)-1,lang(L)-ml ], uniform_tempo(L,DT)).
rec_persp( tempo_normalised, [num_samples(N)-20,lang(L)-ml ], normalised_tempo(L,N)).
rec_persp( chord_histogram, [], chord_histogram).
rec_persp( midi_pitch_histogram, [weighting(W)-none], pitch_histogram(W)).
rec_persp( pitch_histogram, [weighting(W)-none, quant(Q)-5, min(Min)-0, max(Max)-127, lang(L)-ml ],
           freq_histogram(L,Min,Max,Q,W)).
rec_persp( tempo_histogram, [period(DT)-1, num_bins(N)-50, min(Min)-20, max(Max)-360, lang(L)-ml ],
           tempo_histogram(L,DT,Min,Max,N)).
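
% Sketch only (my_histogram is hypothetical, not part of this module): a new
% recording-level perspective is added with one rec_persp/3 fact giving its
% name, its parameter spec and the goal that computes it, e.g.
% ==
% rec_persp( my_histogram, [num_bins(N)-50], my_histogram(N)).
% ==
% where my_histogram(N,URI,Result) would be defined alongside the other
% recording-level perspectives below.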

%% coll_persp(P:perspective(A), Params:list(param), Pred:pred(+options,+dataset,-A)) is nondet.
%
% Database of collection perspectives. The first argument is an atom denoting a perspective
% which returns results of type A. Params must be defined as in dmlvis:options_optspec/2.
% Pred must accept a list of options and a collection (dataset) id and produce a result.
coll_persp( mean_tempo_curve, [num_samples(N)-20,lang(L)-ml], mem(collection_tempo_curve(L,N))).
coll_persp( midi_pitch_histogram, [weighting(W)-none], mem(collection_pitch_histogram(W))).
coll_persp( pitch_histogram, [weighting(W)-none, quant(Q)-5, min(Min)-20, max(Max)-100, lang(L)-ml],
            mem(collection_freq_histogram(L,Min,Max,Q,W))).
coll_persp( tempo_histogram, [period(DT)-1, num_bins(N)-50, min(Min)-20, max(Max)-100, lang(L)-ml],
            mem(collection_tempo_histogram(L,DT,Min,Max,N))).
coll_persp( pitch_lookup, [+midi_pitch(P), weighting(W)-none, limit(Lim)-5000,offset(Off)-0],
            nomem(collection_pitch_lookup(W,P,Lim,Off))).
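
% Sketch only (my_mean_tempo and its worker are hypothetical): collection-level
% perspectives are registered the same way, wrapping the worker either in mem/4
% (memoised, run asynchronously) or nomem/4 (run on request), e.g.
% ==
% coll_persp( my_mean_tempo, [lang(L)-ml], mem(my_collection_mean_tempo(L))).
% ==
% where my_collection_mean_tempo(Lang,CID,Result) computes one result for the
% whole dataset CID.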

% using python back-end
coll_persp( tonic_relative_pitch_class_histogram, [],
            mem(py_hist(transcription_tonic_duration, tonic_norm_semitone_hist:aggregate, [opts{normalisation:piece}]))).
coll_persp( tonic_histogram, [], mem(py_hist(tagged(tonic), key_tonic_hist:aggregate, []))).
coll_persp( pitch_class_histogram, [], mem(py_hist(tagged(transcription), semitone_hist:aggregate, []))).
coll_persp( tuning_stats, [], mem(py_cla(tagged(transcription(1)),tuning_stats:per_file,[]))).
coll_persp( tuning_stats_by_year, [], mem(py_cla(transcription_date,tuning_stats_byyear:per_file,[]))).
coll_persp( places_hist, [], nomem(py_cla(list_places,places_hist:per_file,[]))).
coll_persp( key_relative_chord_seq,
            [ spm_minlen(MinLen)-2, spm_maxseqs(MaxSeqs)-500, spm_algorithm(Alg)-'CM-SPADE',
              spm_ignore_n(Ignn)-1, spm_maxtime(Smaxt)-60, spm_minsupport(Smins)-50 ],
            mem(py_cla( keys_chords,chord_seq_key_relative:aggregate,
                        [opts{ spm_minlen:MinLen, spm_maxseqs:MaxSeqs, spm_algorithm:Alg,
                               spm_ignore_n:Ignn,spm_maxtime:Smaxt,spm_minsupport:Smins } ]))).

coll_persp( similarity,
            [ sim_downsample(SimDown)-1,sim_clusters(SimClusters)-40,sim_reclimit(Limo)-2000,
              sim_type(SimType)-'euclidean',sim_features(SimFeat)-'chromagram',
              sim_compressor(SimComp)-'zlib'],
            mem(py_cla(similarity_bundle,similarity:per_file,
                       [opts{sim_type:SimType,sim_clusters:SimClusters,sim_downsample:SimDown,
                             sim_reclimit:Limo,sim_features:SimFeat,sim_compressor:SimComp}]))).

% adaptor for unmemoised collection perspectives: runs the goal directly,
% using only the vamp_on_demand option
nomem(Goal,Opts,CID,Result) :-
    option(vamp_on_demand(V), Opts, false),
    with_global(vamp_on_demand, V,
                with_progress_stack(call(Goal,CID,Result))).

dmlvis:param( recompute, [oneof([none,failed,force]), default(Def),
                          description('Controls handling of memoised collection level results')]) :-
    setting(default_recompute_policy,Def).
dmlvis:param( vamp_on_demand, [boolean, default(Def),
                               description('Whether to run VAMP plugins if results are not already available')]) :-
    setting(default_vamp_on_demand,Def).
dmlvis:param( coverage, [oneof([full,summary]), default(summary),
                         description('How much detail to provide about recordings not successfully included in CLA')]).
dmlvis:param( offset, [number, default(0), description('Offset into signal in seconds')]).
dmlvis:param( length, [number, default(60), description('Length of signal extract in seconds')]).
dmlvis:param( weighting, [oneof([none,dur,vel]), default(none), description('Weighting for pitch_histogram perspective')]).
dmlvis:param( quant, [nonneg, default(5), description('Subdivisions of a semitone for freq_histogram')]).
dmlvis:param( period, [number, default(1), description('Sampling period in seconds')]).
dmlvis:param( num_bins, [nonneg, default(50), description('Number of bins for histogram')]).
dmlvis:param( num_samples, [nonneg, default(50), description('Number of samples for normalised tempo curve')]).
dmlvis:param( max, [nonneg, default(100), description('Max pitch for pitch histogram')]).
dmlvis:param( min, [nonneg, default(20), description('Min pitch for pitch histogram')]).
dmlvis:param( lang, [oneof([ml,r]), default(r), description('Numerical computations language')]).

/* chord sequence parameters */
dmlvis:param( spm_minlen, [nonneg, default(2), description('Minimum length of chord sequence')]).
dmlvis:param( spm_maxseqs, [nonneg, default(500), description('Maximum number of sequences to return')]).
dmlvis:param( spm_algorithm, [atom, default('CM-SPADE'), description('CM-SPADE, TKS or ClaSP')]).
dmlvis:param( spm_ignore_n, [nonneg, default(1), description('Ignore failed chord detections')]).
dmlvis:param( spm_maxtime, [nonneg, default(60), description('Max. runtime for SPM algorithm')]).
dmlvis:param( spm_minsupport, [nonneg, default(50), description('Minimum support in percent')]).


/* similarity parameters */
dmlvis:param( sim_type, [atom, default('euclidean'), description('Type of similarity measure: euclidean, compression')]).
dmlvis:param( sim_clusters, [nonneg, default(40), description('Number of clusters for vector quantisation (40-200)')]).
dmlvis:param( sim_downsample, [nonneg, default(1), description('Downsample the audio analysis to a resolution of 1 second')]).
dmlvis:param( sim_reclimit, [nonneg, default(2000), description('Maximum number of recordings in dataset')]).
dmlvis:param( sim_features, [atom, default('chromagram'), description('Feature basis of the similarity estimation; any comma-separated combination of: chromagram,mfcc,chords')]).
dmlvis:param( sim_compressor, [atom, default('zlib'), description('Compressor for similarity estimation: zlib, zxd')]).


:- rdf_meta transform_computation(+,r,r).
transform_computation(Class,In,Out) :-
    (   transform(Class,Fn), computation(Fn,In,Out) *-> true
    ;   (   nb_current(vamp_on_demand,true)
        ->  insist(transform(Class,Fn), unrecognised_transform_class(Class)), % picks first match
            format(string(Desc),"Running computation ~w on ~w.",[Fn,In]),
            simple_task(Desc,computation_memo(Fn,In,Out))
        ;   throw(missing_computation(Class,In))
        )
    ).

:- rdf_meta transform_op(+,+,r,-).
transform_op(TName,Op,In,Out) :-
    transform_computation(TName,In,X),
    csv_op(Op,X,Out).
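
% For example (illustrative; assumes the transcription output already exists for
% RecURI, or that vamp_on_demand is enabled), a recording's MIDI pitch histogram
% can be obtained with something like
% ==
% ?- transform_op(transcription, pitch_hist(none), RecURI, Hist).
% ==
% where RecURI is a recording URI from the triple store and the pitch_hist/1
% argument is the weighting (none, dur or vel), as in pitch_histogram/3 below.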

% ------- recording level perspectives ------------

spectrogram_link(Offs,Len,URI,_{image_url:Link}) :-
    http_link_to_id(spectrogram_window, [uri(URI), offset(Offs), length(Len)], Link).

output_link(TransformName,Input,_{csv:Output}) :-
    transform_computation(TransformName,Input,Output).

chord_histogram(URI,_{values:Chords, counts:Counts}) :-
    transform_op(chords,chord_hist,URI,Hist),
    unzip(Hist,Chords,Counts).

pitch_histogram(W,URI,_{values:NNs, counts:Counts}) :-
    transform_op(transcription,pitch_hist(W),URI,Hist),
    unzip(Hist,NNs,Counts).

freq_histogram(ml,Min,Max,Q,W,URI,_{edges:Edges, counts:Counts}) :-
    microtone_map(Min,Max,Q,Map),
    transform_op(transcription(1),freq_hist(Map,W),URI,Counts),
    map_edges(ml,Map,Edges).
freq_histogram(r,Min,Max,Q,W,URI,_{edges:Edges, counts:Counts}) :-
    microtone_map(Min,Max,Q,Map),
    transform_op(transcription(1),freq_hist_r(Map,W),URI,Counts),
    map_edges(r,Map,Edges).

nonuniform_tempo(URI,_{times:Times, values:Values}) :-
    transform_op(tempo,tempo,URI,Result),
    unzip(Result,Times,Values).

uniform_tempo(ml,DT,URI,_{times:Times, values:Values}) :-
    transform_op(tempo,uniform_tempo(DT),URI,Result),
    Result=Times-Values.

uniform_tempo(r,DT,URI,_{times:Times, values:Values}) :-
    transform_op(tempo,uniform_tempo_r(DT),URI,Result),
    Result=Times-Values.

normalised_tempo(ml,N,URI,_{times:Times, values:Values}) :-
    transform_op(tempo,normalised_tempo(N),URI,Result),
    Result=Times-Values.

normalised_tempo(r,N,URI,_{times:Times, values:Values}) :-
    transform_op(tempo,normalised_tempo_r(N),URI,Result),
    Result=Times-Values.

tempo_histogram(ml,DT,Min,Max,N,URI,_{edges:Edges, counts:Counts}) :-
    insist(Min>0, domain_error(min,"positive value",Min)),
    Map=expmap(Min,Max,N),
    map_edges(ml,Map,Edges),
    transform_op(tempo,tempo_hist(DT,Map),URI,Result),
    Result=_-Counts.
tempo_histogram(r,DT,Min,Max,N,URI,_{edges:Edges, counts:Counts}) :-
    insist(Min>0, domain_error(min,"positive value",Min)),
    Map=expmap(Min,Max,N),
    map_edges(r,Map,Edges),
    transform_op(tempo,tempo_hist_r(DT,Map),URI,Result),
    Result=_-Counts.


% ------- collection level perspectives ------------

collection_pitch_histogram(W,CID,Result) :-
    Min-Max = 20-100, % !!! FIXME
    numlist(Min,Max,NNs),
    dataset_histogram(CID, dense_pitch_hist(Min,Max,W), _{values:NNs}, Result).

collection_pitch_lookup(Weighting, Pitch, Lim, Offset, CID, Result) :-
    map_reduce_dataset(rec_pitch_hist(Weighting), pitch_lookup_cont(Pitch,Lim,Offset), CID, Result).

pitch_lookup_cont(Pitch,Lim,Offset,RecHists, _{items:Items}) :-
    findall( _{ uri: Rec, label:Label, count:Count, prob:Prob },
             offset(Offset, limit(Lim, order_by( [desc(Prob)],
                 ( member(Rec-Hist,RecHists),
                   rdf_display_label(Rec,Label),
                   pitch_hist_prob(Hist,Pitch,Count,Prob)
                 )))),
             Items).

% collection_pitch_lookup_alt(Weighting, Pitch, Lim, Offset, CID, _{ items:Items, coverage:Coverage}) :-
%    findall_map_coverage(dataset_item(CID), rec_transcription, RecTrans, Coverage),
%    findall( _{ uri: Rec, label:Label, count:Count, prob:Prob },
%             offset(Offset, limit(Lim, order_by( [desc(Prob)],
%                 ( csv_pitch_count_prob(Weighting,Trans,Pitch,Count,Prob),
%                   member(Rec-Trans,RecTrans),
%                   rdf_display_label(Rec,Label)
%                 )))),
%             Items).
%
% rec_transcription(Rec,Rec-Transcription) :- transform_computation(transcription,Rec,Transcription).

collection_freq_histogram(Lang,Min,Max,Q,W,CID,Result) :-
    Map=binmap(Min,Max,(Max-Min)*Q+1),
    map_edges(Lang,Map,Edges),
    dataset_histogram(CID, dense_freq_hist(Lang,Map,W),_{edges:Edges}, Result).

collection_tempo_histogram(Lang,DT,Min,Max,N,CID,Result) :-
    insist(Min>0, domain_error(min,"positive value",Min)),
    Map=expmap(Min,Max,N),
    map_edges(Lang,Map,Edges),
    dataset_histogram(CID, tempo_hist(Lang,DT,Map), _{edges:Edges}, Result).

collection_tempo_curve(Lang,N,CID, Result) :-
    map_reduce_dataset(tempo_curve(Lang,N), tempo_curves_stats(Lang), CID, Result).


dataset_histogram(CID, Mapper, Dict, Result) :-
    dataset_map_fold_reduce(CID,Mapper,with_dl(fold_hist),finish_hist(Dict),nothing,Result).

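% fold_hist/3 accumulates dense per-recording count vectors elementwise; the
% nothing/just wrapper seeds the accumulator from the first batch of mapped
% items (add/3 is assumed to be a numeric addition helper).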
fold_hist([], S, S) :- !.
fold_hist(Xs, just(C1), just(C2)) :- !, insist(seqmap(maplist(add),Xs,C1,C2)).
fold_hist([X|Xs], nothing, just(C)) :- insist(seqmap(maplist(add),Xs,X,C)).

finish_hist(Dict,just(Counts),Hist) :- put_dict(counts,Dict,Counts,Hist).

py_hist(Mapper, PyFunction, Args, CID, _{counts:H,values:D,coverage:C,py_coverage:PYC}) :-
    py_cla(Mapper,PyFunction,Args, CID, _{stats:_{counts:H,domain:D},coverage:C,py_coverage:PYC}).

py_cla(Mapper, PyFunction, Args, CID, Result) :-
    map_reduce_dataset(Mapper, py_cla_cont(PyFunction,Args), CID, Result).

py_cla_cont(PyFunction,Args, Ok, _{stats:Result, py_coverage:Coverage}) :-
    python_apply(PyFunction,[Ok|Args],Reply),
    Reply = _{result:Result, stats:Coverage}.


% CLA mappers
rec_pitch_hist(W,Rec,Rec-Hist) :- transform_op(transcription,pitch_hist(W),Rec,Hist).

dense_pitch_hist(Min,Max,W,Rec,DenseHist) :-
    transform_op(transcription,pitch_hist(W),Rec,SparseHist),
    sparse_to_dense(Min,Max,SparseHist,DenseHist).

dense_freq_hist(ml,Map,W,Rec,Counts) :-
    transform_op(transcription(1),freq_hist(Map,W),Rec,Counts).
dense_freq_hist(r,Map,W,Rec,Counts) :-
    transform_op(transcription(1),freq_hist_r(Map,W),Rec,Counts).

tempo_hist(ml,DT,Map,Rec,Counts) :-
    transform_op(tempo,tempo_hist(DT,Map),Rec,Result),
    Result=_-Counts.
tempo_hist(r,DT,Map,Rec,Counts) :-
    transform_op(tempo,tempo_hist_r(DT,Map),Rec,Result),
    Result=_-Counts.

tempo_curve(ml,N,Rec,Values) :-
    transform_op(tempo,normalised_tempo(N),Rec,Result),
    Result=_-Values.
tempo_curve(r,N,Rec,Values) :-
    transform_op(tempo,normalised_tempo_r(N),Rec,Result),
    Result=_-Values.

transcription_tonic_duration(Rec, _{transcription: Transcription, tonic: Tonic, duration:0 }) :-
    tagged(transcription,Rec,Transcription),
    tagged(tonic,Rec,Tonic).

transcription_date(Rec, _{transcription: Transcription, date:Date}) :-
    tagged(transcription(1),Rec,Transcription),
    insist(recording_property(Rec,date,Date),missing_property(Rec,date)).

keys_chords(Rec, _{keys: Keys, chords:Chords}) :-
    tagged(key,Rec,Keys),
    tagged(chords,Rec,Chords).

similarity_bundle(Rec, _{chromagram: Chromagram, mfcc:Mfcc, keys: Keys, chords:Chords, list:_{uri:Rec, label:Label}}) :-
    % nb_getval(vamp_on_demand,Vamp),
    % concurrent_maplist(tagged_parallel(Vamp,Rec),[chromagram,mfcc,key,chords],[Chromagram,Mfcc,Keys,Chords]),
    maplist(tagged,[chromagram,mfcc,key,chords],[Rec,Rec,Rec,Rec],[Chromagram,Mfcc,Keys,Chords]),
    insist(recording_property(Rec,label,Label),missing_property(Rec,label)).

tagged_parallel(Vamp,Rec,Transform,Result) :-
    nb_setval(vamp_on_demand,Vamp),
    with_progress_stack(tagged(Transform,Rec,Result)).

list_places(Rec, _{place:Place,list:_{uri:Rec, label:Label}}) :-
    insist(recording_property(Rec,place,Place),missing_property(Rec,place)),
    insist(recording_property(Rec,label,Label),missing_property(Rec,label)).

% for later...
% tagged_list(Spec,Rec,Dict) :-
%    maplist(tagged_item(Rec),Spec,Pairs),
%    dict_create(Dict,_,Pairs).
% tagged_item(Rec,Key:Transform,Key:Value) :- tagged(Transform,Rec,Value).

tagged(Transform,Input,csv{value:Path}) :-
    transform_computation(Transform,Input,R), uri_absolute_path(R,Path).

% ---------------------------------------------------

:- initialization time(memo_attach(memo(perspectives),[])).

:- persistent_memo cla_memo(+spec:ground,+cid:atom,-result:any).
cla_memo(Spec,CID,Result) :-
    debug(perspectives(cla),'cla_mem: ~q',[call(Spec,CID,Result)]),
    with_progress_stack(call(Spec,CID,Result)).

%% mem(+Spec:pred(+cid,-A),+Opts:options,+CID:cid,-Result:A) is det.
%
%  Asynchronous memoised collection-level computation.
%  Spec must be a ground term that can be called with two arguments: the id of a
%  collection and a variable, which is bound to an arbitrary result term on exit.
%  If the computation has already been done and memoised in cla_memo/3, the result is
%  retrieved. Otherwise, the computation is started asynchronously and an exception
%  describing the state of the computation is thrown:
%  ==
%  * dml_error(10, _{status:already_waiting, position:N})
%    The goal was previously added and is now waiting at position N in the queue.
%  * dml_error(11, _{status:already_running, progress:Progress})
%    The goal was previously added and is currently running, with some progress information.
%  * dml_error(12, _{status:initiated, position:N})
%    The goal has just been added to the work queue of the thread pool at position N.
%  ==
%  The remaining options are passed on to async_memo/4 to control the interaction.

mem(Spec,Opts,CID,Result) :-
    option(vamp_on_demand(V), Opts, false),
    async_memo(vis_cla,cla_memo(Spec,CID,Result),Status,
               [ progress_levels([elapsed,summary,partial_result]),
                 globals([vamp_on_demand-V])|Opts ]),
    (   Status=done(_-ok) -> true
    ;   status_response(Status,Code,Dict),
        (   Status=done(_) -> Dict1=Dict
        ;   estimate_run_time(Spec,CID,ERT),
            put_dict(ert,Dict,ERT,Dict1)
        ),
        throw(dml_error(Code,Dict1))
    ).

% very crude estimate
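% The estimate scales the total observed runtime of Spec by relative dataset
% size: e.g. if previous runs covered 1000 items in 500 seconds in total, a new
% 200-item collection is estimated at 500*200/1000 = 100 seconds; -1 means
% there is no history to estimate from.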
estimate_run_time(Spec,CID0,ERT) :-
    findall(Size-Dur, ( browse(cla_memo(Spec,CID,_),comp(_,_,Dur)-ok),
                        dataset_size(CID,Size)), Pairs),
    length(Pairs,N),
    (   N=0 -> ERT is -1
    ;   maplist(computations:pair,Sizes,Durs,Pairs),
        sumlist(Sizes,TotalSize),
        sumlist(Durs,TotalDur),
        dataset_size(CID0,Size),
        ERT is TotalDur*Size/TotalSize
    ).

status_response(spawned(ID,Pos), 12, Info) :-
    Info = _{status:initiated, id:ID, position:Pos }.
status_response(waiting(ID,T,Pos), 10, Info) :-
    Info = _{status:already_waiting, id:ID, submit_time:TS, position:Pos },
    time_to_string(T,TS).
status_response(running(ID,TStart,_,nothing), 11, Info) :-
    Info = _{ id:ID, status:already_running, start_time:TS },
    time_to_string(TStart,TS).
status_response(running(ID,TStart,_,just(Time-[Progress,Partial])), 11, Info) :-
    maplist(progress_json,Progress,Progs),
    time_to_string(TStart,TS),
    Elapsed is Time-TStart,
    (   member(stepwise(_,Done/Total),Progress), Done>0
    ->  ETA is Elapsed*(Total-Done)/Done
    ;   ETA is -1
    ),
    Info1 = _{ id:ID
             , status:already_running
             , start_time:TS
             , elapsed_time:Elapsed
             , progress:Progs
             , eta:ETA
             },
    (   Partial=just(R)
    ->  put_dict(partial_result,Info1,R,Info)
    ;   Info=Info1
    ).

status_response(recomputing(ID,Pos,Meta), 13, Info) :-
    Info = _{status:recomputing, id:ID, position:Pos, meta:MD},
    meta_dict(Meta,MD).
status_response(done(Meta), 14, _{status:failed, meta:MD}) :-
    meta_dict(Meta,MD).

meta_dict(comp(_,Time,Dur)-Result, _{ date: Date, duration:Dur, reason:Reason}) :-
    format_result(Result,Reason),
    time_to_string(Time,Date).

format_result(fail,'Unspecified failure').
format_result(ex(Ex),Description) :- message_to_string(Ex,Description).

time_to_string(Time,String) :- format_time(string(String),'%FT%T%:z',Time).

progress_json(A,A) :- atomic(A), !.
progress_json(stepwise(Desc,Done/Total), _{ task:Task, total:Total, done:Done }) :- !,
    progress_json(Desc,Task).
progress_json(T,A) :- message_to_string(T,A).

prolog:message(map_fold(_Mapper,_Folder)) --> ['Map-fold'].

:- multifile thread_pool:create_pool/1.
thread_pool:create_pool(vis_cla) :-
    current_prolog_flag(cpu_count,N),
    thread_pool_create(vis_cla, N, [backlog(20)]).

% ------------ computations with progress ------------------

map_reduce_dataset(Mapper,Reducer,CID,Result) :-
    dataset_map_fold_reduce(CID,Mapper,append_dl,with_dl(Reducer),H-H,Result).

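% append_dl(New,Acc0,Acc) appends the difference list New onto Acc0, and
% with_dl/3 and with_dl/4 close the open tail before handing the collected list
% to the wrapped goal. For example:
% ==
% ?- append_dl([c,d|T]-T, [a,b|S]-S, L).
% S = [c,d|T],
% L = [a,b,c,d|T]-T.
% ==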
append_dl(HH-TT,H-HH,H-TT).
with_dl(P,H-[],A) :- call(P,H,A).
with_dl(P,H-[],A,B) :- call(P,H,A,B).

dataset_map_fold_reduce(CID,Mapper,Folder,Reducer,S0,Result) :-
    dataset_items(CID,Items),
    with_cont( 'Map-fold-reduce',
               map_fold_with_progress( safe_call(Mapper),
                                       safe_fold(Folder),
                                       Items, s(0,F-F,E-E,S0)),
               reduce_cont(Reducer), Result).

reduce_cont(Reducer,s(NOk,Failed-[],Erroneous-[],S), R) :-
    (   NOk>0
    ->  simple_task(reducing(Reducer),call(Reducer,S,R1)),
        put_coverage(NOk,Failed,Erroneous,R1,R)
    ;   put_coverage(NOk,Failed,Erroneous,_{status:'no successfully mapped items'},D),
        throw(dml_error(20, D))
    ).

put_coverage(NOk,Failed,Erroneous,R1,R) :-
    put_dict(coverage,R1,_{ok_count:NOk, failed:Failed, errors:Erroneous},R).

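% safe_call(Mapper,X,Z) shields the map stage from failures: Z is ok(X,Y) if the
% mapper succeeds, error(X,Ex) if it throws (abort/1 exceptions are re-thrown),
% and fail(X) if it fails.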
safe_call(Mapper,X,Z) :-
    (   catch((call(Mapper,X,Y), Z=ok(X,Y)), Ex,
              (   Ex=abort(_) -> throw(Ex)
              ;   Z=error(X,Ex))), !
    ;   Z=fail(X)
    ).

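% safe_fold/4 and partition/3 thread an s(NOk,Failed-FTail,Errors-ETail,State)
% accumulator: successfully mapped values are folded into State, while failed
% and erroneous items are collected in difference lists for the coverage report.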
safe_fold(Folder,Items,s(NOk1,FH-FT1,EH-ET1,S1),s(NOk2,FH-FT2,EH-ET2,S2)) :-
    seqmap(partition,Items,s(NOk1,OkH,FT1,ET1),s(NOk2,OkT,FT2,ET2)),
    call(Folder,OkH-OkT,S1,S2).

partition(ok(_,X),s(N,[X|O],F,E),s(M,O,F,E)) :- M is N+1.
partition(fail(X),s(N,O,[X|F],E),s(N,O,F,E)).
partition(error(X,Ex),s(N,O,F,[_{item:X, error:Msg}|E]),s(N,O,F,E)) :- message_to_string(Ex,Msg).
