comparison: cpack/dml/api/perspectives.pl @ 0:718306e29690 (tip)
committing public release

author  Daniel Wolff
date    Tue, 09 Feb 2016 21:05:06 +0100
/* Part of DML (Digital Music Laboratory)
   Copyright 2014-2015 Samer Abdallah, University of London

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License
   as published by the Free Software Foundation; either version 2
   of the License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public
   License along with this library; if not, write to the Free Software
   Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/

:- module(perspectives, []).

/** <module> VIS API Perspectives

   Todo

   - Chord sequences
   - Standardise data structures
*/
:- use_module(library(http/http_dispatch), [http_link_to_id/3]).
:- use_module(library(semweb/rdf_db)).
:- use_module(library(semweb/rdf_label)).
:- use_module(library(dcg_core)).
:- use_module(library(insist)).
:- use_module(library(computations)).
:- use_module(library(backend_json)).
:- use_module(library(dataset)).
:- use_module(library(memo)).
:- use_module(library(async)).
:- use_module(library(mlserver)).
:- use_module(api(dmlvis)).
:- use_module(api(archive)).

% :- setting(memoise_failures,boolean,false,"Whether or not to record failed computations to avoid retrying").
:- setting(default_recompute_policy,oneof([none,failed,force]),none,'Default policy on recomputing memoised computations').
:- setting(default_vamp_on_demand,boolean,false,'Default policy on doing VAMP computations on demand').

% registry of perspectives.
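% The parameter specs below follow dmlvis:options_optspec/2 (see also coll_persp/3 further
% down). Judging from how they are used here, a leading '+' appears to mark a mandatory
% parameter, while Name(Var)-Default supplies an optional parameter with a default value;
% this reading is an assumption based on usage rather than on the dmlvis documentation.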
dmlvis:perspective( getRecordingPerspective, Name,
                    [+uri(URI),vamp_on_demand(V)-false|Params],
                    cc(perspectives:rla(Pred,[vamp_on_demand(V)],URI))
                  ) :- rec_persp(Name, Params, Pred).

dmlvis:perspective( getCollectionPerspective, Name,
                    [+cid(CID),recompute(R)-none,vamp_on_demand(V)-false,coverage(C)-summary|Params],
                    cc(perspectives:cla(Pred,[recompute(R),vamp_on_demand(V),coverage(C)],CID))
                  ) :- coll_persp(Name, Params, Pred).

:- meta_predicate rla(2,+,+,-,-), cla(2,+,+,-,-).

rla(Pred,Opts,URI,Result,stable) :-
    option(vamp_on_demand(V), Opts, false),
    with_global(vamp_on_demand, V, call(Pred,URI,Result)).

cla(Pred,Opts,CID,Result,stable) :-
    check_collection(CID),
    call(Pred,Opts,CID,Result1),
    option(coverage(Cov),Opts,full),
    insist(filter_coverage(Cov,Result1,Result), invalid_coverage_parameter(Cov)).

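% G1 => G2 is a guarded state transformation: if the guard G1 succeeds, continue with G2
% from the state G1 produced; otherwise pass the state through unchanged. It is used below
% to edit dicts only when the relevant key is present.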
:- op(1050,xfy,=>).
G1 => G2 --> (call_dcg(G1) -> call_dcg(G2); []).

filter_coverage(full) --> [].
filter_coverage(summary) --> dtrans(coverage,C1,C2) => {summarise_coverage(C1,C2)}.
summarise_coverage --> foldl(replace_list_with_length,[failed,errors],[failed_count,errors_count]).
replace_list_with_length(Key) --> dtrans(Key,List,Length) => {length(List,Length)}.
replace_list_with_length(Key1,Key2) --> ddel(Key1,List) => {length(List,Len)}, dput(Key2,Len).
dtrans(Key,Val1,Val2,D1,D2) :- get_dict(Key,D1,Val1,D2,Val2).
ddel(Key,Val,D1,D2) :- del_dict(Key,D1,Val,D2).
dput(Key,Val,D1,D2) :- put_dict(Key,D1,Val,D2).
% dget(Key,Val,D,D) :- get_dict(Key,D,Val).
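
% For example, with a hypothetical coverage dict (the query below is illustrative only):
%
%   ?- summarise_coverage(_{ok_count:10, failed:[r1,r2], errors:[]}, Summary).
%   Summary = _{ok_count:10, failed_count:2, errors_count:0}.
%
% i.e. the failed/errors lists are replaced by their lengths, which is what the
% coverage(summary) option delivers to API clients.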

check_collection(CID) :-
    insist(dataset_size(CID,Size), unknown_collection(CID)),
    debug(dmlvis(perspective),'Doing collection level analysis on ~d items.',[Size]).

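%% rec_persp(P:perspective(A), Params:list(param), Pred:pred(+uri,-A)) is nondet.
%
%  Database of recording level perspectives, mirroring coll_persp/3 below: Pred is called
%  (via rla/5) with a recording URI and must produce a result of type A.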
rec_persp( transcription,      [], output_link(transcription(0))).
rec_persp( transcription_fine, [], output_link(transcription(1))).
rec_persp( chords,             [], output_link(chords)).
rec_persp( chord_notes,        [], output_link(chord_notes)).
rec_persp( beatroot,           [], output_link(beats(beatroot))).
rec_persp( key,                [], output_link(key)).
rec_persp( key_tonic,          [], output_link(tonic)).
rec_persp( beats,              [], output_link(beats(qm))).
rec_persp( tempo,              [], output_link(tempo)).
rec_persp( chromagram,         [], output_link(chromagram)).
rec_persp( mfcc,               [], output_link(mfcc)).

rec_persp( spectrogram,          [offset(O)-0,length(L)-60], spectrogram_link(O,L)).
rec_persp( tempo_nonuniform,     [], nonuniform_tempo).
rec_persp( tempo_uniform,        [period(DT)-1,lang(L)-ml ], uniform_tempo(L,DT)).
rec_persp( tempo_normalised,     [num_samples(N)-20,lang(L)-ml ], normalised_tempo(L,N)).
rec_persp( chord_histogram,      [], chord_histogram).
rec_persp( midi_pitch_histogram, [weighting(W)-none], pitch_histogram(W)).
rec_persp( pitch_histogram,      [weighting(W)-none, quant(Q)-5, min(Min)-0, max(Max)-127, lang(L)-ml ],
           freq_histogram(L,Min,Max,Q,W)).
rec_persp( tempo_histogram,      [period(DT)-1, num_bins(N)-50, min(Min)-20, max(Max)-360, lang(L)-ml ],
           tempo_histogram(L,DT,Min,Max,N)).
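
% A new recording perspective is added by asserting another fact here; for instance, a
% hypothetical 'loudness' perspective (not implemented in this module) exposing the CSV
% output of a corresponding transform would look like:
%
%   rec_persp( loudness, [], output_link(loudness)).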

%% coll_persp(P:perspective(A), Params:list(param), Pred:pred(+options,+dataset,-A)) is nondet.
%
%  Database of collection perspectives. The first argument is an atom denoting a perspective
%  which returns results of type A. Params must be defined as in dmlvis:options_optspec/2.
%  Pred must accept a list of options and a collection (dataset) id and produce a result.
coll_persp( mean_tempo_curve,     [num_samples(N)-20,lang(L)-ml], mem(collection_tempo_curve(L,N))).
coll_persp( midi_pitch_histogram, [weighting(W)-none], mem(collection_pitch_histogram(W))).
coll_persp( pitch_histogram,      [weighting(W)-none, quant(Q)-5, min(Min)-20, max(Max)-100, lang(L)-ml],
            mem(collection_freq_histogram(L,Min,Max,Q,W))).
coll_persp( tempo_histogram,      [period(DT)-1, num_bins(N)-50, min(Min)-20, max(Max)-100, lang(L)-ml],
            mem(collection_tempo_histogram(L,DT,Min,Max,N))).
coll_persp( pitch_lookup,         [+midi_pitch(P), weighting(W)-none, limit(Lim)-5000, offset(Off)-0],
            nomem(collection_pitch_lookup(W,P,Lim,Off))).

% using python back-end
coll_persp( tonic_relative_pitch_class_histogram, [],
            mem(py_hist(transcription_tonic_duration, tonic_norm_semitone_hist:aggregate, [opts{normalisation:piece}]))).
coll_persp( tonic_histogram,       [], mem(py_hist(tagged(tonic), key_tonic_hist:aggregate, []))).
coll_persp( pitch_class_histogram, [], mem(py_hist(tagged(transcription), semitone_hist:aggregate, []))).
coll_persp( tuning_stats,          [], mem(py_cla(tagged(transcription(1)), tuning_stats:per_file, []))).
coll_persp( tuning_stats_by_year,  [], mem(py_cla(transcription_date, tuning_stats_byyear:per_file, []))).
coll_persp( places_hist,           [], nomem(py_cla(list_places, places_hist:per_file, []))).
coll_persp( key_relative_chord_seq,
            [ spm_minlen(MinLen)-2, spm_maxseqs(MaxSeqs)-500, spm_algorithm(Alg)-'CM-SPADE',
              spm_ignore_n(Ignn)-1, spm_maxtime(Smaxt)-60, spm_minsupport(Smins)-50 ],
            mem(py_cla( keys_chords, chord_seq_key_relative:aggregate,
                        [opts{ spm_minlen:MinLen, spm_maxseqs:MaxSeqs, spm_algorithm:Alg,
                               spm_ignore_n:Ignn, spm_maxtime:Smaxt, spm_minsupport:Smins } ]))).

coll_persp( similarity,
            [ sim_downsample(SimDown)-1, sim_clusters(SimClusters)-40, sim_reclimit(Limo)-2000,
              sim_type(SimType)-'euclidean', sim_features(SimFeat)-'chromagram',
              sim_compressor(SimComp)-'zlib'],
            mem(py_cla( similarity_bundle, similarity:per_file,
                        [opts{ sim_type:SimType, sim_clusters:SimClusters, sim_downsample:SimDown,
                               sim_reclimit:Limo, sim_features:SimFeat, sim_compressor:SimComp }]))).

% Adaptor for collection-level computations that are not memoised: the goal is run
% directly (rather than via mem/4 and cla_memo/3), honouring the vamp_on_demand option
% and ignoring the remaining collection perspective options.
nomem(Goal,Opts,CID,Result) :-
    option(vamp_on_demand(V), Opts, false),
    with_global(vamp_on_demand, V,
                with_progress_stack(call(Goal,CID,Result))).

dmlvis:param( recompute, [oneof([none,failed,force]), default(Def),
                          description('Controls handling of memoised collection level results')]) :-
    setting(default_recompute_policy,Def).
dmlvis:param( vamp_on_demand, [boolean, default(Def),
                               description('Whether to run VAMP plugins if results are not already available')]) :-
    setting(default_vamp_on_demand,Def).
dmlvis:param( coverage, [oneof([full,summary]), default(summary),
                         description('How much detail to provide about recordings not successfully included in CLA')]).
dmlvis:param( offset,      [number, default(0),  description('Offset into signal in seconds')]).
dmlvis:param( length,      [number, default(60), description('Length of signal extract in seconds')]).
dmlvis:param( weighting,   [oneof([none,dur,vel]), default(none), description('Weighting for pitch_histogram perspective')]).
dmlvis:param( quant,       [nonneg, default(5),  description('Subdivisions of a semitone for freq_histogram')]).
dmlvis:param( period,      [number, default(1),  description('Sampling period in seconds')]).
dmlvis:param( num_bins,    [nonneg, default(50), description('Number of bins for histogram')]).
dmlvis:param( num_samples, [nonneg, default(50), description('Number of samples for normalised histogram')]).
dmlvis:param( max,         [nonneg, default(100), description('Max pitch for pitch histogram')]).
dmlvis:param( min,         [nonneg, default(20),  description('Min pitch for pitch histogram')]).
dmlvis:param( lang,        [oneof([ml,r]), default(r), description('Numerical computations language')]).

/* chord sequence parameters */
dmlvis:param( spm_minlen,     [nonneg, default(2),   description('Minimum length of chord sequence')]).
dmlvis:param( spm_maxseqs,    [nonneg, default(500), description('Maximum number of sequences to return')]).
dmlvis:param( spm_algorithm,  [atom, default('CM-SPADE'), description('CM-SPADE, TKS or ClaSP')]).
dmlvis:param( spm_ignore_n,   [nonneg, default(1),   description('Ignore failed chord detections')]).
dmlvis:param( spm_maxtime,    [nonneg, default(60),  description('Max. runtime for SPM algorithm')]).
dmlvis:param( spm_minsupport, [nonneg, default(50),  description('Minimum support in percent')]).


/* similarity parameters */
dmlvis:param( sim_type,       [atom, default('euclidean'), description('Type of similarity measure: euclidean, compression')]).
dmlvis:param( sim_clusters,   [nonneg, default(40),   description('Number of clusters for vector quantisation (40-200)')]).
dmlvis:param( sim_downsample, [nonneg, default(1),    description('Downsample the audio analysis to a resolution of 1 second')]).
dmlvis:param( sim_reclimit,   [nonneg, default(2000), description('Maximum number of recordings in dataset')]).
dmlvis:param( sim_features,   [atom, default('chromagram'), description('Feature basis of the similarity estimation; any combination, separated by commas: chromagram,mfcc,chords')]).
dmlvis:param( sim_compressor, [atom, default('zlib'), description('Compressor for similarity estimation: zlib, zxd')]).


:- rdf_meta transform_computation(+,r,r).
transform_computation(Class,In,Out) :-
    (  transform(Class,Fn), computation(Fn,In,Out) *-> true
    ;  (  nb_current(vamp_on_demand,true)
       -> insist(transform(Class,Fn), unrecognised_transform_class(Class)), % picks first match
          format(string(Desc),"Running computation ~w on ~w.",[Fn,In]),
          simple_task(Desc,computation_memo(Fn,In,Out))
       ;  throw(missing_computation(Class,In))
       )
    ).

:- rdf_meta transform_op(+,+,r,-).
transform_op(TName,Op,In,Out) :-
    transform_computation(TName,In,X),
    csv_op(Op,X,Out).
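
% For example, the tempo_uniform recording perspective above reduces to a call like the
% following (illustrative only, with RecURI bound to an actual recording URI):
%
%   ?- transform_op(tempo, uniform_tempo(1), RecURI, Times-Values).
%
% i.e. look up (or, with vamp_on_demand, run) the tempo transform for the recording and
% post-process its CSV output via csv_op/3.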

% ------- recording level perspectives ------------

spectrogram_link(Offs,Len,URI,_{image_url:Link}) :-
    http_link_to_id(spectrogram_window, [uri(URI), offset(Offs), length(Len)], Link).

output_link(TransformName,Input,_{csv:Output}) :-
    transform_computation(TransformName,Input,Output).

chord_histogram(URI,_{values:Chords, counts:Counts}) :-
    transform_op(chords,chord_hist,URI,Hist),
    unzip(Hist,Chords,Counts).

pitch_histogram(W,URI,_{values:NNs, counts:Counts}) :-
    transform_op(transcription,pitch_hist(W),URI,Hist),
    unzip(Hist,NNs,Counts).

freq_histogram(ml,Min,Max,Q,W,URI,_{edges:Edges, counts:Counts}) :-
    microtone_map(Min,Max,Q,Map),
    transform_op(transcription(1),freq_hist(Map,W),URI,Counts),
    map_edges(ml,Map,Edges).
freq_histogram(r,Min,Max,Q,W,URI,_{edges:Edges, counts:Counts}) :-
    microtone_map(Min,Max,Q,Map),
    transform_op(transcription(1),freq_hist_r(Map,W),URI,Counts),
    map_edges(r,Map,Edges).

nonuniform_tempo(URI,_{times:Times, values:Values}) :-
    transform_op(tempo,tempo,URI,Result),
    unzip(Result,Times,Values).

uniform_tempo(ml,DT,URI,_{times:Times, values:Values}) :-
    transform_op(tempo,uniform_tempo(DT),URI,Result),
    Result=Times-Values.

uniform_tempo(r,DT,URI,_{times:Times, values:Values}) :-
    transform_op(tempo,uniform_tempo_r(DT),URI,Result),
    Result=Times-Values.

normalised_tempo(ml,N,URI,_{times:Times, values:Values}) :-
    transform_op(tempo,normalised_tempo(N),URI,Result),
    Result=Times-Values.

normalised_tempo(r,N,URI,_{times:Times, values:Values}) :-
    transform_op(tempo,normalised_tempo_r(N),URI,Result),
    Result=Times-Values.

tempo_histogram(ml,DT,Min,Max,N,URI,_{edges:Edges, counts:Counts}) :-
    insist(Min>0, domain_error(min,"positive value",Min)),
    Map=expmap(Min,Max,N),
    map_edges(ml,Map,Edges),
    transform_op(tempo,tempo_hist(DT,Map),URI,Result),
    Result=_-Counts.
tempo_histogram(r,DT,Min,Max,N,URI,_{edges:Edges, counts:Counts}) :-
    insist(Min>0, domain_error(min,"positive value",Min)),
    Map=expmap(Min,Max,N),
    map_edges(r,Map,Edges),
    transform_op(tempo,tempo_hist_r(DT,Map),URI,Result),
    Result=_-Counts.


% ------- collection level perspectives ------------

collection_pitch_histogram(W,CID,Result) :-
    Min-Max = 20-100, % !!! FIXME
    numlist(Min,Max,NNs),
    dataset_histogram(CID, dense_pitch_hist(Min,Max,W), _{values:NNs}, Result).

collection_pitch_lookup(Weighting, Pitch, Lim, Offset, CID, Result) :-
    map_reduce_dataset(rec_pitch_hist(Weighting), pitch_lookup_cont(Pitch,Lim,Offset), CID, Result).

pitch_lookup_cont(Pitch,Lim,Offset,RecHists, _{items:Items}) :-
    findall( _{uri:Rec, label:Label, count:Count, prob:Prob},
             offset(Offset, limit(Lim, order_by( [desc(Prob)],
                (  member(Rec-Hist,RecHists),
                   rdf_display_label(Rec,Label),
                   pitch_hist_prob(Hist,Pitch,Count,Prob)
                )))),
             Items).

% collection_pitch_lookup_alt(Weighting, Pitch, Lim, Offset, CID, _{items:Items, coverage:Coverage}) :-
%     findall_map_coverage(dataset_item(CID), rec_transcription, RecTrans, Coverage),
%     findall( _{uri:Rec, label:Label, count:Count, prob:Prob},
%              offset(Offset, limit(Lim, order_by( [desc(Prob)],
%                 (  csv_pitch_count_prob(Weighting,Trans,Pitch,Count,Prob),
%                    member(Rec-Trans,RecTrans),
%                    rdf_display_label(Rec,Label)
%                 )))),
%              Items).
%
% rec_transcription(Rec,Rec-Transcription) :- transform_computation(transcription,Rec,Transcription).

collection_freq_histogram(Lang,Min,Max,Q,W,CID,Result) :-
    Map=binmap(Min,Max,(Max-Min)*Q+1),
    map_edges(Lang,Map,Edges),
    dataset_histogram(CID, dense_freq_hist(Lang,Map,W), _{edges:Edges}, Result).

collection_tempo_histogram(Lang,DT,Min,Max,N,CID,Result) :-
    insist(Min>0, domain_error(min,"positive value",Min)),
    Map=expmap(Min,Max,N),
    map_edges(Lang,Map,Edges),
    dataset_histogram(CID, tempo_hist(Lang,DT,Map), _{edges:Edges}, Result).

collection_tempo_curve(Lang,N,CID,Result) :-
    map_reduce_dataset(tempo_curve(Lang,N), tempo_curves_stats(Lang), CID, Result).


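% Collection-level histograms are built by mapping each recording to a dense vector of bin
% counts and folding the vectors together element-wise with maplist(add). The accumulator
% uses a maybe-style term: 'nothing' before any counts have arrived, just(Counts) afterwards.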
dataset_histogram(CID, Mapper, Dict, Result) :-
    dataset_map_fold_reduce(CID,Mapper,with_dl(fold_hist),finish_hist(Dict),nothing,Result).

fold_hist([], S, S) :- !.
fold_hist(Xs, just(C1), just(C2)) :- !, insist(seqmap(maplist(add),Xs,C1,C2)).
fold_hist([X|Xs], nothing, just(C)) :- insist(seqmap(maplist(add),Xs,X,C)).

finish_hist(Dict,just(Counts),Hist) :- put_dict(counts,Dict,Counts,Hist).

py_hist(Mapper, PyFunction, Args, CID, _{counts:H, values:D, coverage:C, py_coverage:PYC}) :-
    py_cla(Mapper, PyFunction, Args, CID, _{stats:_{counts:H, domain:D}, coverage:C, py_coverage:PYC}).

py_cla(Mapper, PyFunction, Args, CID, Result) :-
    map_reduce_dataset(Mapper, py_cla_cont(PyFunction,Args), CID, Result).

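% The reducer py_cla_cont/4 hands the list of successfully mapped items to the Python
% back-end via python_apply/3. Note the renaming in the reply: the back-end's 'result'
% field is returned under the 'stats' key of the perspective result, and its 'stats'
% field under 'py_coverage'.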
py_cla_cont(PyFunction,Args, Ok, _{stats:Result, py_coverage:Coverage}) :-
    python_apply(PyFunction,[Ok|Args],Reply),
    Reply = _{result:Result, stats:Coverage}.


% CLA mappers
rec_pitch_hist(W,Rec,Rec-Hist) :- transform_op(transcription,pitch_hist(W),Rec,Hist).

dense_pitch_hist(Min,Max,W,Rec,DenseHist) :-
    transform_op(transcription,pitch_hist(W),Rec,SparseHist),
    sparse_to_dense(Min,Max,SparseHist,DenseHist).

dense_freq_hist(ml,Map,W,Rec,Counts) :-
    transform_op(transcription(1),freq_hist(Map,W),Rec,Counts).
dense_freq_hist(r,Map,W,Rec,Counts) :-
    transform_op(transcription(1),freq_hist_r(Map,W),Rec,Counts).

tempo_hist(ml,DT,Map,Rec,Counts) :-
    transform_op(tempo,tempo_hist(DT,Map),Rec,Result),
    Result=_-Counts.
tempo_hist(r,DT,Map,Rec,Counts) :-
    transform_op(tempo,tempo_hist_r(DT,Map),Rec,Result),
    Result=_-Counts.

tempo_curve(ml,N,Rec,Values) :-
    transform_op(tempo,normalised_tempo(N),Rec,Result),
    Result=_-Values.
tempo_curve(r,N,Rec,Values) :-
    transform_op(tempo,normalised_tempo_r(N),Rec,Result),
    Result=_-Values.

transcription_tonic_duration(Rec, _{transcription:Transcription, tonic:Tonic, duration:0}) :-
    tagged(transcription,Rec,Transcription),
    tagged(tonic,Rec,Tonic).

transcription_date(Rec, _{transcription:Transcription, date:Date}) :-
    tagged(transcription(1),Rec,Transcription),
    insist(recording_property(Rec,date,Date),missing_property(Rec,date)).

keys_chords(Rec, _{keys:Keys, chords:Chords}) :-
    tagged(key,Rec,Keys),
    tagged(chords,Rec,Chords).

similarity_bundle(Rec, _{chromagram:Chromagram, mfcc:Mfcc, keys:Keys, chords:Chords, list:_{uri:Rec, label:Label}}) :-
    % nb_getval(vamp_on_demand,Vamp),
    % concurrent_maplist(tagged_parallel(Vamp,Rec),[chromagram,mfcc,key,chords],[Chromagram,Mfcc,Keys,Chords]),
    maplist(tagged,[chromagram,mfcc,key,chords],[Rec,Rec,Rec,Rec],[Chromagram,Mfcc,Keys,Chords]),
    insist(recording_property(Rec,label,Label),missing_property(Rec,label)).

tagged_parallel(Vamp,Rec,Transform,Result) :-
    nb_setval(vamp_on_demand,Vamp),
    with_progress_stack(tagged(Transform,Rec,Result)).

list_places(Rec, _{place:Place, list:_{uri:Rec, label:Label}}) :-
    insist(recording_property(Rec,place,Place),missing_property(Rec,place)),
    insist(recording_property(Rec,label,Label),missing_property(Rec,label)).

% for later...
% tagged_list(Spec,Rec,Dict) :-
%     maplist(tagged_item(Rec),Spec,Pairs),
%     dict_create(Dict,_,Pairs).
% tagged_item(Rec,Key:Transform,Key:Value) :- tagged(Transform,Rec,Value).

tagged(Transform,Input,csv{value:Path}) :-
    transform_computation(Transform,Input,R), uri_absolute_path(R,Path).

% ---------------------------------------------------

:- initialization time(memo_attach(memo(perspectives),[])).

:- persistent_memo cla_memo(+spec:ground,+cid:atom,-result:any).
cla_memo(Spec,CID,Result) :-
    debug(perspectives(cla),'cla_mem: ~q',[call(Spec,CID,Result)]),
    with_progress_stack(call(Spec,CID,Result)).

%% mem(+Spec:pred(+cid,-A), +Opts:options, +CID:cid, -Result:A) is det.
%
%  Asynchronous memoised collection-level computation.
%  Spec must be a ground term that can be called with two additional arguments: the id of
%  a collection and a variable, which must be bound to an arbitrary result term on exit.
%  If the computation has already been done and memoised in cla_memo/3, the result is
%  retrieved. Otherwise, the computation is started asynchronously and an exception
%  describing the state of the computation is thrown:
%  ==
%  * dml_error(10, _{status:already_waiting, position:N})
%    The goal was previously added and is now waiting at position N in the queue.
%  * dml_error(11, _{status:already_running, progress:Progress})
%    The goal was previously added and is currently running, with some progress information.
%  * dml_error(12, _{status:initiated, position:N})
%    The goal has been added to the work queue of the thread pool at position N.
%  ==
%  (status_response/3 below also produces codes 13 and 14, for recomputation and for a
%  previously failed computation respectively.)
%  Options are passed on to async_memo/4 to control the interaction with it.

mem(Spec,Opts,CID,Result) :-
    option(vamp_on_demand(V), Opts, false),
    async_memo(vis_cla, cla_memo(Spec,CID,Result), Status,
               [ progress_levels([elapsed,summary,partial_result]),
                 globals([vamp_on_demand-V]) | Opts ]),
    (  Status=done(_-ok) -> true
    ;  status_response(Status,Code,Dict),
       (  Status=done(_) -> Dict1=Dict
       ;  estimate_run_time(Spec,CID,ERT),
          put_dict(ert,Dict,ERT,Dict1)
       ),
       throw(dml_error(Code,Dict1))
    ).
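
% For example, the mean_tempo_curve collection perspective above ends up making a call
% like the following (illustrative only; 'c123' is a hypothetical collection id):
%
%   ?- mem(collection_tempo_curve(ml,20), [recompute(none)], c123, Result).
%
% On first call this throws dml_error(12, _{status:initiated, ...}) while the computation
% runs in the vis_cla thread pool; once cla_memo/3 holds a result, the same call simply
% returns it.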

% Very crude estimate of the expected run time: scale the total duration of previous
% memoised runs of Spec by the ratio of this dataset's size to the total size of the
% datasets those runs covered; -1 if there is no history to extrapolate from.
estimate_run_time(Spec,CID0,ERT) :-
    findall( Size-Dur,
             ( browse(cla_memo(Spec,CID,_),comp(_,_,Dur)-ok),
               dataset_size(CID,Size) ),
             Pairs),
    length(Pairs,N),
    (  N=0 -> ERT is -1
    ;  maplist(computations:pair,Sizes,Durs,Pairs),
       sumlist(Sizes,TotalSize),
       sumlist(Durs,TotalDur),
       dataset_size(CID0,Size),
       ERT is TotalDur*Size/TotalSize
    ).

status_response(spawned(ID,Pos), 12, Info) :-
    Info = _{status:initiated, id:ID, position:Pos}.
status_response(waiting(ID,T,Pos), 10, Info) :-
    Info = _{status:already_waiting, id:ID, submit_time:TS, position:Pos},
    time_to_string(T,TS).
status_response(running(ID,TStart,_,nothing), 11, Info) :-
    Info = _{id:ID, status:already_running, start_time:TS},
    time_to_string(TStart,TS).
status_response(running(ID,TStart,_,just(Time-[Progress,Partial])), 11, Info) :-
    maplist(progress_json,Progress,Progs),
    time_to_string(TStart,TS),
    Elapsed is Time-TStart,
    (  member(stepwise(_,Done/Total),Progress), Done>0
    -> ETA is Elapsed*(Total-Done)/Done
    ;  ETA is -1
    ),
    Info1 = _{ id:ID
             , status:already_running
             , start_time:TS
             , elapsed_time:Elapsed
             , progress:Progs
             , eta:ETA
             },
    (  Partial=just(R)
    -> put_dict(partial_result,Info1,R,Info)
    ;  Info=Info1
    ).

status_response(recomputing(ID,Pos,Meta), 13, Info) :-
    Info = _{status:recomputing, id:ID, position:Pos, meta:MD},
    meta_dict(Meta,MD).
status_response(done(Meta), 14, _{status:failed, meta:MD}) :-
    meta_dict(Meta,MD).

meta_dict(comp(_,Time,Dur)-Result, _{date:Date, duration:Dur, reason:Reason}) :-
    format_result(Result,Reason),
    time_to_string(Time,Date).

format_result(fail,'Unspecified failure').
format_result(ex(Ex),Description) :- message_to_string(Ex,Description).

time_to_string(Time,String) :- format_time(string(String),'%FT%T%:z',Time).

progress_json(A,A) :- atomic(A), !.
progress_json(stepwise(Desc,Done/Total), _{task:Task, total:Total, done:Done}) :- !,
    progress_json(Desc,Task).
progress_json(T,A) :- message_to_string(T,A).

prolog:message(map_fold(_Mapper,_Folder)) --> ['Map-fold'].

:- multifile thread_pool:create_pool/1.
thread_pool:create_pool(vis_cla) :-
    current_prolog_flag(cpu_count,N),
    thread_pool_create(vis_cla, N, [backlog(20)]).

% ------------ computations with progress ------------------

map_reduce_dataset(Mapper,Reducer,CID,Result) :-
    dataset_map_fold_reduce(CID,Mapper,append_dl,with_dl(Reducer),H-H,Result).

append_dl(HH-TT,H-HH,H-TT).
with_dl(P,H-[],A) :- call(P,H,A).
with_dl(P,H-[],A,B) :- call(P,H,A,B).

dataset_map_fold_reduce(CID,Mapper,Folder,Reducer,S0,Result) :-
    dataset_items(CID,Items),
    with_cont( 'Map-fold-reduce',
               map_fold_with_progress( safe_call(Mapper),
                                       safe_fold(Folder),
                                       Items, s(0,F-F,E-E,S0)),
               reduce_cont(Reducer), Result).

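% The fold state has the form s(NOk, Failed-FT, Errors-ET, S): the number of successfully
% mapped items, difference lists of failed items and of error dicts, and the folder's own
% accumulator S. reduce_cont/3 closes the difference lists, runs the reducer on S when at
% least one item succeeded, and attaches the coverage information to the result.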
reduce_cont(Reducer,s(NOk,Failed-[],Erroneous-[],S), R) :-
    (  NOk>0
    -> simple_task(reducing(Reducer),call(Reducer,S,R1)),
       put_coverage(NOk,Failed,Erroneous,R1,R)
    ;  put_coverage(NOk,Failed,Erroneous,_{status:'no successfully mapped items'},D),
       throw(dml_error(20, D))
    ).

put_coverage(NOk,Failed,Erroneous,R1,R) :-
    put_dict(coverage,R1,_{ok_count:NOk, failed:Failed, errors:Erroneous},R).

safe_call(Mapper,X,Z) :-
    (  catch( (call(Mapper,X,Y), Z=ok(X,Y)), Ex,
              (  Ex=abort(_) -> throw(Ex)
              ;  Z=error(X,Ex))), !
    ;  Z=fail(X)
    ).

safe_fold(Folder,Items,s(NOk1,FH-FT1,EH-ET1,S1),s(NOk2,FH-FT2,EH-ET2,S2)) :-
    seqmap(partition,Items,s(NOk1,OkH,FT1,ET1),s(NOk2,OkT,FT2,ET2)),
    call(Folder,OkH-OkT,S1,S2).

partition(ok(_,X),s(N,[X|O],F,E),s(M,O,F,E)) :- M is N+1.
partition(fail(X),s(N,O,[X|F],E),s(N,O,F,E)).
partition(error(X,Ex),s(N,O,F,[_{item:X, error:Msg}|E]),s(N,O,F,E)) :- message_to_string(Ex,Msg).