import {
  Component,
  OnInit,
  ViewChild,
  ElementRef,
  Input,
  AfterViewInit,
  NgZone,
  OnDestroy,
  ChangeDetectorRef
} from '@angular/core';
import {
  AudioPlayerService, AudioResource,
  AudioResourceError
} from '../services/audio-player/audio-player.service';
import wavesUI from 'waves-ui-piper';
import {
  FeatureExtractionService
} from '../services/feature-extraction/feature-extraction.service';
import {Subscription} from 'rxjs/Subscription';
import {
  FeatureCollection,
  SimpleResponse,
  VectorFeature,
  MatrixFeature,
  TracksFeature
} from 'piper/HigherLevelUtilities';
import {toSeconds, OutputDescriptor} from 'piper';
import {FeatureList, Feature} from 'piper/Feature';
import * as Hammer from 'hammerjs';
import {WavesSpectrogramLayer} from '../spectrogram/Spectrogram';

// waves-ui-piper provides no TypeScript typings here; Timeline is used
// below, so alias it to any alongside the others
type Timeline = any;
type Layer = any;
type Track = any;
type Colour = string;

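// Cycles endlessly through the palette defined below. Because the index
// is incremented before the first yield, the cycle starts from the
// second entry: the first ("sapphire blue") is already used for the
// waveform itself.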
function* createColourGenerator(colours: string[]) {
  let index = 0;
  const nColours = colours.length;
  while (true) {
    yield colours[index = ++index % nColours];
  }
}

const defaultColourGenerator = createColourGenerator([
  '#0868ac', // "sapphire blue", our waveform / header colour
  '#c33c54', // "brick red"
  '#17bebb', // "tiffany blue"
  '#001021', // "rich black"
  '#fa8334', // "mango tango"
  '#034748'  // "deep jungle green"
]);

type HigherLevelFeatureShape = 'regions' | 'instants' | 'notes';
type NoteLikeUnit = 'midi' | 'hz';
interface Note {
  time: number;
  duration: number;
  pitch: number;
  velocity?: number;
}

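// Example usage (illustrative only; the host template and binding values
// here are hypothetical, but the input names match the @Input()s
// declared below):
//
//   <ugly-waveform
//     [timeline]="sharedTimeline"
//     [trackIdPrefix]="'main'"
//     [isSubscribedToExtractionService]="true"
//     [isSeeking]="true">
//   </ugly-waveform>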
@Component({
  selector: 'ugly-waveform',
  templateUrl: './waveform.component.html',
  styleUrls: ['./waveform.component.css']
})
export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {

  @ViewChild('track') trackDiv: ElementRef;
  @Input() set width(width: number) {
    if (this.timeline) {
      requestAnimationFrame(() => {
        this.timeline.timeContext.visibleWidth = width;
        this.timeline.tracks.update();
      });
    }
  }
  @Input() timeline: Timeline;
  @Input() trackIdPrefix: string;
  @Input() set isSubscribedToExtractionService(isSubscribed: boolean) {
    if (isSubscribed) {
      if (this.featureExtractionSubscription) {
        return;
      }

      this.featureExtractionSubscription =
        this.piperService.featuresExtracted$.subscribe(
          features => {
            this.renderFeatures(features, defaultColourGenerator.next().value);
          });
    } else {
      if (this.featureExtractionSubscription) {
        this.featureExtractionSubscription.unsubscribe();
      }
    }
  }
  @Input() set isSubscribedToAudioService(isSubscribed: boolean) {
    this._isSubscribedToAudioService = isSubscribed;
    if (isSubscribed) {
      if (this.onAudioDataSubscription) {
        return;
      }

      this.onAudioDataSubscription =
        this.audioService.audioLoaded$.subscribe(res => {
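          // audioLoaded$ emits a union type: an AudioResourceError is
          // recognised by the presence of its message field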
          const wasError = (res as AudioResourceError).message != null;

          if (wasError) {
            console.warn('No audio, display error?');
          } else {
            this.audioBuffer = (res as AudioResource).samples;
          }
        });
    } else {
      if (this.onAudioDataSubscription) {
        this.onAudioDataSubscription.unsubscribe();
      }
    }
  }

  get isSubscribedToAudioService(): boolean {
    return this._isSubscribedToAudioService;
  }

  @Input() set isOneShotExtractor(isOneShot: boolean) {
    this._isOneShotExtractor = isOneShot;
  }

  get isOneShotExtractor(): boolean {
    return this._isOneShotExtractor;
  }

  @Input() set isSeeking(isSeeking: boolean) {
    this._isSeeking = isSeeking;
    if (isSeeking) {
      if (this.seekedSubscription) {
        return;
      }
      if (this.playingStateSubscription) {
        return;
      }

      this.seekedSubscription = this.audioService.seeked$.subscribe(() => {
        if (!this.audioService.isPlaying()) {
          this.animate();
        }
      });
      this.playingStateSubscription =
        this.audioService.playingStateChange$.subscribe(
          isPlaying => {
            if (isPlaying) {
              this.animate();
            }
          });
    } else {
      if (this.playingStateSubscription) {
        this.playingStateSubscription.unsubscribe();
      }
      if (this.seekedSubscription) {
        this.seekedSubscription.unsubscribe();
      }
    }
  }

  get isSeeking(): boolean {
    return this._isSeeking;
  }

  set audioBuffer(buffer: AudioBuffer) {
    this._audioBuffer = buffer || undefined;
    if (this.audioBuffer) {
      this.renderWaveform(this.audioBuffer);
      // this.renderSpectrogram(this.audioBuffer);
    }
  }

  get audioBuffer(): AudioBuffer {
    return this._audioBuffer;
  }

  private _audioBuffer: AudioBuffer;
  private _isSubscribedToAudioService: boolean;
  private _isOneShotExtractor: boolean;
  private _isSeeking: boolean;
  private cursorLayer: any;
  private highlightLayer: any;
  private layers: Layer[];
  private featureExtractionSubscription: Subscription;
  private playingStateSubscription: Subscription;
  private seekedSubscription: Subscription;
  private onAudioDataSubscription: Subscription;
  private zoomOnMouseDown: number;
  private offsetOnMouseDown: number;
  private hasShot: boolean;
  private isLoading: boolean;

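  // Recolours an existing layer in place. waves-ui appears to offer no
  // public API for this, so we reach into the layer's internal shape
  // maps (hence the underscored property names) and re-render.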
  private static changeColour(layer: Layer, colour: string): void {
    const butcherShapes = (shape) => {
      shape.install({color: () => colour});
      shape.params.color = colour;
      shape.update(layer._renderingContext, layer.data);
    };

    layer._$itemCommonShapeMap.forEach(butcherShapes);
    layer._$itemShapeMap.forEach(butcherShapes);
    layer.render();
    layer.update();
  }

  constructor(private audioService: AudioPlayerService,
              private piperService: FeatureExtractionService,
              private ngZone: NgZone,
              private ref: ChangeDetectorRef) {
    this.isSubscribedToAudioService = true;
    this.isSeeking = true;
    this.layers = [];
    this.audioBuffer = undefined;
    this.timeline = undefined;
    this.cursorLayer = undefined;
    this.highlightLayer = undefined;
    this.isLoading = true;
  }

  ngOnInit() {
  }

  ngAfterViewInit(): void {
    this.trackIdPrefix = this.trackIdPrefix || 'default';
    if (this.timeline) {
      this.renderTimeline(null, true, true);
    } else {
      this.renderTimeline();
    }
  }

  renderTimeline(duration: number = 1.0,
                 useExistingDuration: boolean = false,
                 isInitialRender: boolean = false): Timeline {
    const track: HTMLElement = this.trackDiv.nativeElement;
    track.innerHTML = '';
    const height: number = track.getBoundingClientRect().height;
    const width: number = track.getBoundingClientRect().width;
    const pixelsPerSecond = width / duration;
    const hasExistingTimeline = this.timeline instanceof wavesUI.core.Timeline;

    if (hasExistingTimeline) {
      if (!useExistingDuration) {
        this.timeline.pixelsPerSecond = pixelsPerSecond;
        this.timeline.visibleWidth = width;
      }
    } else {
      this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
    }
    const waveTrack = this.timeline.createTrack(
      track,
      height,
      `wave-${this.trackIdPrefix}`
    );
    if (isInitialRender && hasExistingTimeline) {
      // time axis
      const timeAxis = new wavesUI.helpers.TimeAxisLayer({
        height: height,
        color: '#b0b0b0'
      });
      this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);
      this.cursorLayer = new wavesUI.helpers.CursorLayer({
        height: height,
        color: '#c33c54'
      });
      this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    }
    if ('ontouchstart' in window) {
      interface Point {
        x: number;
        y: number;
      }

      let zoomGestureJustEnded = false;

      const pixelToExponent: Function = wavesUI.utils.scales.linear()
        .domain([0, 100]) // 100px => factor 2
        .range([0, 1]);
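      // e.g. if the pinch distance grows by 100px the exponent is 1 and
      // the zoom doubles; if it shrinks by 100px the zoom halves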

      const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
        return Math.pow(
          Math.pow(p2.x - p1.x, 2) +
          Math.pow(p2.y - p1.y, 2), 0.5);
      };

      const calculateMidPoint: (p1: Point, p2: Point) => Point = (p1, p2) => {
        return {
          x: 0.5 * (p1.x + p2.x),
          y: 0.5 * (p1.y + p2.y)
        };
      };

      const hammertime = new Hammer.Manager(this.trackDiv.nativeElement, {
        recognizers: [
          [Hammer.Pan, { direction: Hammer.DIRECTION_HORIZONTAL }]
        ]
      });

      // HammerJS seems to bind its events at the window level, so they
      // can propagate to other components; capture this component's
      // timeline in a local to make sure we act on the right one
      const componentTimeline = this.timeline;
      let initialZoom;
      let initialDistance;
      let offsetAtPanStart;
      let startX;
      let isZooming;

      const scroll = (ev) => {
        if (ev.center.x - startX === 0) {
          return;
        }

        if (zoomGestureJustEnded) {
          zoomGestureJustEnded = false;
          console.log('Skip this event: likely a single touch dangling from pinch');
          return;
        }
        componentTimeline.timeContext.offset = offsetAtPanStart +
          componentTimeline.timeContext.timeToPixel.invert(ev.deltaX);
        componentTimeline.tracks.update();
      };

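      // Pinch-to-zoom, anchored at the pinch midpoint: the midpoint is
      // converted to time before and after the zoom factor changes, and
      // the offset is shifted by the difference so that point stays
      // fixed on screen.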
      const zoom = (ev) => {
        if (ev.touches.length < 2) {
          return;
        }

        ev.preventDefault();
        const minZoom = componentTimeline.state.minZoom;
        const maxZoom = componentTimeline.state.maxZoom;
        const p1: Point = {
          x: ev.touches[0].clientX,
          y: ev.touches[0].clientY
        };
        const p2: Point = {
          x: ev.touches[1].clientX,
          y: ev.touches[1].clientY
        };
        const distance = calculateDistance(p1, p2);
        const midPoint = calculateMidPoint(p1, p2);

        const lastCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        const exponent = pixelToExponent(distance - initialDistance);
        const targetZoom = initialZoom * Math.pow(2, exponent);

        componentTimeline.timeContext.zoom =
          Math.min(Math.max(targetZoom, minZoom), maxZoom);

        const newCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        componentTimeline.timeContext.offset += newCenterTime - lastCenterTime;
        componentTimeline.tracks.update();
      };
      hammertime.on('panstart', (ev) => {
        offsetAtPanStart = componentTimeline.timeContext.offset;
        startX = ev.center.x;
      });
      hammertime.on('panleft', scroll);
      hammertime.on('panright', scroll);

      const element: HTMLElement = this.trackDiv.nativeElement;
      element.addEventListener('touchstart', (e) => {
        if (e.touches.length < 2) {
          return;
        }

        isZooming = true;
        initialZoom = componentTimeline.timeContext.zoom;

        initialDistance = calculateDistance({
          x: e.touches[0].clientX,
          y: e.touches[0].clientY
        }, {
          x: e.touches[1].clientX,
          y: e.touches[1].clientY
        });
      });
      element.addEventListener('touchend', () => {
        if (isZooming) {
          isZooming = false;
          zoomGestureJustEnded = true;
        }
      });
      element.addEventListener('touchmove', zoom);
    }
    // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`);
    // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`);
  }

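  // Estimate the given percentile of a matrix by sampling. For example,
  // for a 2000 x 100 matrix, n = 200,000, so m is capped at 50,000 and
  // m_per = 25 random samples are drawn from each column before the
  // percentile is read off the sorted sample.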
  estimatePercentile(matrix, percentile) {
    // our sample is not evenly distributed across the whole data set:
    // it is guaranteed to include at least one sample from every
    // column, and could sample some values more than once. But it
    // should be good enough in most cases (todo: show this)
    if (matrix.length === 0) {
      return 0.0;
    }
    const w = matrix.length;
    const h = matrix[0].length;
    const n = w * h;
    const m = (n > 50000 ? 50000 : n); // should base that on the %ile
    let m_per = Math.floor(m / w);
    if (m_per < 1) {
      m_per = 1;
    }

    const sample = [];
    for (let x = 0; x < w; ++x) {
      for (let i = 0; i < m_per; ++i) {
        const y = Math.floor(Math.random() * h);
        const value = matrix[x][y];
        if (!isNaN(value) && value !== Infinity) {
          sample.push(value);
        }
      }
    }
    if (sample.length === 0) {
      console.warn('No samples gathered, even though we hoped for ' +
        (m_per * w) + ' of them');
      return 0.0;
    }
    sample.sort((a, b) => a - b);
    const ix = Math.floor((sample.length * percentile) / 100);
    console.log('Estimating ' + percentile + '-%ile of ' +
      n + '-sample dataset (' + w + ' x ' + h + ') as value ' + ix +
      ' of sorted ' + sample.length + '-sample subset');
    const estimate = sample[ix];
    console.log('Estimate is: ' + estimate + ' (where min sampled value = ' +
      sample[0] + ' and max = ' + sample[sample.length - 1] + ')');
    return estimate;
  }

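  // Build a colour mapper that interpolates linearly between the given
  // hex colour stops. For example, with three stops and value = 0.25,
  // m = 0.5, so the result is an equal blend of colours[0] and colours[1].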
  interpolatingMapper(hexColours) {
    const colours = hexColours.map(n => {
      const i = parseInt(n, 16);
      return [ ((i >> 16) & 255) / 255.0,
               ((i >> 8) & 255) / 255.0,
               ((i) & 255) / 255.0 ];
    });
    const last = colours.length - 1;
    return (value => {
      const m = value * last;
      if (m >= last) {
        return colours[last];
      }
      if (m <= 0) {
        return colours[0];
      }
      const base = Math.floor(m);
      const prop0 = base + 1.0 - m;
      const prop1 = m - base;
      const c0 = colours[base];
      const c1 = colours[base + 1];
      return [ c0[0] * prop0 + c1[0] * prop1,
               c0[1] * prop0 + c1[1] * prop1,
               c0[2] * prop0 + c1[2] * prop1 ];
    });
  }

  iceMapper() {
    const hexColours = [
      // Based on ColorBrewer ylGnBu
      'ffffff', 'ffff00', 'f7fcf0', 'e0f3db', 'ccebc5', 'a8ddb5',
      '7bccc4', '4eb3d3', '2b8cbe', '0868ac', '084081', '042040'
    ];
    hexColours.reverse();
    return this.interpolatingMapper(hexColours);
  }

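  // Standard sextant-based HSV to RGB conversion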
  hsv2rgb(h, s, v) { // all values in range [0, 1]
    const i = Math.floor(h * 6);
    const f = h * 6 - i;
    const p = v * (1 - s);
    const q = v * (1 - f * s);
    const t = v * (1 - (1 - f) * s);
    let r = 0, g = 0, b = 0;
    switch (i % 6) {
      case 0: r = v; g = t; b = p; break;
      case 1: r = q; g = v; b = p; break;
      case 2: r = p; g = v; b = t; break;
      case 3: r = p; g = q; b = v; break;
      case 4: r = t; g = p; b = v; break;
      case 5: r = v; g = p; b = q; break;
    }
    return [ r, g, b ];
  }

  greenMapper() {
    const blue = 0.6666;
    const pieslice = 0.3333;
    return (value => {
      const h = blue - value * 2.0 * pieslice;
      const s = 0.5 + value / 2.0;
      const v = value;
      return this.hsv2rgb(h, s, v);
    });
  }

  sunsetMapper() {
    return (value => {
      const r = (value - 0.24) * 2.38;
      const g = (value - 0.64) * 2.777;
      let b = (3.6 * value);
      if (value > 0.277) {
        b = 2.0 - b;
      }
      return [ r, g, b ];
    });
  }

  clearTimeline(): void {
    // loop through layers and remove them; waves-ui provides methods
    // for this, but they seem not to work properly
    const timeContextChildren = this.timeline.timeContext._children;
    for (const track of this.timeline.tracks) {
      if (track.layers.length === 0) { continue; }
      const trackLayers = Array.from(track.layers);
      while (trackLayers.length) {
        const layer: Layer = trackLayers.pop();
        if (this.layers.includes(layer)) {
          track.remove(layer);
          this.layers.splice(this.layers.indexOf(layer), 1);
          const index = timeContextChildren.indexOf(layer.timeContext);
          if (index >= 0) {
            timeContextChildren.splice(index, 1);
          }
          layer.destroy();
        }
      }
    }
  }

  renderWaveform(buffer: AudioBuffer): void {
    // const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height;
    if (this.timeline) {
      // resize
      const width = this.trackDiv.nativeElement.getBoundingClientRect().width;

      this.clearTimeline();

      this.timeline.visibleWidth = width;
      this.timeline.pixelsPerSecond = width / buffer.duration;
    } else {
      this.renderTimeline(buffer.duration);
    }
    // look the track up only once the timeline is guaranteed to exist
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);
    waveTrack.height = height;
    this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;

    // time axis
    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
      height: height,
      color: '#b0b0b0'
    });
    this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);

    const nchannels = buffer.numberOfChannels;
    const totalWaveHeight = height * 0.9;
    const waveHeight = totalWaveHeight / nchannels;

    for (let ch = 0; ch < nchannels; ++ch) {
      console.log('about to construct a waveform layer for channel ' + ch);
      const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
        top: (height - totalWaveHeight) / 2 + waveHeight * ch,
        height: waveHeight,
        color: '#0868ac',
        channel: ch
      });
      this.addLayer(waveformLayer, waveTrack, this.timeline.timeContext);
    }

    this.cursorLayer = new wavesUI.helpers.CursorLayer({
      height: height,
      color: '#c33c54'
    });
    this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
    waveTrack.render();
    waveTrack.update();

    this.isLoading = false;
    this.ref.markForCheck();
    this.animate();
  }

  renderSpectrogram(buffer: AudioBuffer): void {
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`);

    const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
      top: 0,
      height: height,
      stepSize: 512,
      blockSize: 1024,
      normalise: 'none',
      mapper: this.sunsetMapper()
    });
    this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);

    this.timeline.tracks.update();
  }

  private addLineLayers(features: VectorFeature[],
                        unit: string,
                        colour: Colour) {

    // Winnow out empty features
    features = features.filter(feature => (feature.data.length > 0));

    // First establish a [min, max] range across all of the features
    let [min, max] = features.reduce((acc, feature) => {
      return feature.data.reduce((acc, val) => {
        const [min, max] = acc;
        return [Math.min(min, val), Math.max(max, val)];
      }, acc);
    }, [Infinity, -Infinity]);

    console.log('addLineLayers: ' + features.length +
      ' non-empty features, overall min = ' + min + ', max = ' + max);

    if (min === Infinity) {
      min = 0;
      max = 1;
    }

    if (min !== min || max !== max) {
      console.warn('min or max is NaN');
      min = 0;
      max = 1;
    }

    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);

    // Now add a line layer for each vector feature
    const lineLayers = features.map(feature => {

      let duration = 0;

      // Give the plot items positions relative to the start of the
      // line, rather than relative to absolute time 0. This is
      // because we'll be setting the layer timeline start property
      // later on and these will be positioned relative to that

      const plotData = [...feature.data].map((val, i) => {
        const t = i * feature.stepDuration;
        duration = t + feature.stepDuration;
        return {
          cx: t,
          cy: val
        };
      });

      const lineLayer = new wavesUI.helpers.LineLayer(plotData, {
        color: colour,
        height: height,
        yDomain: [ min, max ]
      });
      this.addLayer(
        lineLayer,
        waveTrack,
        this.timeline.timeContext
      );

      // Set start and duration so that the highlight layer can use
      // them to determine which line to draw values from
      lineLayer.start = feature.startTime;
      lineLayer.duration = duration;

      return lineLayer;
    });

    // And a single scale layer at left
    // !!! todo: unit in scale layer
    const scaleLayer = new wavesUI.helpers.ScaleLayer({
      tickColor: colour,
      textColor: colour,
      height: height,
      yDomain: [ min, max ]
    });
    this.addLayer(
      scaleLayer,
      waveTrack,
      this.timeline.timeContext
    );

    // And a single highlight layer which uses all of the line layers
    // as its source material
    this.highlightLayer = new wavesUI.helpers.HighlightLayer(lineLayers, {
      opacity: 0.7,
      height: height,
      color: '#c33c54',
      labelOffset: 38,
      yDomain: [ min, max ],
      unit
    });
    this.addLayer(
      this.highlightLayer,
      waveTrack,
      this.timeline.timeContext
    );
  }

  // TODO refactor - this doesn't belong here
  private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
    if (this.isOneShotExtractor && !this.hasShot) {
      this.featureExtractionSubscription.unsubscribe();
      this.hasShot = true;
    }

    if (!extracted.hasOwnProperty('features')
      || !extracted.hasOwnProperty('outputDescriptor')) {
      return;
    }
    if (!extracted.features.hasOwnProperty('shape')
      || !extracted.features.hasOwnProperty('collected')) {
      return;
    }
    const features: FeatureCollection = (extracted.features as FeatureCollection);
    const outputDescriptor = extracted.outputDescriptor;
    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);

    let unit = '';
    if (outputDescriptor.configured.hasOwnProperty('unit')) {
      unit = outputDescriptor.configured.unit;
    }

    // TODO refactor all of this
    switch (features.shape) {

      case 'vector': {
        const collected = features.collected as VectorFeature;
        this.addLineLayers([collected], unit, colour);
        break;
      }

      case 'tracks': {
        const collected = features.collected as TracksFeature;
        this.addLineLayers(collected, unit, colour);
        break;
      }

      case 'list': {
        const featureData = features.collected as FeatureList;
        if (featureData.length === 0) {
          return;
        }

        // TODO refactor, this is incomprehensible
        try {
          const featureShape = deduceHigherLevelFeatureShape(
            featureData,
            outputDescriptor
          );
          switch (featureShape) {
            case 'instants': {
              const plotData = featureData.map(feature => ({
                time: toSeconds(feature.timestamp),
                label: feature.label
              }));
              const featureLayer = new wavesUI.helpers.TickLayer(plotData, {
                height: height,
                color: colour,
                labelPosition: 'bottom',
                shadeSegments: true
              });
              this.addLayer(
                featureLayer,
                waveTrack,
                this.timeline.timeContext
              );
              break;
            }
            case 'regions': {
              this.renderRegions(
                featureData,
                outputDescriptor,
                waveTrack,
                height,
                colour
              );
              break;
            }
            case 'notes': {
              const notes = mapFeaturesToNotes(featureData, outputDescriptor);
              let [min, max] = notes.reduce((acc, note) => {
                const [min, max] = acc;
                return [Math.min(min, note.pitch), Math.max(max, note.pitch)];
              }, [Infinity, -Infinity]);
              if (min === Infinity || min < 0 || max < 0) {
                min = 0;
                max = 127;
              }
              // round min and max to octave boundaries (starting at C as in MIDI)
              min = 12 * Math.floor(min / 12);
              max = 12 * Math.ceil(max / 12);
              const pianoRollLayer = new wavesUI.helpers.PianoRollLayer(
                notes,
                {height: height, color: colour, yDomain: [min, max]}
              );
              this.addLayer(
                pianoRollLayer,
                waveTrack,
                this.timeline.timeContext
              );
              break;
            }
          }
        } catch (e) {
          console.warn(e); // TODO display
          break;
        }
        break;
      }
      case 'matrix': {
        const collected = features.collected as MatrixFeature;
        const startTime = collected.startTime; // !!! + make use of
        const stepDuration = collected.stepDuration;
        const matrixData = collected.data;

        if (matrixData.length === 0) {
          return;
        }

        console.log('matrix data length = ' + matrixData.length);
        console.log('height of first column = ' + matrixData[0].length);
        const targetValue = this.estimatePercentile(matrixData, 95);
        const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
        console.log('setting gain to ' + gain);
        const matrixEntity = new wavesUI.utils.PrefilledMatrixEntity(
          matrixData,
          0, // startTime
          stepDuration
        );
        const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
          gain,
          top: 0,
          height: height,
          normalise: 'none',
          mapper: this.iceMapper()
        });
        this.addLayer(
          matrixLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      default:
        console.log(
          `Cannot render an appropriate layer for feature shape '${features.shape}'`
        );
    }

    this.isLoading = false;
    this.ref.markForCheck();
    this.timeline.tracks.update();
    this.animate();
  }

  private animate(): void {
    if (!this.isSeeking) {
      return;
    }

    this.ngZone.runOutsideAngular(() => {
      // listen for time passing...
      const updateSeekingCursor = () => {
        const currentTime = this.audioService.getCurrentTime();
        this.cursorLayer.currentPosition = currentTime;
        this.cursorLayer.update();

        if (typeof(this.highlightLayer) !== 'undefined') {
          this.highlightLayer.currentPosition = currentTime;
          this.highlightLayer.update();
        }

        const currentOffset = this.timeline.timeContext.offset;
        const offsetTimestamp = currentOffset + currentTime;

        const visibleDuration = this.timeline.timeContext.visibleDuration;
        // TODO reduce duplication between directions and make more declarative
        // this kinda logic should also be tested
        const mustPageForward = offsetTimestamp > visibleDuration;
        const mustPageBackward = currentTime < -currentOffset;

        if (mustPageForward) {
          const hasSkippedMultiplePages =
            offsetTimestamp - visibleDuration > visibleDuration;

          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset - visibleDuration;
          this.timeline.tracks.update();
        }

        if (mustPageBackward) {
          const hasSkippedMultiplePages =
            currentTime + visibleDuration < -currentOffset;
          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset + visibleDuration;
          this.timeline.tracks.update();
        }

        if (this.audioService.isPlaying()) {
          requestAnimationFrame(updateSeekingCursor);
        }
      };
      updateSeekingCursor();
    });
  }

  // TODO not sure how much of the logic in here is actually sensible w.r.t
  // what it functionally produces
  private renderRegions(featureData: FeatureList,
                        outputDescriptor: OutputDescriptor,
                        waveTrack: any,
                        height: number,
                        colour: Colour) {
    console.log('Output is of region type');
    const binCount = outputDescriptor.configured.binCount || 0;
    const isBarRegion = (featureData[0].featureValues != null &&
      featureData[0].featureValues.length >= 1) || binCount >= 1;
    const getSegmentArgs = () => {
      if (isBarRegion) {

        // TODO refactor - this is messy
        interface FoldsToNumber<T> {
          reduce(fn: (previousValue: number,
                      currentValue: T,
                      currentIndex: number,
                      array: ArrayLike<T>) => number,
                 initialValue?: number): number;
        }

        // TODO potentially change impl., i.e avoid reduce
        const findMin = <T>(arr: FoldsToNumber<T>,
                            getElement: (x: T) => number): number => {
          return arr.reduce(
            (min, val) => Math.min(min, getElement(val)),
            Infinity
          );
        };

        const findMax = <T>(arr: FoldsToNumber<T>,
                            getElement: (x: T) => number): number => {
          return arr.reduce(
            (max, val) => Math.max(max, getElement(val)),
            -Infinity
          );
        };

        const min = findMin<Feature>(featureData, (x: Feature) => {
          return findMin<number>(x.featureValues, y => y);
        });

        const max = findMax<Feature>(featureData, (x: Feature) => {
          return findMax<number>(x.featureValues, y => y);
        });

        const barHeight = 1.0 / height;
        return [
          featureData.reduce((bars, feature) => {
            const staticProperties = {
              x: toSeconds(feature.timestamp),
              width: toSeconds(feature.duration),
              height: min + barHeight,
              color: colour,
              opacity: 0.8
            };
            // TODO avoid copying Float32Array to an array - map is problematic here
            return bars.concat([...feature.featureValues]
              .map(val => Object.assign({}, staticProperties, {y: val})));
          }, []),
          {yDomain: [min, max + barHeight], height: height} as any
        ];
      } else {
        return [featureData.map(feature => ({
          x: toSeconds(feature.timestamp),
          width: toSeconds(feature.duration),
          color: colour,
          opacity: 0.8
        })), {height: height}];
      }
    };

    const segmentLayer = new wavesUI.helpers.SegmentLayer(
      ...getSegmentArgs()
    );
    this.addLayer(
      segmentLayer,
      waveTrack,
      this.timeline.timeContext
    );
  }

  private addLayer(layer: Layer,
                   track: Track,
                   timeContext: any,
                   isAxis: boolean = false): void {
    timeContext.zoom = 1.0;
    if (!layer.timeContext) {
      layer.setTimeContext(isAxis ?
        timeContext : new wavesUI.core.LayerTimeContext(timeContext));
    }
    track.add(layer);
    this.layers.push(layer);
    layer.render();
    layer.update();
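    // keep the cursor on top: re-appending its element moves it to the
    // end of the layout, so it is painted above newly added layers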
    if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
      track.$layout.appendChild(this.cursorLayer.$el);
    }
  }

  ngOnDestroy(): void {
    if (this.featureExtractionSubscription) {
      this.featureExtractionSubscription.unsubscribe();
    }
    if (this.playingStateSubscription) {
      this.playingStateSubscription.unsubscribe();
    }
    if (this.seekedSubscription) {
      this.seekedSubscription.unsubscribe();
    }
    if (this.onAudioDataSubscription) {
      this.onAudioDataSubscription.unsubscribe();
    }
  }

  seekStart(): void {
    this.zoomOnMouseDown = this.timeline.timeContext.zoom;
    this.offsetOnMouseDown = this.timeline.timeContext.offset;
  }

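  // Only treat pointer-up as a seek if the view was neither zoomed nor
  // panned since pointer-down; otherwise the gesture was navigation,
  // not a seek.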
  seekEnd(x: number): void {
    const hasSameZoom: boolean = this.zoomOnMouseDown ===
      this.timeline.timeContext.zoom;
    const hasSameOffset: boolean = this.offsetOnMouseDown ===
      this.timeline.timeContext.offset;
    if (hasSameZoom && hasSameOffset) {
      this.seek(x);
    }
  }

  seek(x: number): void {
    if (this.timeline) {
      const timeContext: any = this.timeline.timeContext;
      if (this.isSeeking) {
        this.audioService.seekTo(
          timeContext.timeToPixel.invert(x) - timeContext.offset
        );
      }
    }
  }
}

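// Heuristics, in order of precedence:
//  - no duration, zero bins and no values      -> 'instants'
//  - note-like unit, 1-2 bins and region-like  -> 'notes'
//  - duration plus timestamp otherwise         -> 'regions'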
function deduceHigherLevelFeatureShape(featureData: FeatureList,
                                       descriptor: OutputDescriptor)
: HigherLevelFeatureShape {
  // TODO look at output descriptor instead of directly inspecting features
  const hasDuration = descriptor.configured.hasDuration;
  const binCount = descriptor.configured.binCount;
  const isMarker = !hasDuration
    && binCount === 0
    && featureData[0].featureValues == null;

  const isMaybeNote = getCanonicalNoteLikeUnit(descriptor.configured.unit)
    && [1, 2].find(nBins => nBins === binCount);

  const isRegionLike = hasDuration && featureData[0].timestamp != null;

  const isNote = isMaybeNote && isRegionLike;
  const isRegion = !isMaybeNote && isRegionLike;
  if (isMarker) {
    return 'instants';
  }
  if (isNote) {
    return 'notes';
  }
  if (isRegion) {
    return 'regions';
  }
  throw new Error('No shape could be deduced');
}

function getCanonicalNoteLikeUnit(unit: string): NoteLikeUnit | null {
  // the descriptor's unit is optional, so guard against it being absent
  if (!unit) {
    return null;
  }
  const canonicalUnits: NoteLikeUnit[] = ['midi', 'hz'];
  return canonicalUnits.find(canonicalUnit => {
    return unit.toLowerCase().indexOf(canonicalUnit) >= 0;
  }) || null;
}

function mapFeaturesToNotes(featureData: FeatureList,
                            descriptor: OutputDescriptor): Note[] {
  const canonicalUnit = getCanonicalNoteLikeUnit(descriptor.configured.unit);
  const isHz = canonicalUnit === 'hz';
  return featureData.map(feature => ({
    time: toSeconds(feature.timestamp),
    duration: toSeconds(feature.duration),
    pitch: isHz ?
      frequencyToMidiNote(feature.featureValues[0]) : feature.featureValues[0]
  }));
}

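// Convert a frequency in Hz to a (possibly fractional) MIDI note number;
// e.g. frequencyToMidiNote(440) === 69 (A4), and an octave up,
// frequencyToMidiNote(880) === 81.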
function frequencyToMidiNote(frequency: number,
                             concertA: number = 440.0): number {
  return 69 + 12 * Math.log2(frequency / concertA);
}