import {
  Component,
  OnInit,
  ViewChild,
  ElementRef,
  Input,
  AfterViewInit,
  NgZone,
  OnDestroy,
  ChangeDetectorRef
} from '@angular/core';
import {
  AudioPlayerService, AudioResource,
  AudioResourceError
} from '../services/audio-player/audio-player.service';
import wavesUI from 'waves-ui-piper';
import {
  FeatureExtractionService
} from '../services/feature-extraction/feature-extraction.service';
import {Subscription} from 'rxjs/Subscription';
import {
  FeatureCollection,
  SimpleResponse,
  VectorFeature,
  MatrixFeature,
  TracksFeature
} from 'piper/HigherLevelUtilities';
import {toSeconds} from 'piper';
import {FeatureList, Feature} from 'piper/Feature';
import * as Hammer from 'hammerjs';
import {WavesSpectrogramLayer} from '../spectrogram/Spectrogram';

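// waves-ui-piper presumably ships no TypeScript definitions, so the types
// taken from it are aliased to `any` here as an escape hatch.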
type Timeline = any;
type Layer = any;
type Track = any;
type Colour = string;

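// Cycles endlessly through the palette below. Note that the first next()
// call yields index 1, not 0, so the first feature layer receives '#c33c54'
// while '#0868ac' stays with the waveform itself.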
function* createColourGenerator(colours) {
  let index = 0;
  const nColours = colours.length;
  while (true) {
    yield colours[index = ++index % nColours];
  }
}

const defaultColourGenerator = createColourGenerator([
  '#0868ac', // "sapphire blue", our waveform / header colour
  '#c33c54', // "brick red"
  '#17bebb', // "tiffany blue"
  '#001021', // "rich black"
  '#fa8334', // "mango tango"
  '#034748'  // "deep jungle green"
]);

@Component({
  selector: 'ugly-waveform',
  templateUrl: './waveform.component.html',
  styleUrls: ['./waveform.component.css']
})
export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {

  @ViewChild('track') trackDiv: ElementRef;
  @Input() set width(width: number) {
    if (this.timeline) {
      requestAnimationFrame(() => {
        this.timeline.timeContext.visibleWidth = width;
        this.timeline.tracks.update();
      });
    }
  }
  @Input() timeline: Timeline;
  @Input() trackIdPrefix: string;
  @Input() set isSubscribedToExtractionService(isSubscribed: boolean) {
    if (isSubscribed) {
      if (this.featureExtractionSubscription) {
        return;
      }

      this.featureExtractionSubscription =
        this.piperService.featuresExtracted$.subscribe(
          features => {
            this.renderFeatures(features, defaultColourGenerator.next().value);
          });
    } else {
      if (this.featureExtractionSubscription) {
        this.featureExtractionSubscription.unsubscribe();
      }
    }
  }
  @Input() set isSubscribedToAudioService(isSubscribed: boolean) {
    this._isSubscribedToAudioService = isSubscribed;
    if (isSubscribed) {
      if (this.onAudioDataSubscription) {
        return;
      }

      this.onAudioDataSubscription =
        this.audioService.audioLoaded$.subscribe(res => {
          const wasError = (res as AudioResourceError).message != null;

          if (wasError) {
            console.warn('No audio, display error?');
          } else {
            this.audioBuffer = (res as AudioResource).samples;
          }
        });
    } else {
      if (this.onAudioDataSubscription) {
        this.onAudioDataSubscription.unsubscribe();
      }
    }
  }
  get isSubscribedToAudioService(): boolean {
    return this._isSubscribedToAudioService;
  }

  @Input() set isOneShotExtractor(isOneShot: boolean) {
    this._isOneShotExtractor = isOneShot;
  }

  get isOneShotExtractor(): boolean {
    return this._isOneShotExtractor;
  }

  @Input() set isSeeking(isSeeking: boolean) {
    this._isSeeking = isSeeking;
    if (isSeeking) {
      if (this.seekedSubscription) {
        return;
      }
      if (this.playingStateSubscription) {
        return;
      }

      this.seekedSubscription = this.audioService.seeked$.subscribe(() => {
        if (!this.isPlaying) {
          this.animate();
        }
      });
      this.playingStateSubscription =
        this.audioService.playingStateChange$.subscribe(
          isPlaying => {
            this.isPlaying = isPlaying;
            if (this.isPlaying) {
              this.animate();
            }
          });
    } else {
      if (this.isPlaying) {
        this.isPlaying = false;
      }
      if (this.playingStateSubscription) {
        this.playingStateSubscription.unsubscribe();
      }
      if (this.seekedSubscription) {
        this.seekedSubscription.unsubscribe();
      }
    }
  }

  get isSeeking(): boolean {
    return this._isSeeking;
  }

  set audioBuffer(buffer: AudioBuffer) {
    this._audioBuffer = buffer || undefined;
    if (this.audioBuffer) {
      this.renderWaveform(this.audioBuffer);
      // this.renderSpectrogram(this.audioBuffer);
    }
  }

  get audioBuffer(): AudioBuffer {
    return this._audioBuffer;
  }

  private _audioBuffer: AudioBuffer;
  private _isSubscribedToAudioService: boolean;
  private _isOneShotExtractor: boolean;
  private _isSeeking: boolean;
  private cursorLayer: any;
  private highlightLayer: any;
  private layers: Layer[];
  private featureExtractionSubscription: Subscription;
  private playingStateSubscription: Subscription;
  private seekedSubscription: Subscription;
  private onAudioDataSubscription: Subscription;
  private isPlaying: boolean;
  private zoomOnMouseDown: number;
  private offsetOnMouseDown: number;
  private hasShot: boolean;
  private isLoading: boolean;

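  // Recolours an existing layer in place. This reaches into waves-ui's
  // private shape maps (the underscore-prefixed properties), so it is
  // fragile across library upgrades.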
  private static changeColour(layer: Layer, colour: string): void {
    const butcherShapes = (shape) => {
      shape.install({color: () => colour});
      shape.params.color = colour;
      shape.update(layer._renderingContext, layer.data);
    };

    layer._$itemCommonShapeMap.forEach(butcherShapes);
    layer._$itemShapeMap.forEach(butcherShapes);
    layer.render();
    layer.update();
  }

  constructor(private audioService: AudioPlayerService,
              private piperService: FeatureExtractionService,
              private ngZone: NgZone,
              private ref: ChangeDetectorRef) {
    this.isSubscribedToAudioService = true;
    this.isSeeking = true;
    this.layers = [];
    this.audioBuffer = undefined;
    this.timeline = undefined;
    this.cursorLayer = undefined;
    this.highlightLayer = undefined;
    this.isPlaying = false;
    this.isLoading = true;
  }

  ngOnInit() {
  }

  ngAfterViewInit(): void {
    this.trackIdPrefix = this.trackIdPrefix || 'default';
    if (this.timeline) {
      this.renderTimeline(null, true, true);
    } else {
      this.renderTimeline();
    }
  }

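  // (Re)builds the timeline inside the track container, optionally keeping
  // an existing timeline's time scale, and wires up touch pan and
  // pinch-zoom gestures on touch devices.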
  renderTimeline(duration: number = 1.0,
                 useExistingDuration: boolean = false,
                 isInitialRender: boolean = false): Timeline {
    const track: HTMLElement = this.trackDiv.nativeElement;
    track.innerHTML = '';
    const height: number = track.getBoundingClientRect().height;
    const width: number = track.getBoundingClientRect().width;
    const pixelsPerSecond = width / duration;
    const hasExistingTimeline = this.timeline instanceof wavesUI.core.Timeline;

    if (hasExistingTimeline) {
      if (!useExistingDuration) {
        this.timeline.pixelsPerSecond = pixelsPerSecond;
        this.timeline.visibleWidth = width;
      }
    } else {
      this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
    }
    const waveTrack = this.timeline.createTrack(
      track,
      height,
      `wave-${this.trackIdPrefix}`
    );
    if (isInitialRender && hasExistingTimeline) {
      // time axis
      const timeAxis = new wavesUI.helpers.TimeAxisLayer({
        height: height,
        color: '#b0b0b0'
      });
      this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);
      this.cursorLayer = new wavesUI.helpers.CursorLayer({
        height: height,
        color: '#c33c54'
      });
      this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    }
    if ('ontouchstart' in window) {
      interface Point {
        x: number;
        y: number;
      }

      let zoomGestureJustEnded = false;

      const pixelToExponent: Function = wavesUI.utils.scales.linear()
        .domain([0, 100]) // 100px => factor 2
        .range([0, 1]);

      const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
        return Math.pow(
          Math.pow(p2.x - p1.x, 2) +
          Math.pow(p2.y - p1.y, 2), 0.5);
      };

      const calculateMidPoint: (p1: Point, p2: Point) => Point = (p1, p2) => {
        return {
          x: 0.5 * (p1.x + p2.x),
          y: 0.5 * (p1.y + p2.y)
        };
      };

      const hammertime = new Hammer.Manager(this.trackDiv.nativeElement, {
        recognizers: [
          [Hammer.Pan, { direction: Hammer.DIRECTION_HORIZONTAL }]
        ]
      });

      // HammerJS seems to bind its events to the window, causing them to
      // propagate to other components, so capture this component's timeline
      // in the closures below
      const componentTimeline = this.timeline;
      let initialZoom;
      let initialDistance;
      let offsetAtPanStart;
      let startX;
      let isZooming;

      const scroll = (ev) => {
        if (ev.center.x - startX === 0) {
          return;
        }

        if (zoomGestureJustEnded) {
          zoomGestureJustEnded = false;
          console.log('Skip this event: likely a single touch dangling from pinch');
          return;
        }
        componentTimeline.timeContext.offset = offsetAtPanStart +
          componentTimeline.timeContext.timeToPixel.invert(ev.deltaX);
        componentTimeline.tracks.update();
      };

      const zoom = (ev) => {
        if (ev.touches.length < 2) {
          return;
        }

        ev.preventDefault();
        const minZoom = componentTimeline.state.minZoom;
        const maxZoom = componentTimeline.state.maxZoom;
        const p1: Point = {
          x: ev.touches[0].clientX,
          y: ev.touches[0].clientY
        };
        const p2: Point = {
          x: ev.touches[1].clientX,
          y: ev.touches[1].clientY
        };
        const distance = calculateDistance(p1, p2);
        const midPoint = calculateMidPoint(p1, p2);

        const lastCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        const exponent = pixelToExponent(distance - initialDistance);
        const targetZoom = initialZoom * Math.pow(2, exponent);

        componentTimeline.timeContext.zoom =
          Math.min(Math.max(targetZoom, minZoom), maxZoom);

        const newCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        componentTimeline.timeContext.offset += newCenterTime - lastCenterTime;
        componentTimeline.tracks.update();
      };
      hammertime.on('panstart', (ev) => {
        offsetAtPanStart = componentTimeline.timeContext.offset;
        startX = ev.center.x;
      });
      hammertime.on('panleft', scroll);
      hammertime.on('panright', scroll);

      const element: HTMLElement = this.trackDiv.nativeElement;
      element.addEventListener('touchstart', (e) => {
        if (e.touches.length < 2) {
          return;
        }

        isZooming = true;
        initialZoom = componentTimeline.timeContext.zoom;

        initialDistance = calculateDistance({
          x: e.touches[0].clientX,
          y: e.touches[0].clientY
        }, {
          x: e.touches[1].clientX,
          y: e.touches[1].clientY
        });
      });
      element.addEventListener('touchend', () => {
        if (isZooming) {
          isZooming = false;
          zoomGestureJustEnded = true;
        }
      });
      element.addEventListener('touchmove', zoom);
    }
    // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`);
    // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`);
    return this.timeline;
  }

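  // Estimate the given percentile of the values in a matrix of columns by
  // random sampling, drawing at least one sample from every column.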
  estimatePercentile(matrix, percentile) {
    // our sample is not evenly distributed across the whole data set:
    // it is guaranteed to include at least one sample from every
    // column, and could sample some values more than once. But it
    // should be good enough in most cases (todo: show this)
    if (matrix.length === 0) {
      return 0.0;
    }
    const w = matrix.length;
    const h = matrix[0].length;
    const n = w * h;
    const m = (n > 50000 ? 50000 : n); // should base that on the %ile
    let mPer = Math.floor(m / w);
    if (mPer < 1) {
      mPer = 1;
    }

    const sample = [];
    for (let x = 0; x < w; ++x) {
      for (let i = 0; i < mPer; ++i) {
        const y = Math.floor(Math.random() * h);
        const value = matrix[x][y];
        if (!isNaN(value) && value !== Infinity) {
          sample.push(value);
        }
      }
    }
    if (sample.length === 0) {
      console.log('WARNING: No samples gathered, even though we hoped for ' +
                  (mPer * w) + ' of them');
      return 0.0;
    }
    sample.sort((a, b) => a - b);
    const ix = Math.floor((sample.length * percentile) / 100);
    console.log('Estimating ' + percentile + '-%ile of ' +
                n + '-sample dataset (' + w + ' x ' + h + ') as value ' + ix +
                ' of sorted ' + sample.length + '-sample subset');
    const estimate = sample[ix];
    console.log('Estimate is: ' + estimate + ' (where min sampled value = ' +
                sample[0] + ' and max = ' + sample[sample.length - 1] + ')');
    return estimate;
  }

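  // Builds a mapper from a value in [0, 1] to an [r, g, b] triple (each
  // component in [0, 1]) by linear interpolation between the given hex
  // colours.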
  interpolatingMapper(hexColours) {
    const colours = hexColours.map(n => {
      const i = parseInt(n, 16);
      return [ ((i >> 16) & 255) / 255.0,
               ((i >> 8) & 255) / 255.0,
               ((i) & 255) / 255.0 ];
    });
    const last = colours.length - 1;
    return (value => {
      const m = value * last;
      if (m >= last) {
        return colours[last];
      }
      if (m <= 0) {
        return colours[0];
      }
      const base = Math.floor(m);
      const prop0 = base + 1.0 - m;
      const prop1 = m - base;
      const c0 = colours[base];
      const c1 = colours[base + 1];
      return [ c0[0] * prop0 + c1[0] * prop1,
               c0[1] * prop0 + c1[1] * prop1,
               c0[2] * prop0 + c1[2] * prop1 ];
    });
  }

  iceMapper() {
    const hexColours = [
      // Based on ColorBrewer ylGnBu
      'ffffff', 'ffff00', 'f7fcf0', 'e0f3db', 'ccebc5', 'a8ddb5',
      '7bccc4', '4eb3d3', '2b8cbe', '0868ac', '084081', '042040'
    ];
    hexColours.reverse();
    return this.interpolatingMapper(hexColours);
  }

  hsv2rgb(h, s, v) { // all values in range [0, 1]
    const i = Math.floor(h * 6);
    const f = h * 6 - i;
    const p = v * (1 - s);
    const q = v * (1 - f * s);
    const t = v * (1 - (1 - f) * s);
    let r = 0, g = 0, b = 0;
    switch (i % 6) {
      case 0: r = v; g = t; b = p; break;
      case 1: r = q; g = v; b = p; break;
      case 2: r = p; g = v; b = t; break;
      case 3: r = p; g = q; b = v; break;
      case 4: r = t; g = p; b = v; break;
      case 5: r = v; g = p; b = q; break;
    }
    return [ r, g, b ];
  }

  greenMapper() {
    const blue = 0.6666;
    const pieslice = 0.3333;
    return (value => {
      const h = blue - value * 2.0 * pieslice;
      const s = 0.5 + value / 2.0;
      const v = value;
      return this.hsv2rgb(h, s, v);
    });
  }

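  // Maps a value in [0, 1] onto a "sunset" palette. The constants below
  // appear to be hand-tuned: red and green ramp up linearly while blue
  // peaks and folds back down once the value passes roughly 0.277.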
  sunsetMapper() {
    return (value => {
      const r = (value - 0.24) * 2.38;
      const g = (value - 0.64) * 2.777;
      let b = (3.6 * value);
      if (value > 0.277) {
        b = 2.0 - b;
      }
      return [ r, g, b ];
    });
  }

  clearTimeline(): void {
    // Loop through layers and remove them. waves-ui provides methods for
    // this, but they seem not to work properly.
    const timeContextChildren = this.timeline.timeContext._children;
    for (const track of this.timeline.tracks) {
      if (track.layers.length === 0) { continue; }
      const trackLayers = Array.from(track.layers);
      while (trackLayers.length) {
        const layer: Layer = trackLayers.pop();
        if (this.layers.includes(layer)) {
          track.remove(layer);
          this.layers.splice(this.layers.indexOf(layer), 1);
          const index = timeContextChildren.indexOf(layer.timeContext);
          if (index >= 0) {
            timeContextChildren.splice(index, 1);
          }
          layer.destroy();
        }
      }
    }
  }

  renderWaveform(buffer: AudioBuffer): void {
    // const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height;
    if (this.timeline) {
      // resize
      const width = this.trackDiv.nativeElement.getBoundingClientRect().width;

      this.clearTimeline();

      this.timeline.visibleWidth = width;
      this.timeline.pixelsPerSecond = width / buffer.duration;
    } else {
      this.renderTimeline(buffer.duration);
    }
    // only look the track up once the timeline is guaranteed to exist
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);
    waveTrack.height = height;
    this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;

    // time axis
    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
      height: height,
      color: '#b0b0b0'
    });
    this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);

    const nchannels = buffer.numberOfChannels;
    const totalWaveHeight = height * 0.9;
    const waveHeight = totalWaveHeight / nchannels;

    for (let ch = 0; ch < nchannels; ++ch) {
      console.log('about to construct a waveform layer for channel ' + ch);
      const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
        top: (height - totalWaveHeight) / 2 + waveHeight * ch,
        height: waveHeight,
        color: '#0868ac',
        channel: ch
      });
      this.addLayer(waveformLayer, waveTrack, this.timeline.timeContext);
    }

    this.cursorLayer = new wavesUI.helpers.CursorLayer({
      height: height,
      color: '#c33c54'
    });
    this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
    waveTrack.render();
    waveTrack.update();

    this.isLoading = false;
    this.ref.markForCheck();
    this.animate();
  }

  renderSpectrogram(buffer: AudioBuffer): void {
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`);

    const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
      top: 0,
      height: height,
      stepSize: 512,
      blockSize: 1024,
      normalise: 'none',
      mapper: this.sunsetMapper()
    });
    this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);

    this.timeline.tracks.update();
  }

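  // Draws one line layer per vector feature onto the wave track, all sharing
  // a single [min, max] y-range, plus one scale layer and one highlight
  // layer that reads its values from all of the lines.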
  private addLineLayers(features: VectorFeature[],
                        unit: string,
                        colour: Colour) {

    // Winnow out empty features
    features = features.filter(feature => (feature.data.length > 0));

    // First establish a [min, max] range across all of the features
    let [min, max] = features.reduce(
      (range, feature) => feature.data.reduce((acc, val) => {
        const [lo, hi] = acc;
        return [Math.min(lo, val), Math.max(hi, val)];
      }, range),
      [Infinity, -Infinity]);

    console.log('addLineLayers: ' + features.length +
                ' non-empty features, overall min = ' + min +
                ', max = ' + max);

    if (min === Infinity) {
      min = 0;
      max = 1;
    }

    if (min !== min || max !== max) { // true only if either is NaN
      console.log('WARNING: min or max is NaN');
      min = 0;
      max = 1;
    }

    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);

    // Now add a line layer for each vector feature
    const lineLayers = features.map(feature => {

      let duration = 0;

      // Give the plot items positions relative to the start of the
      // line, rather than relative to absolute time 0. This is
      // because we'll be setting the layer timeline start property
      // later on and these will be positioned relative to that

      const plotData = [...feature.data].map((val, i) => {
        const t = i * feature.stepDuration;
        duration = t + feature.stepDuration;
        return {
          cx: t,
          cy: val
        };
      });

      const lineLayer = new wavesUI.helpers.LineLayer(plotData, {
        color: colour,
        height: height,
        yDomain: [ min, max ]
      });
      this.addLayer(
        lineLayer,
        waveTrack,
        this.timeline.timeContext
      );

      // Set start and duration so that the highlight layer can use
      // them to determine which line to draw values from
      lineLayer.start = feature.startTime;
      lineLayer.duration = duration;

      return lineLayer;
    });

    // And a single scale layer at left
    //!!! todo: unit in scale layer
    const scaleLayer = new wavesUI.helpers.ScaleLayer({
      tickColor: colour,
      textColor: colour,
      height: height,
      yDomain: [ min, max ]
    });
    this.addLayer(
      scaleLayer,
      waveTrack,
      this.timeline.timeContext
    );

    // And a single highlight layer which uses all of the line layers
    // as its source material
    this.highlightLayer = new wavesUI.helpers.HighlightLayer(lineLayers, {
      opacity: 0.7,
      height: height,
      color: '#c33c54',
      labelOffset: 38,
      yDomain: [ min, max ],
      unit
    });
    this.addLayer(
      this.highlightLayer,
      waveTrack,
      this.timeline.timeContext
    );
  }

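  // Renders extracted features onto the existing timeline, choosing a layer
  // type from the feature shape ('vector', 'tracks', 'list' or 'matrix').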
  // TODO refactor - this doesn't belong here
  private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
    if (this.isOneShotExtractor && !this.hasShot) {
      this.featureExtractionSubscription.unsubscribe();
      this.hasShot = true;
    }

    if (!extracted.hasOwnProperty('features')
        || !extracted.hasOwnProperty('outputDescriptor')) {
      return;
    }
    if (!extracted.features.hasOwnProperty('shape')
        || !extracted.features.hasOwnProperty('collected')) {
      return;
    }
    const features: FeatureCollection = (extracted.features as FeatureCollection);
    const outputDescriptor = extracted.outputDescriptor;
    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);

    let unit = '';
    if (outputDescriptor.configured.hasOwnProperty('unit')) {
      unit = outputDescriptor.configured.unit;
    }

    // TODO refactor all of this
    switch (features.shape) {

      case 'vector': {
        const collected = features.collected as VectorFeature;
        this.addLineLayers([collected], unit, colour);
        break;
      }

      case 'tracks': {
        const collected = features.collected as TracksFeature;
        this.addLineLayers(collected, unit, colour);
        break;
      }

      case 'list': {
        const featureData = features.collected as FeatureList;
        if (featureData.length === 0) {
          return;
        }
        // TODO look at output descriptor instead of directly inspecting features
        const hasDuration = outputDescriptor.configured.hasDuration;
        const isMarker = !hasDuration
          && outputDescriptor.configured.binCount === 0
          && featureData[0].featureValues == null;
        const isRegion = hasDuration
          && featureData[0].timestamp != null;
        console.log('Have list features: length ' + featureData.length +
                    ', isMarker ' + isMarker + ', isRegion ' + isRegion +
                    ', hasDuration ' + hasDuration);
        // TODO refactor, this is incomprehensible
        if (isMarker) {
          const plotData = featureData.map(feature => ({
            time: toSeconds(feature.timestamp),
            label: feature.label
          }));
          const featureLayer = new wavesUI.helpers.TickLayer(plotData, {
            height: height,
            color: colour,
            labelPosition: 'bottom',
            shadeSegments: true
          });
          this.addLayer(
            featureLayer,
            waveTrack,
            this.timeline.timeContext
          );
        } else if (isRegion) {
          console.log('Output is of region type');
          const binCount = outputDescriptor.configured.binCount || 0;
          const isBarRegion = featureData[0].featureValues.length >= 1 || binCount >= 1;
          const getSegmentArgs = () => {
            if (isBarRegion) {

              // TODO refactor - this is messy
              interface FoldsToNumber<T> {
                reduce(fn: (previousValue: number,
                            currentValue: T,
                            currentIndex: number,
                            array: ArrayLike<T>) => number,
                       initialValue?: number): number;
              }

              // TODO potentially change impl., i.e avoid reduce
              const findMin = <T>(arr: FoldsToNumber<T>,
                                  getElement: (x: T) => number): number => {
                return arr.reduce((min, val) => Math.min(min, getElement(val)), Infinity);
              };

              const findMax = <T>(arr: FoldsToNumber<T>,
                                  getElement: (x: T) => number): number => {
                return arr.reduce((max, val) => Math.max(max, getElement(val)), -Infinity);
              };

              const min = findMin<Feature>(featureData, (x: Feature) => {
                return findMin<number>(x.featureValues, y => y);
              });

              const max = findMax<Feature>(featureData, (x: Feature) => {
                return findMax<number>(x.featureValues, y => y);
              });

              const barHeight = 1.0 / height;
              return [
                featureData.reduce((bars, feature) => {
                  const staticProperties = {
                    x: toSeconds(feature.timestamp),
                    width: toSeconds(feature.duration),
                    height: min + barHeight,
                    color: colour,
                    opacity: 0.8
                  };
                  // TODO avoid copying Float32Array to an array - map is problematic here
                  return bars.concat([...feature.featureValues]
                    .map(val => Object.assign({}, staticProperties, {y: val})));
                }, []),
                {yDomain: [min, max + barHeight], height: height} as any
              ];
            } else {
              return [featureData.map(feature => ({
                x: toSeconds(feature.timestamp),
                width: toSeconds(feature.duration),
                color: colour,
                opacity: 0.8
              })), {height: height}];
            }
          };

          const segmentLayer = new wavesUI.helpers.SegmentLayer(
            ...getSegmentArgs()
          );
          this.addLayer(
            segmentLayer,
            waveTrack,
            this.timeline.timeContext
          );
        }
        break;
      }
      case 'matrix': {
        const collected = features.collected as MatrixFeature;
        const startTime = collected.startTime; //!!! + make use of
        const stepDuration = collected.stepDuration;
        const matrixData = collected.data;

        if (matrixData.length === 0) {
          return;
        }

        console.log('matrix data length = ' + matrixData.length);
        console.log('height of first column = ' + matrixData[0].length);
        const targetValue = this.estimatePercentile(matrixData, 95);
        const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
        console.log('setting gain to ' + gain);
        const matrixEntity =
          new wavesUI.utils.PrefilledMatrixEntity(matrixData,
                                                  0, // startTime
                                                  stepDuration);
        const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
          gain,
          top: 0,
          height: height,
          normalise: 'none',
          mapper: this.iceMapper()
        });
        this.addLayer(
          matrixLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      default:
        console.log(
          `Cannot render an appropriate layer for feature shape '${features.shape}'`
        );
    }

    this.isLoading = false;
    this.ref.markForCheck();
    this.timeline.tracks.update();
  }

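  // Drives the seek cursor outside Angular's zone: repaints it on each
  // animation frame while playing, and pages the visible window forward or
  // backward when the playhead moves out of view.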
  private animate(): void {
    if (!this.isSeeking) {
      return;
    }

    this.ngZone.runOutsideAngular(() => {
      // listen for time passing...
      const updateSeekingCursor = () => {
        const currentTime = this.audioService.getCurrentTime();
        this.cursorLayer.currentPosition = currentTime;
        this.cursorLayer.update();

        if (typeof(this.highlightLayer) !== 'undefined') {
          this.highlightLayer.currentPosition = currentTime;
          this.highlightLayer.update();
        }

        const currentOffset = this.timeline.timeContext.offset;
        const offsetTimestamp = currentOffset + currentTime;

        const visibleDuration = this.timeline.timeContext.visibleDuration;
        // TODO reduce duplication between directions and make more declarative
        // this kinda logic should also be tested
        const mustPageForward = offsetTimestamp > visibleDuration;
        const mustPageBackward = currentTime < -currentOffset;

        if (mustPageForward) {
          const hasSkippedMultiplePages =
            offsetTimestamp - visibleDuration > visibleDuration;

          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset - visibleDuration;
          this.timeline.tracks.update();
        }

        if (mustPageBackward) {
          const hasSkippedMultiplePages =
            currentTime + visibleDuration < -currentOffset;
          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset + visibleDuration;
          this.timeline.tracks.update();
        }

        if (this.isPlaying) {
          requestAnimationFrame(updateSeekingCursor);
        }
      };
      updateSeekingCursor();
    });
  }

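  // Adds a layer to a track, giving it its own LayerTimeContext unless it is
  // an axis (axes share the timeline's context), and keeps the cursor layer
  // rendered on top of everything else.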
  private addLayer(layer: Layer, track: Track,
                   timeContext: any, isAxis: boolean = false): void {
    timeContext.zoom = 1.0;
    if (!layer.timeContext) {
      layer.setTimeContext(isAxis ?
        timeContext : new wavesUI.core.LayerTimeContext(timeContext));
    }
    track.add(layer);
    this.layers.push(layer);
    layer.render();
    layer.update();
    if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
      track.$layout.appendChild(this.cursorLayer.$el);
    }
  }

  ngOnDestroy(): void {
    if (this.featureExtractionSubscription) {
      this.featureExtractionSubscription.unsubscribe();
    }
    if (this.playingStateSubscription) {
      this.playingStateSubscription.unsubscribe();
    }
    if (this.seekedSubscription) {
      this.seekedSubscription.unsubscribe();
    }
    if (this.onAudioDataSubscription) {
      this.onAudioDataSubscription.unsubscribe();
    }
  }

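  // Distinguish a click-to-seek from a drag or zoom gesture: seekEnd() only
  // seeks if neither zoom nor offset has changed since seekStart().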
  seekStart(): void {
    this.zoomOnMouseDown = this.timeline.timeContext.zoom;
    this.offsetOnMouseDown = this.timeline.timeContext.offset;
  }

  seekEnd(x: number): void {
    const hasSameZoom: boolean = this.zoomOnMouseDown ===
      this.timeline.timeContext.zoom;
    const hasSameOffset: boolean = this.offsetOnMouseDown ===
      this.timeline.timeContext.offset;
    if (hasSameZoom && hasSameOffset) {
      this.seek(x);
    }
  }

  seek(x: number): void {
    if (this.timeline) {
      const timeContext: any = this.timeline.timeContext;
      if (this.isSeeking) {
        this.audioService.seekTo(
          timeContext.timeToPixel.invert(x) - timeContext.offset
        );
      }
    }
  }
}