import {
  Component,
  OnInit,
  ViewChild,
  ElementRef,
  Input,
  AfterViewInit,
  NgZone,
  OnDestroy,
  ChangeDetectorRef
} from '@angular/core';
import {
  AudioPlayerService, AudioResource,
  AudioResourceError
} from '../services/audio-player/audio-player.service';
import wavesUI from 'waves-ui';
import {
  FeatureExtractionService
} from '../services/feature-extraction/feature-extraction.service';
import {Subscription} from 'rxjs/Subscription';
import {
  FeatureCollection,
  FixedSpacedFeatures,
  SimpleResponse
} from 'piper/HigherLevelUtilities';
import {toSeconds} from 'piper';
import {FeatureList, Feature} from 'piper/Feature';
import * as Hammer from 'hammerjs';
import {WavesSpectrogramLayer} from '../spectrogram/Spectrogram';

// waves-ui ships without type declarations, so alias the types we use to any
type Timeline = any;
type Layer = any;
type Track = any;
type Colour = string;

const colours = function* () {
  const circularColours = [
    '#0868ac', // "sapphire blue", our waveform / header colour
    '#c33c54', // "brick red"
    '#17bebb', // "tiffany blue"
    '#001021', // "rich black"
    '#fa8334', // "mango tango"
    '#034748'  // "deep jungle green"
  ];
  let index = 0;
  const nColours = circularColours.length;
  while (true) {
    yield circularColours[index = ++index % nColours];
  }
}();
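// Note: the pre-increment above means the first colours.next().value is
// '#c33c54', not '#0868ac'; presumably deliberate, so that the first feature
// layer rendered doesn't share the waveform's own colour.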

@Component({
  selector: 'ugly-waveform',
  templateUrl: './waveform.component.html',
  styleUrls: ['./waveform.component.css']
})
export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {

  @ViewChild('track') trackDiv: ElementRef;

  @Input() timeline: Timeline;
  @Input() trackIdPrefix: string;
  @Input() set isSubscribedToExtractionService(isSubscribed: boolean) {
    if (isSubscribed) {
      if (this.featureExtractionSubscription) {
        return;
      }

      this.featureExtractionSubscription =
        this.piperService.featuresExtracted$.subscribe(
          features => {
            this.renderFeatures(features, colours.next().value);
          });
    } else {
      if (this.featureExtractionSubscription) {
        this.featureExtractionSubscription.unsubscribe();
      }
    }
  }
  @Input() set isSubscribedToAudioService(isSubscribed: boolean) {
    this._isSubscribedToAudioService = isSubscribed;
    if (isSubscribed) {
      if (this.onAudioDataSubscription) {
        return;
      }

      this.onAudioDataSubscription =
        this.audioService.audioLoaded$.subscribe(res => {
          const wasError = (res as AudioResourceError).message != null;

          if (wasError) {
            console.warn('No audio, display error?');
          } else {
            this.audioBuffer = (res as AudioResource).samples;
          }
        });
    } else {
      if (this.onAudioDataSubscription) {
        this.onAudioDataSubscription.unsubscribe();
      }
    }
  }

  get isSubscribedToAudioService(): boolean {
    return this._isSubscribedToAudioService;
  }

  @Input() set isOneShotExtractor(isOneShot: boolean) {
    this._isOneShotExtractor = isOneShot;
  }

  get isOneShotExtractor(): boolean {
    return this._isOneShotExtractor;
  }

  @Input() set isSeeking(isSeeking: boolean) {
    this._isSeeking = isSeeking;
    if (isSeeking) {
      if (this.seekedSubscription) {
        return;
      }
      if (this.playingStateSubscription) {
        return;
      }

      this.seekedSubscription = this.audioService.seeked$.subscribe(() => {
        if (!this.isPlaying) {
          this.animate();
        }
      });
      this.playingStateSubscription =
        this.audioService.playingStateChange$.subscribe(
          isPlaying => {
            this.isPlaying = isPlaying;
            if (this.isPlaying) {
              this.animate();
            }
          });
    } else {
      if (this.isPlaying) {
        this.isPlaying = false;
      }
      if (this.playingStateSubscription) {
        this.playingStateSubscription.unsubscribe();
      }
      if (this.seekedSubscription) {
        this.seekedSubscription.unsubscribe();
      }
    }
  }

  get isSeeking(): boolean {
    return this._isSeeking;
  }

  set audioBuffer(buffer: AudioBuffer) {
    this._audioBuffer = buffer || undefined;
    if (this.audioBuffer) {
      this.renderWaveform(this.audioBuffer);
      // this.renderSpectrogram(this.audioBuffer);
    }
  }

  get audioBuffer(): AudioBuffer {
    return this._audioBuffer;
  }

  private _audioBuffer: AudioBuffer;
  private _isSubscribedToAudioService: boolean;
  private _isOneShotExtractor: boolean;
  private _isSeeking: boolean;
  private cursorLayer: any;
  private highlightLayer: any;
  private layers: Layer[];
  private featureExtractionSubscription: Subscription;
  private playingStateSubscription: Subscription;
  private seekedSubscription: Subscription;
  private onAudioDataSubscription: Subscription;
  private isPlaying: boolean;
  private zoomOnMouseDown: number;
  private offsetOnMouseDown: number;
  private hasShot: boolean;
  private isLoading: boolean;

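  // Forces a new colour onto an already-rendered layer by rewriting the
  // colour of every shape in it. Note this reaches into waves-ui internals
  // (the underscore-prefixed maps are private API), so it may be fragile
  // across waves-ui versions.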
  private static changeColour(layer: Layer, colour: string): void {
    const butcherShapes = (shape) => {
      shape.install({color: () => colour});
      shape.params.color = colour;
      shape.update(layer._renderingContext, layer.data);
    };

    layer._$itemCommonShapeMap.forEach(butcherShapes);
    layer._$itemShapeMap.forEach(butcherShapes);
    layer.render();
    layer.update();
  }

  constructor(private audioService: AudioPlayerService,
              private piperService: FeatureExtractionService,
              private ngZone: NgZone,
              private ref: ChangeDetectorRef) {
    this.isSubscribedToAudioService = true;
    this.isSeeking = true;
    this.layers = [];
    this.audioBuffer = undefined;
    this.timeline = undefined;
    this.cursorLayer = undefined;
    this.highlightLayer = undefined;
    this.isPlaying = false;
    this.isLoading = true;
  }

  ngOnInit() {
  }

  ngAfterViewInit(): void {
    this.trackIdPrefix = this.trackIdPrefix || 'default';
    if (this.timeline) {
      // the duration argument is ignored here, as the existing one is reused
      this.renderTimeline(1.0, true, true);
    } else {
      this.renderTimeline();
    }
  }

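  // Creates the timeline (or resizes an existing one), adds the wave track,
  // optionally installs the time axis and seek cursor, and wires up touch
  // panning / pinch-zoom gestures. Returns the timeline it rendered into.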
  renderTimeline(duration: number = 1.0,
                 useExistingDuration: boolean = false,
                 isInitialRender: boolean = false): Timeline {
    const track: HTMLElement = this.trackDiv.nativeElement;
    track.innerHTML = '';
    const height: number = track.getBoundingClientRect().height;
    const width: number = track.getBoundingClientRect().width;
    const pixelsPerSecond = width / duration;
    const hasExistingTimeline = this.timeline instanceof wavesUI.core.Timeline;

    if (hasExistingTimeline) {
      if (!useExistingDuration) {
        this.timeline.pixelsPerSecond = pixelsPerSecond;
        this.timeline.visibleWidth = width;
      }
    } else {
      this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
    }
    const waveTrack = this.timeline.createTrack(
      track,
      height,
      `wave-${this.trackIdPrefix}`
    );
    if (isInitialRender && hasExistingTimeline) {
      // time axis
      const timeAxis = new wavesUI.helpers.TimeAxisLayer({
        height: height,
        color: '#b0b0b0'
      });
      this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);
      this.cursorLayer = new wavesUI.helpers.CursorLayer({
        height: height,
        color: '#c33c54'
      });
      this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    }
    if ('ontouchstart' in window) {
      interface Point {
        x: number;
        y: number;
      }

      let zoomGestureJustEnded = false;

      const pixelToExponent: Function = wavesUI.utils.scales.linear()
        .domain([0, 100]) // 100px => factor 2
        .range([0, 1]);

      const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
        return Math.sqrt(
          Math.pow(p2.x - p1.x, 2) +
          Math.pow(p2.y - p1.y, 2));
      };

      const calculateMidPoint: (p1: Point, p2: Point) => Point = (p1, p2) => {
        return {
          x: 0.5 * (p1.x + p2.x),
          y: 0.5 * (p1.y + p2.y)
        };
      };

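      // Zoom maths used below: pixelToExponent maps a change in pinch
      // distance to an exponent (0px -> 0, 100px -> 1), and the zoom handler
      // computes targetZoom = initialZoom * 2^exponent, so spreading the
      // fingers 100px further apart doubles the zoom and pinching them 100px
      // closer halves it.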
      const hammertime = new Hammer.Manager(this.trackDiv.nativeElement, {
        recognizers: [
          [Hammer.Pan, { direction: Hammer.DIRECTION_HORIZONTAL }]
        ]
      });

      // HammerJS appears to bind its events to the window, causing them to
      // propagate to other components, so capture this component's timeline
      // in a local for the handlers below
      const componentTimeline = this.timeline;
      let initialZoom;
      let initialDistance;
      let offsetAtPanStart;
      let startX;
      let isZooming;

      const scroll = (ev) => {
        if (ev.center.x - startX === 0) {
          return;
        }

        if (zoomGestureJustEnded) {
          zoomGestureJustEnded = false;
          console.log('Skip this event: likely a single touch dangling from a pinch');
          return;
        }
        componentTimeline.timeContext.offset = offsetAtPanStart +
          componentTimeline.timeContext.timeToPixel.invert(ev.deltaX);
        componentTimeline.tracks.update();
      };

      const zoom = (ev) => {
        if (ev.touches.length < 2) {
          return;
        }

        ev.preventDefault();
        const minZoom = componentTimeline.state.minZoom;
        const maxZoom = componentTimeline.state.maxZoom;
        const p1: Point = {
          x: ev.touches[0].clientX,
          y: ev.touches[0].clientY
        };
        const p2: Point = {
          x: ev.touches[1].clientX,
          y: ev.touches[1].clientY
        };
        const distance = calculateDistance(p1, p2);
        const midPoint = calculateMidPoint(p1, p2);

        const lastCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        const exponent = pixelToExponent(distance - initialDistance);
        const targetZoom = initialZoom * Math.pow(2, exponent);

        componentTimeline.timeContext.zoom =
          Math.min(Math.max(targetZoom, minZoom), maxZoom);

        const newCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        componentTimeline.timeContext.offset += newCenterTime - lastCenterTime;
        componentTimeline.tracks.update();
      };
      hammertime.on('panstart', (ev) => {
        offsetAtPanStart = componentTimeline.timeContext.offset;
        startX = ev.center.x;
      });
      hammertime.on('panleft', scroll);
      hammertime.on('panright', scroll);

      const element: HTMLElement = this.trackDiv.nativeElement;
      element.addEventListener('touchstart', (e) => {
        if (e.touches.length < 2) {
          return;
        }

        isZooming = true;
        initialZoom = componentTimeline.timeContext.zoom;

        initialDistance = calculateDistance({
          x: e.touches[0].clientX,
          y: e.touches[0].clientY
        }, {
          x: e.touches[1].clientX,
          y: e.touches[1].clientY
        });
      });
      element.addEventListener('touchend', () => {
        if (isZooming) {
          isZooming = false;
          zoomGestureJustEnded = true;
        }
      });
      element.addEventListener('touchmove', zoom);
    }
    // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`);
    // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`);
    return this.timeline;
  }

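  // Worked example: for a 1000-column x 100-row matrix, n = 100000, so m is
  // capped at 50000 and m_per = floor(50000 / 1000) = 50 random draws per
  // column; the 95th percentile estimate is then read at index
  // floor(50000 * 95 / 100) = 47500 of the sorted sample (assuming no
  // NaN/Infinity values were skipped).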
  estimatePercentile(matrix, percentile) {
    // Our sample is not evenly distributed across the whole data set: it is
    // guaranteed to include at least one sample from every column, and could
    // sample some values more than once. But it should be good enough in
    // most cases (todo: show this).
    if (matrix.length === 0) {
      return 0.0;
    }
    const w = matrix.length;
    const h = matrix[0].length;
    const n = w * h;
    const m = (n > 50000 ? 50000 : n); // should base that on the %ile
    let m_per = Math.floor(m / w);
    if (m_per < 1) {
      m_per = 1;
    }

    const sample = [];
    for (let x = 0; x < w; ++x) {
      for (let i = 0; i < m_per; ++i) {
        const y = Math.floor(Math.random() * h);
        const value = matrix[x][y];
        if (!isNaN(value) && value !== Infinity) {
          sample.push(value);
        }
      }
    }
    if (sample.length === 0) {
      console.warn('No samples gathered, even though we hoped for ' +
        (m_per * w) + ' of them');
      return 0.0;
    }
    sample.sort((a, b) => a - b);
    const ix = Math.floor((sample.length * percentile) / 100);
    console.log('Estimating ' + percentile + '-%ile of ' +
      n + '-sample dataset (' + w + ' x ' + h + ') as value ' + ix +
      ' of sorted ' + sample.length + '-sample subset');
    const estimate = sample[ix];
    console.log('Estimate is: ' + estimate + ' (where min sampled value = ' +
      sample[0] + ' and max = ' + sample[sample.length - 1] + ')');
    return estimate;
  }

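  // Maps a value in [0, 1] to an interpolated [r, g, b] triple (each channel
  // normalised to [0, 1]) between the given hex colour stops. For example,
  // with hypothetical stops ['000000', 'ffffff'], a value of 0.25 yields
  // [0.25, 0.25, 0.25].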
  interpolatingMapper(hexColours) {
    const colours = hexColours.map(n => {
      const i = parseInt(n, 16);
      return [ ((i >> 16) & 255) / 255.0,
               ((i >> 8) & 255) / 255.0,
               ((i) & 255) / 255.0 ];
    });
    const last = colours.length - 1;
    return (value => {
      const m = value * last;
      if (m >= last) {
        return colours[last];
      }
      if (m <= 0) {
        return colours[0];
      }
      const base = Math.floor(m);
      const prop0 = base + 1.0 - m;
      const prop1 = m - base;
      const c0 = colours[base];
      const c1 = colours[base + 1];
      return [ c0[0] * prop0 + c1[0] * prop1,
               c0[1] * prop0 + c1[1] * prop1,
               c0[2] * prop0 + c1[2] * prop1 ];
    });
  }

  iceMapper() {
    const hexColours = [
      // Based on ColorBrewer ylGnBu
      'ffffff', 'ffff00', 'f7fcf0', 'e0f3db', 'ccebc5', 'a8ddb5',
      '7bccc4', '4eb3d3', '2b8cbe', '0868ac', '084081', '042040'
    ];
    hexColours.reverse();
    return this.interpolatingMapper(hexColours);
  }

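  // Standard HSV-to-RGB conversion. For example, hsv2rgb(0, 1, 1) takes
  // case 0 (i = 0, f = 0, so p = 0, q = 1, t = 0) and returns pure red,
  // [1, 0, 0].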
  hsv2rgb(h, s, v) { // all values in range [0, 1]
    const i = Math.floor(h * 6);
    const f = h * 6 - i;
    const p = v * (1 - s);
    const q = v * (1 - f * s);
    const t = v * (1 - (1 - f) * s);
    let r = 0, g = 0, b = 0;
    switch (i % 6) {
      case 0: r = v; g = t; b = p; break;
      case 1: r = q; g = v; b = p; break;
      case 2: r = p; g = v; b = t; break;
      case 3: r = p; g = q; b = v; break;
      case 4: r = t; g = p; b = v; break;
      case 5: r = v; g = p; b = q; break;
    }
    return [ r, g, b ];
  }

  greenMapper() {
    const blue = 0.6666;
    const pieslice = 0.3333;
    return (value => {
      const h = blue - value * 2.0 * pieslice;
      const s = 0.5 + value / 2.0;
      const v = value;
      return this.hsv2rgb(h, s, v);
    });
  }

  sunsetMapper() {
    return (value => {
      const r = (value - 0.24) * 2.38;
      const g = (value - 0.64) * 2.777;
      let b = (3.6 * value);
      if (value > 0.277) {
        b = 2.0 - b;
      }
      return [ r, g, b ];
    });
  }

  clearTimeline(): void {
    // Loop through the layers and remove them. waves-ui provides methods for
    // this, but they don't seem to work properly.
    const timeContextChildren = this.timeline.timeContext._children;
    for (const track of this.timeline.tracks) {
      if (track.layers.length === 0) { continue; }
      const trackLayers = Array.from(track.layers);
      while (trackLayers.length) {
        const layer: Layer = trackLayers.pop();
        if (this.layers.includes(layer)) {
          track.remove(layer);
          this.layers.splice(this.layers.indexOf(layer), 1);
          const index = timeContextChildren.indexOf(layer.timeContext);
          if (index >= 0) {
            timeContextChildren.splice(index, 1);
          }
          layer.destroy();
        }
      }
    }
  }

  renderWaveform(buffer: AudioBuffer): void {
    // const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height;
    if (this.timeline) {
      // resize
      const width = this.trackDiv.nativeElement.getBoundingClientRect().width;

      this.clearTimeline();

      this.timeline.visibleWidth = width;
      this.timeline.pixelsPerSecond = width / buffer.duration;
    } else {
      this.renderTimeline(buffer.duration);
    }
    // only look the track up once the timeline is guaranteed to exist
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);
    waveTrack.height = height;
    this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;

    // time axis
    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
      height: height,
      color: '#b0b0b0'
    });
    this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);

    const nchannels = buffer.numberOfChannels;
    const totalWaveHeight = height * 0.9;
    const waveHeight = totalWaveHeight / nchannels;

    for (let ch = 0; ch < nchannels; ++ch) {
      console.log('about to construct a waveform layer for channel ' + ch);
      const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
        top: (height - totalWaveHeight) / 2 + waveHeight * ch,
        height: waveHeight,
        color: '#0868ac',
        channel: ch
      });
      this.addLayer(waveformLayer, waveTrack, this.timeline.timeContext);
    }

    this.cursorLayer = new wavesUI.helpers.CursorLayer({
      height: height,
      color: '#c33c54'
    });
    this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
    waveTrack.render();
    waveTrack.update();

    this.isLoading = false;
    this.ref.markForCheck();
    this.animate();
  }

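  // Not called from within this component at present (the call in the
  // audioBuffer setter is commented out); renders a spectrogram of the
  // buffer onto the `grid-` track.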
  renderSpectrogram(buffer: AudioBuffer): void {
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`);

    const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
      top: 0,
      height: height,
      stepSize: 512,
      blockSize: 1024,
      normalise: 'none',
      mapper: this.sunsetMapper()
    });
    this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);

    this.timeline.tracks.update();
  }

  // TODO refactor - this doesn't belong here
  private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
    if (this.isOneShotExtractor && !this.hasShot) {
      this.featureExtractionSubscription.unsubscribe();
      this.hasShot = true;
    }

    if (!extracted.hasOwnProperty('features')
      || !extracted.hasOwnProperty('outputDescriptor')) {
      return;
    }
    if (!extracted.features.hasOwnProperty('shape')
      || !extracted.features.hasOwnProperty('data')) {
      return;
    }
    const features: FeatureCollection = (extracted.features as FeatureCollection);
    const outputDescriptor = extracted.outputDescriptor;
    // const height = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);

    // TODO refactor all of this
    switch (features.shape) {
      case 'vector': {
        const stepDuration = (features as FixedSpacedFeatures).stepDuration;
        const featureData = (features.data as Float32Array);
        if (featureData.length === 0) {
          return;
        }
        const plotData = [...featureData].map((feature, i) => {
          return {
            cx: i * stepDuration,
            cy: feature
          };
        });
        let min = featureData.reduce((m, f) => Math.min(m, f), Infinity);
        let max = featureData.reduce((m, f) => Math.max(m, f), -Infinity);
        if (min === Infinity) {
          min = 0;
          max = 1;
        }
        const lineLayer = new wavesUI.helpers.LineLayer(plotData, {
          color: colour,
          height: height,
          yDomain: [ min, max ]
        });
        this.addLayer(
          lineLayer,
          waveTrack,
          this.timeline.timeContext
        );
        const scaleLayer = new wavesUI.helpers.ScaleLayer({
          color: colour,
          height: height,
          yDomain: [ min, max ]
        });
        this.addLayer(
          scaleLayer,
          waveTrack,
          this.timeline.timeContext
        );
        this.highlightLayer = new wavesUI.helpers.HighlightLayer(lineLayer, {
          opacity: 0.7,
          height: height,
          color: '#c33c54',
          yDomain: [ min, max ]
        });
        this.addLayer(
          this.highlightLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      case 'list': {
        const featureData = (features.data as FeatureList);
        if (featureData.length === 0) {
          return;
        }
        // TODO look at output descriptor instead of directly inspecting features
        const hasDuration = outputDescriptor.configured.hasDuration;
        const isMarker = !hasDuration
          && outputDescriptor.configured.binCount === 0
          && featureData[0].featureValues == null;
        const isRegion = hasDuration
          && featureData[0].timestamp != null;
        console.log('Have list features: length ' + featureData.length +
          ', isMarker ' + isMarker + ', isRegion ' + isRegion +
          ', hasDuration ' + hasDuration);
        // TODO refactor, this is incomprehensible
        if (isMarker) {
          const plotData = featureData.map(feature => ({
            time: toSeconds(feature.timestamp),
            label: feature.label
          }));
          const featureLayer = new wavesUI.helpers.TickLayer(plotData, {
            height: height,
            color: colour,
            labelPosition: 'bottom',
            shadeSegments: true
          });
          this.addLayer(
            featureLayer,
            waveTrack,
            this.timeline.timeContext
          );
        } else if (isRegion) {
          console.log('Output is of region type');
          const binCount = outputDescriptor.configured.binCount || 0;
          // guard against regions without values: featureValues may be absent
          const isBarRegion = (featureData[0].featureValues != null
            && featureData[0].featureValues.length >= 1) || binCount >= 1;
          const getSegmentArgs = () => {
            if (isBarRegion) {
              // TODO refactor - this is messy
              interface FoldsToNumber<T> {
                reduce(fn: (previousValue: number,
                            currentValue: T,
                            currentIndex: number,
                            array: ArrayLike<T>) => number,
                       initialValue?: number): number;
              }

              // TODO potentially change impl., i.e. avoid reduce
              const findMin = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
                return arr.reduce((min, val) => Math.min(min, getElement(val)), Infinity);
              };

              const findMax = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
                return arr.reduce((max, val) => Math.max(max, getElement(val)), -Infinity);
              };

              const min = findMin<Feature>(featureData, (x: Feature) => {
                return findMin<number>(x.featureValues, y => y);
              });

              const max = findMax<Feature>(featureData, (x: Feature) => {
                return findMax<number>(x.featureValues, y => y);
              });

              const barHeight = 1.0 / height;
              return [
                featureData.reduce((bars, feature) => {
                  const staticProperties = {
                    x: toSeconds(feature.timestamp),
                    width: toSeconds(feature.duration),
                    height: min + barHeight,
                    color: colour,
                    opacity: 0.8
                  };
                  // TODO avoid copying Float32Array to an array - map is problematic here
                  return bars.concat([...feature.featureValues]
                    .map(val => Object.assign({}, staticProperties, {y: val})));
                }, []),
                {yDomain: [min, max + barHeight], height: height} as any
              ];
            } else {
              return [featureData.map(feature => ({
                x: toSeconds(feature.timestamp),
                width: toSeconds(feature.duration),
                color: colour,
                opacity: 0.8
              })), {height: height}];
            }
          };

          const segmentLayer = new wavesUI.helpers.SegmentLayer(
            ...getSegmentArgs()
          );
          this.addLayer(
            segmentLayer,
            waveTrack,
            this.timeline.timeContext
          );
        }
        break;
      }
      case 'matrix': {
        const stepDuration = (features as FixedSpacedFeatures).stepDuration;
        // !!! + start time
        const matrixData = (features.data as Float32Array[]);

        if (matrixData.length === 0) {
          return;
        }

        console.log('matrix data length = ' + matrixData.length);
        console.log('height of first column = ' + matrixData[0].length);
        const targetValue = this.estimatePercentile(matrixData, 95);
        const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
        console.log('setting gain to ' + gain);
        const matrixEntity =
          new wavesUI.utils.PrefilledMatrixEntity(matrixData,
            0, // startTime
            stepDuration);
        const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
          gain,
          top: 0,
          height: height,
          normalise: 'none',
          mapper: this.iceMapper()
        });
        this.addLayer(
          matrixLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      default:
        console.log(
          `Cannot render an appropriate layer for feature shape '${features.shape}'`
        );
    }

    this.isLoading = false;
    this.ref.markForCheck();
    this.timeline.tracks.update();
  }

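  // Drives the seek cursor (and the highlight layer, when present) via
  // requestAnimationFrame, run outside Angular's zone so that change
  // detection isn't triggered on every frame; also pages the visible window
  // forwards or backwards when the playhead moves out of view.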
  private animate(): void {
    if (!this.isSeeking) {
      return;
    }

    this.ngZone.runOutsideAngular(() => {
      // listen for time passing...
      const updateSeekingCursor = () => {
        const currentTime = this.audioService.getCurrentTime();
        this.cursorLayer.currentPosition = currentTime;
        this.cursorLayer.update();

        if (typeof(this.highlightLayer) !== 'undefined') {
          this.highlightLayer.currentPosition = currentTime;
          this.highlightLayer.update();
        }

        const currentOffset = this.timeline.timeContext.offset;
        const offsetTimestamp = currentOffset + currentTime;

        const visibleDuration = this.timeline.timeContext.visibleDuration;
        // TODO reduce duplication between directions and make more
        // declarative; this kind of logic should also be tested
        const mustPageForward = offsetTimestamp > visibleDuration;
        const mustPageBackward = currentTime < -currentOffset;

        if (mustPageForward) {
          const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration;

          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset - visibleDuration;
          this.timeline.tracks.update();
        }

        if (mustPageBackward) {
          const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset;
          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset + visibleDuration;
          this.timeline.tracks.update();
        }

        if (this.isPlaying) {
          requestAnimationFrame(updateSeekingCursor);
        }
      };
      updateSeekingCursor();
    });
  }

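  // Attaches a layer to a track, giving it its own LayerTimeContext unless
  // it is the shared time axis, and records it in this.layers so that
  // clearTimeline can remove it later.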
  private addLayer(layer: Layer, track: Track, timeContext: any, isAxis: boolean = false): void {
    timeContext.zoom = 1.0;
    if (!layer.timeContext) {
      layer.setTimeContext(isAxis ?
        timeContext : new wavesUI.core.LayerTimeContext(timeContext));
    }
    track.add(layer);
    this.layers.push(layer);
    layer.render();
    layer.update();
    // re-append the cursor so it stays on top of newly added layers
    if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
      track.$layout.appendChild(this.cursorLayer.$el);
    }
  }

  ngOnDestroy(): void {
    if (this.featureExtractionSubscription) {
      this.featureExtractionSubscription.unsubscribe();
    }
    if (this.playingStateSubscription) {
      this.playingStateSubscription.unsubscribe();
    }
    if (this.seekedSubscription) {
      this.seekedSubscription.unsubscribe();
    }
    if (this.onAudioDataSubscription) {
      this.onAudioDataSubscription.unsubscribe();
    }
  }

  seekStart(): void {
    this.zoomOnMouseDown = this.timeline.timeContext.zoom;
    this.offsetOnMouseDown = this.timeline.timeContext.offset;
  }

  seekEnd(x: number): void {
    const hasSameZoom: boolean = this.zoomOnMouseDown ===
      this.timeline.timeContext.zoom;
    const hasSameOffset: boolean = this.offsetOnMouseDown ===
      this.timeline.timeContext.offset;
    if (hasSameZoom && hasSameOffset) {
      this.seek(x);
    }
  }

  seek(x: number): void {
    if (this.timeline) {
      const timeContext: any = this.timeline.timeContext;
      if (this.isSeeking) {
        this.audioService.seekTo(
          timeContext.timeToPixel.invert(x) - timeContext.offset
        );
      }
    }
  }
}