import {
  Component,
  OnInit,
  ViewChild,
  ElementRef,
  Input,
  AfterViewInit,
  NgZone,
  OnDestroy,
  ChangeDetectorRef
} from '@angular/core';
import {
  AudioPlayerService, AudioResource,
  AudioResourceError
} from '../services/audio-player/audio-player.service';
import wavesUI from 'waves-ui';
import {
  FeatureExtractionService
} from '../services/feature-extraction/feature-extraction.service';
import {Subscription} from 'rxjs/Subscription';
import {
  FeatureCollection,
  FixedSpacedFeatures,
  SimpleResponse
} from 'piper/HigherLevelUtilities';
import {toSeconds} from 'piper';
import {FeatureList, Feature} from 'piper/Feature';
import * as Hammer from 'hammerjs';
import {WavesSpectrogramLayer} from '../spectrogram/Spectrogram';

// waves-ui ships without type definitions, so alias its classes to any
type Timeline = any;
type Layer = any;
type Track = any;
type Colour = string;

@Component({
  selector: 'ugly-waveform',
  templateUrl: './waveform.component.html',
  styleUrls: ['./waveform.component.css']
})
export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {

  @ViewChild('track') trackDiv: ElementRef;

  @Input() timeline: Timeline;
  @Input() trackIdPrefix: string;

  @Input() set isSubscribedToExtractionService(isSubscribed: boolean) {
    if (isSubscribed) {
      if (this.featureExtractionSubscription) {
        return;
      }

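      // Cycle forever through a small fixed palette so successive feature
      // layers get distinguishable colours. The pre-increment means the
      // first layer drawn gets 'red', not 'black'.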
      const colours = function* () {
        const circularColours = [
          'black',
          'red',
          'green',
          'purple',
          'orange'
        ];
        let index = 0;
        const nColours = circularColours.length;
        while (true) {
          yield circularColours[index = ++index % nColours];
        }
      }();

      this.featureExtractionSubscription =
        this.piperService.featuresExtracted$.subscribe(
          features => {
            this.renderFeatures(features, colours.next().value);
          });
    } else {
      if (this.featureExtractionSubscription) {
        this.featureExtractionSubscription.unsubscribe();
      }
    }
  }

  @Input() set isSubscribedToAudioService(isSubscribed: boolean) {
    this._isSubscribedToAudioService = isSubscribed;
    if (isSubscribed) {
      if (this.onAudioDataSubscription) {
        return;
      }

      this.onAudioDataSubscription =
        this.audioService.audioLoaded$.subscribe(res => {
          const wasError = (res as AudioResourceError).message != null;

          if (wasError) {
            console.warn('No audio, display error?');
          } else {
            this.audioBuffer = (res as AudioResource).samples;
          }
        });
    } else {
      if (this.onAudioDataSubscription) {
        this.onAudioDataSubscription.unsubscribe();
      }
    }
  }

  get isSubscribedToAudioService(): boolean {
    return this._isSubscribedToAudioService;
  }

  @Input() set isOneShotExtractor(isOneShot: boolean) {
    this._isOneShotExtractor = isOneShot;
  }

  get isOneShotExtractor(): boolean {
    return this._isOneShotExtractor;
  }

  @Input() set isSeeking(isSeeking: boolean) {
    this._isSeeking = isSeeking;
    if (isSeeking) {
      if (this.seekedSubscription) {
        return;
      }
      if (this.playingStateSubscription) {
        return;
      }

      this.seekedSubscription = this.audioService.seeked$.subscribe(() => {
        if (!this.isPlaying) {
          this.animate();
        }
      });
      this.playingStateSubscription =
        this.audioService.playingStateChange$.subscribe(
          isPlaying => {
            this.isPlaying = isPlaying;
            if (this.isPlaying) {
              this.animate();
            }
          });
    } else {
      if (this.isPlaying) {
        this.isPlaying = false;
      }
      if (this.playingStateSubscription) {
        this.playingStateSubscription.unsubscribe();
      }
      if (this.seekedSubscription) {
        this.seekedSubscription.unsubscribe();
      }
    }
  }

  get isSeeking(): boolean {
    return this._isSeeking;
  }

  set audioBuffer(buffer: AudioBuffer) {
    this._audioBuffer = buffer || undefined;
    if (this.audioBuffer) {
      this.renderWaveform(this.audioBuffer);
      // this.renderSpectrogram(this.audioBuffer);
    }
  }

  get audioBuffer(): AudioBuffer {
    return this._audioBuffer;
  }

  private _audioBuffer: AudioBuffer;
  private _isSubscribedToAudioService: boolean;
  private _isOneShotExtractor: boolean;
  private _isSeeking: boolean;
  private cursorLayer: any;
  private highlightLayer: any;
  private layers: Layer[];
  private featureExtractionSubscription: Subscription;
  private playingStateSubscription: Subscription;
  private seekedSubscription: Subscription;
  private onAudioDataSubscription: Subscription;
  private isPlaying: boolean;
  private zoomOnMouseDown: number;
  private offsetOnMouseDown: number;
  private hasShot: boolean;
  private isLoading: boolean;

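  // Recolour an existing layer by reaching into waves-ui's private shape
  // maps and re-rendering; there appears to be no public API for this,
  // hence the name "butcherShapes" below.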
  private static changeColour(layer: Layer, colour: string): void {
    const butcherShapes = (shape) => {
      shape.install({color: () => colour});
      shape.params.color = colour;
      shape.update(layer._renderingContext, layer.data);
    };

    layer._$itemCommonShapeMap.forEach(butcherShapes);
    layer._$itemShapeMap.forEach(butcherShapes);
    layer.render();
    layer.update();
  }

  constructor(private audioService: AudioPlayerService,
              private piperService: FeatureExtractionService,
              private ngZone: NgZone,
              private ref: ChangeDetectorRef) {
    this.isSubscribedToAudioService = true;
    this.isSeeking = true;
    this.layers = [];
    this.audioBuffer = undefined;
    this.timeline = undefined;
    this.cursorLayer = undefined;
    this.highlightLayer = undefined;
    this.isPlaying = false;
    this.isLoading = true;
  }

  ngOnInit() {
  }

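  // If a timeline was passed in as an input, reuse its existing duration
  // and zoom; otherwise build a fresh one (nominally one second long) that
  // gets resized once audio is actually loaded.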
  ngAfterViewInit(): void {
    this.trackIdPrefix = this.trackIdPrefix || 'default';
    if (this.timeline) {
      this.renderTimeline(null, true, true);
    } else {
      this.renderTimeline();
    }
  }

  renderTimeline(duration: number = 1.0,
                 useExistingDuration: boolean = false,
                 isInitialRender: boolean = false): Timeline {
    const track: HTMLElement = this.trackDiv.nativeElement;
    track.innerHTML = '';
    const height: number = track.getBoundingClientRect().height;
    const width: number = track.getBoundingClientRect().width;
    const pixelsPerSecond = width / duration;
    const hasExistingTimeline = this.timeline instanceof wavesUI.core.Timeline;

    if (hasExistingTimeline) {
      if (!useExistingDuration) {
        this.timeline.pixelsPerSecond = pixelsPerSecond;
        this.timeline.visibleWidth = width;
      }
    } else {
      this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
    }
    const waveTrack = this.timeline.createTrack(
      track,
      height,
      `wave-${this.trackIdPrefix}`
    );
    if (isInitialRender && hasExistingTimeline) {
      // time axis
      const timeAxis = new wavesUI.helpers.TimeAxisLayer({
        height: height,
        color: '#b0b0b0'
      });
      this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);
      this.cursorLayer = new wavesUI.helpers.CursorLayer({
        height: height
      });
      this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    }
    if ('ontouchstart' in window) {
      interface Point {
        x: number;
        y: number;
      }

      let zoomGestureJustEnded = false;

      const pixelToExponent: Function = wavesUI.utils.scales.linear()
        .domain([0, 100]) // 100px => factor 2
        .range([0, 1]);

      const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
        return Math.pow(
          Math.pow(p2.x - p1.x, 2) +
          Math.pow(p2.y - p1.y, 2), 0.5);
      };

      const calculateMidPoint: (p1: Point, p2: Point) => Point = (p1, p2) => {
        return {
          x: 0.5 * (p1.x + p2.x),
          y: 0.5 * (p1.y + p2.y)
        };
      };

      const hammertime = new Hammer.Manager(this.trackDiv.nativeElement, {
        recognizers: [
          [Hammer.Pan, { direction: Hammer.DIRECTION_HORIZONTAL }]
        ]
      });

      // it seems HammerJS binds its events to the window, which can cause
      // them to propagate to other components, so keep a local reference
      // to this component's own timeline
      const componentTimeline = this.timeline;
      let initialZoom;
      let initialDistance;
      let offsetAtPanStart;
      let startX;
      let isZooming;

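      // Pan: shift the timeline offset by the gesture's horizontal delta,
      // converted from pixels back into seconds.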
      const scroll = (ev) => {
        if (ev.center.x - startX === 0) {
          return;
        }

        if (zoomGestureJustEnded) {
          zoomGestureJustEnded = false;
          console.log('Skip this event: likely a single touch dangling from pinch');
          return;
        }
        componentTimeline.timeContext.offset = offsetAtPanStart +
          componentTimeline.timeContext.timeToPixel.invert(ev.deltaX);
        componentTimeline.tracks.update();
      };

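      // Pinch zoom: scale exponentially with the change in distance between
      // the two touches (100px of extra spread doubles the zoom), then nudge
      // the offset so the time under the pinch midpoint stays put.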
      const zoom = (ev) => {
        if (ev.touches.length < 2) {
          return;
        }

        ev.preventDefault();
        const minZoom = componentTimeline.state.minZoom;
        const maxZoom = componentTimeline.state.maxZoom;
        const p1: Point = {
          x: ev.touches[0].clientX,
          y: ev.touches[0].clientY
        };
        const p2: Point = {
          x: ev.touches[1].clientX,
          y: ev.touches[1].clientY
        };
        const distance = calculateDistance(p1, p2);
        const midPoint = calculateMidPoint(p1, p2);

        const lastCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        const exponent = pixelToExponent(distance - initialDistance);
        const targetZoom = initialZoom * Math.pow(2, exponent);

        componentTimeline.timeContext.zoom =
          Math.min(Math.max(targetZoom, minZoom), maxZoom);

        const newCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        componentTimeline.timeContext.offset += newCenterTime - lastCenterTime;
        componentTimeline.tracks.update();
      };

      hammertime.on('panstart', (ev) => {
        offsetAtPanStart = componentTimeline.timeContext.offset;
        startX = ev.center.x;
      });
      hammertime.on('panleft', scroll);
      hammertime.on('panright', scroll);

      const element: HTMLElement = this.trackDiv.nativeElement;
      element.addEventListener('touchstart', (e) => {
        if (e.touches.length < 2) {
          return;
        }

        isZooming = true;
        initialZoom = componentTimeline.timeContext.zoom;

        initialDistance = calculateDistance({
          x: e.touches[0].clientX,
          y: e.touches[0].clientY
        }, {
          x: e.touches[1].clientX,
          y: e.touches[1].clientY
        });
      });
      element.addEventListener('touchend', () => {
        if (isZooming) {
          isZooming = false;
          zoomGestureJustEnded = true;
        }
      });
      element.addEventListener('touchmove', zoom);
    }
    // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`);
    // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`);
  }

  estimatePercentile(matrix, percentile) {
    // our sample is not evenly distributed across the whole data set:
    // it is guaranteed to include at least one sample from every
    // column, and could sample some values more than once. But it
    // should be good enough in most cases (todo: show this)
    if (matrix.length === 0) {
      return 0.0;
    }
    const w = matrix.length;
    const h = matrix[0].length;
    const n = w * h;
    const m = (n > 50000 ? 50000 : n); // should base that on the %ile
    let m_per = Math.floor(m / w);
    if (m_per < 1) {
      m_per = 1;
    }

    const sample = [];
    for (let x = 0; x < w; ++x) {
      for (let i = 0; i < m_per; ++i) {
        const y = Math.floor(Math.random() * h);
        const value = matrix[x][y];
        if (!isNaN(value) && value !== Infinity) {
          sample.push(value);
        }
      }
    }
    if (sample.length === 0) {
      console.log('WARNING: No samples gathered, even though we hoped for ' +
        (m_per * w) + ' of them');
      return 0.0;
    }
    sample.sort((a, b) => a - b);
    const ix = Math.floor((sample.length * percentile) / 100);
    console.log('Estimating ' + percentile + '-%ile of ' +
      n + '-sample dataset (' + w + ' x ' + h + ') as value ' + ix +
      ' of sorted ' + sample.length + '-sample subset');
    const estimate = sample[ix];
    console.log('Estimate is: ' + estimate + ' (where min sampled value = ' +
      sample[0] + ' and max = ' + sample[sample.length - 1] + ')');
    return estimate;
  }

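  // Build a mapper from a value in [0, 1] to an RGB triple (components also
  // in [0, 1]) by linear interpolation along a list of hex colour stops.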
  interpolatingMapper(hexColours) {
    const colours = hexColours.map(n => {
      const i = parseInt(n, 16);
      return [ ((i >> 16) & 255) / 255.0,
               ((i >> 8) & 255) / 255.0,
               ((i) & 255) / 255.0 ];
    });
    const last = colours.length - 1;
    return (value => {
      const m = value * last;
      if (m >= last) {
        return colours[last];
      }
      if (m <= 0) {
        return colours[0];
      }
      const base = Math.floor(m);
      const prop0 = base + 1.0 - m;
      const prop1 = m - base;
      const c0 = colours[base];
      const c1 = colours[base + 1];
      return [ c0[0] * prop0 + c1[0] * prop1,
               c0[1] * prop0 + c1[1] * prop1,
               c0[2] * prop0 + c1[2] * prop1 ];
    });
  }

  iceMapper() {
    const hexColours = [
      // Based on ColorBrewer YlGnBu
      'ffffff', 'ffff00', 'f7fcf0', 'e0f3db', 'ccebc5', 'a8ddb5',
      '7bccc4', '4eb3d3', '2b8cbe', '0868ac', '084081', '042040'
    ];
    hexColours.reverse();
    return this.interpolatingMapper(hexColours);
  }

  hsv2rgb(h, s, v) { // all values in range [0, 1]
    const i = Math.floor(h * 6);
    const f = h * 6 - i;
    const p = v * (1 - s);
    const q = v * (1 - f * s);
    const t = v * (1 - (1 - f) * s);
    let r = 0, g = 0, b = 0;
    switch (i % 6) {
      case 0: r = v; g = t; b = p; break;
      case 1: r = q; g = v; b = p; break;
      case 2: r = p; g = v; b = t; break;
      case 3: r = p; g = q; b = v; break;
      case 4: r = t; g = p; b = v; break;
      case 5: r = v; g = p; b = q; break;
    }
    return [ r, g, b ];
  }

  greenMapper() {
    const blue = 0.6666;
    const pieslice = 0.3333;
    return (value => {
      const h = blue - value * 2.0 * pieslice;
      const s = 0.5 + value / 2.0;
      const v = value;
      return this.hsv2rgb(h, s, v);
    });
  }

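  // Piecewise-linear "sunset" palette: red and green ramp up at different
  // rates while blue rises steeply, peaking near value 0.28, then falls off.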
  sunsetMapper() {
    return (value => {
      const r = (value - 0.24) * 2.38;
      const g = (value - 0.64) * 2.777;
      let b = (3.6 * value);
      if (value > 0.277) {
        b = 2.0 - b;
      }
      return [ r, g, b ];
    });
  }

  clearTimeline(): void {
    // Loop through layers and remove them; waves-ui provides methods for
    // this, but they seem not to work properly.
    const timeContextChildren = this.timeline.timeContext._children;
    for (const track of this.timeline.tracks) {
      if (track.layers.length === 0) { continue; }
      const trackLayers = Array.from(track.layers);
      while (trackLayers.length) {
        const layer: Layer = trackLayers.pop();
        if (this.layers.includes(layer)) {
          track.remove(layer);
          this.layers.splice(this.layers.indexOf(layer), 1);
          const index = timeContextChildren.indexOf(layer.timeContext);
          if (index >= 0) {
            timeContextChildren.splice(index, 1);
          }
          layer.destroy();
        }
      }
    }
  }

  renderWaveform(buffer: AudioBuffer): void {
    // const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height;
    if (this.timeline) {
      // resize to fit the new audio, clearing out any previous layers
      const width = this.trackDiv.nativeElement.getBoundingClientRect().width;

      this.clearTimeline();

      this.timeline.visibleWidth = width;
      this.timeline.pixelsPerSecond = width / buffer.duration;
      this.timeline.getTrackById(`wave-${this.trackIdPrefix}`).height = height;
    } else {
      this.renderTimeline(buffer.duration);
    }
    // the timeline (and its wave track) is guaranteed to exist by now
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);
    this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;

    // time axis
    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
      height: height,
      color: '#b0b0b0'
    });
    this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);

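    // Draw one waveform layer per channel, stacked vertically to fill 90%
    // of the track height.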
    const nchannels = buffer.numberOfChannels;
    const totalWaveHeight = height * 0.9;
    const waveHeight = totalWaveHeight / nchannels;

    for (let ch = 0; ch < nchannels; ++ch) {
      console.log('about to construct a waveform layer for channel ' + ch);
      const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
        top: (height - totalWaveHeight) / 2 + waveHeight * ch,
        height: waveHeight,
        color: 'darkblue',
        channel: ch
      });
      this.addLayer(waveformLayer, waveTrack, this.timeline.timeContext);
    }

    this.cursorLayer = new wavesUI.helpers.CursorLayer({
      height: height
    });
    this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
    waveTrack.render();
    waveTrack.update();

    this.isLoading = false;
    this.ref.markForCheck();
    this.animate();
  }

  renderSpectrogram(buffer: AudioBuffer): void {
    const height: number =
      this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`);

    const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
      top: 0,
      height: height,
      stepSize: 512,
      blockSize: 1024,
      normalise: 'none',
      mapper: this.sunsetMapper()
    });
    this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);

    this.timeline.tracks.update();
  }

  // TODO refactor - this doesn't belong here
  private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
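    // A one-shot extractor renders a single batch of features and then
    // stops listening for further extraction results.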
    if (this.isOneShotExtractor && !this.hasShot) {
      this.featureExtractionSubscription.unsubscribe();
      this.hasShot = true;
    }

    if (!extracted.hasOwnProperty('features')
      || !extracted.hasOwnProperty('outputDescriptor')) {
      return;
    }
    if (!extracted.features.hasOwnProperty('shape')
      || !extracted.features.hasOwnProperty('data')) {
      return;
    }
    const features: FeatureCollection = (extracted.features as FeatureCollection);
    const outputDescriptor = extracted.outputDescriptor;
    // const height = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);

    // TODO refactor all of this
    switch (features.shape) {
      case 'vector': {
        const stepDuration = (features as FixedSpacedFeatures).stepDuration;
        const featureData = (features.data as Float32Array);
        if (featureData.length === 0) {
          return;
        }
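        // Scale so that the largest feature value plots at cy = 1.0.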
        const normalisationFactor = 1.0 /
          featureData.reduce(
            (currentMax, feature) => Math.max(currentMax, feature),
            -Infinity
          );

        const plotData = [...featureData].map((feature, i) => {
          return {
            cx: i * stepDuration,
            cy: feature * normalisationFactor,
            value: feature
          };
        });

        const lineLayer = new wavesUI.helpers.LineLayer(plotData, {
          color: colour,
          height: height
        });
        this.addLayer(
          lineLayer,
          waveTrack,
          this.timeline.timeContext
        );
        this.highlightLayer = new wavesUI.helpers.HighlightLayer(lineLayer, {
          color: colour,
          opacity: 0.7,
          height: height
        });
        this.addLayer(
          this.highlightLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      case 'list': {
        const featureData = (features.data as FeatureList);
        if (featureData.length === 0) {
          return;
        }
        // TODO look at output descriptor instead of directly inspecting features
        const hasDuration = outputDescriptor.configured.hasDuration;
        const isMarker = !hasDuration
          && outputDescriptor.configured.binCount === 0
          && featureData[0].featureValues == null;
        const isRegion = hasDuration
          && featureData[0].timestamp != null;
        console.log('Have list features: length ' + featureData.length +
          ', isMarker ' + isMarker + ', isRegion ' + isRegion +
          ', hasDuration ' + hasDuration);
        // TODO refactor, this is incomprehensible
        if (isMarker) {
          const plotData = featureData.map(feature => ({
            time: toSeconds(feature.timestamp),
            label: feature.label
          }));
          const featureLayer = new wavesUI.helpers.TickLayer(plotData, {
            height: height,
            color: colour,
            labelPosition: 'bottom',
            shadeSegments: true
          });
          this.addLayer(
            featureLayer,
            waveTrack,
            this.timeline.timeContext
          );
        } else if (isRegion) {
          console.log('Output is of region type');
          const binCount = outputDescriptor.configured.binCount || 0;
          const isBarRegion = featureData[0].featureValues.length >= 1 || binCount >= 1;
          const getSegmentArgs = () => {
            if (isBarRegion) {

              // TODO refactor - this is messy
              interface FoldsToNumber<T> {
                reduce(fn: (previousValue: number,
                            currentValue: T,
                            currentIndex: number,
                            array: ArrayLike<T>) => number,
                       initialValue?: number): number;
              }

              // TODO potentially change impl., i.e avoid reduce
              const findMin = <T>(arr: FoldsToNumber<T>,
                                  getElement: (x: T) => number): number => {
                return arr.reduce((min, val) => Math.min(min, getElement(val)), Infinity);
              };

              const findMax = <T>(arr: FoldsToNumber<T>,
                                  getElement: (x: T) => number): number => {
                return arr.reduce((max, val) => Math.max(max, getElement(val)), -Infinity);
              };

              const min = findMin<Feature>(featureData, (x: Feature) => {
                return findMin<number>(x.featureValues, y => y);
              });

              const max = findMax<Feature>(featureData, (x: Feature) => {
                return findMax<number>(x.featureValues, y => y);
              });

              const barHeight = 1.0 / height;
              return [
                featureData.reduce((bars, feature) => {
                  const staticProperties = {
                    x: toSeconds(feature.timestamp),
                    width: toSeconds(feature.duration),
                    height: min + barHeight,
                    color: colour,
                    opacity: 0.8
                  };
                  // TODO avoid copying Float32Array to an array - map is problematic here
                  return bars.concat([...feature.featureValues]
                    .map(val => Object.assign({}, staticProperties, {y: val})));
                }, []),
                {yDomain: [min, max + barHeight], height: height} as any
              ];
            } else {
              return [featureData.map(feature => ({
                x: toSeconds(feature.timestamp),
                width: toSeconds(feature.duration),
                color: colour,
                opacity: 0.8
              })), {height: height}];
            }
          };

          const segmentLayer = new wavesUI.helpers.SegmentLayer(
            ...getSegmentArgs()
          );
          this.addLayer(
            segmentLayer,
            waveTrack,
            this.timeline.timeContext
          );
        }
        break;
      }
      case 'matrix': {
        const stepDuration = (features as FixedSpacedFeatures).stepDuration;
        // !!! + start time
        const matrixData = (features.data as Float32Array[]);

        if (matrixData.length === 0) {
          return;
        }

        console.log('matrix data length = ' + matrixData.length);
        console.log('height of first column = ' + matrixData[0].length);
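        // Choose a display gain that maps the estimated 95th-percentile
        // value to full scale, so outliers don't wash out the image.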
        const targetValue = this.estimatePercentile(matrixData, 95);
        const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
        console.log('setting gain to ' + gain);
        const matrixEntity =
          new wavesUI.utils.PrefilledMatrixEntity(matrixData,
            0, // startTime
            stepDuration);
        const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
          gain,
          top: 0,
          height: height,
          normalise: 'none',
          mapper: this.iceMapper()
        });
        this.addLayer(
          matrixLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      default:
        console.log(
          `Cannot render an appropriate layer for feature shape '${features.shape}'`
        );
    }

    this.isLoading = false;
    this.ref.markForCheck();
    this.timeline.tracks.update();
  }

  private animate(): void {
    if (!this.isSeeking) {
      return;
    }

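    // Run the requestAnimationFrame loop outside Angular's zone so each
    // frame doesn't trigger a change-detection pass.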
    this.ngZone.runOutsideAngular(() => {
      // listen for time passing...
      const updateSeekingCursor = () => {
        const currentTime = this.audioService.getCurrentTime();
        this.cursorLayer.currentPosition = currentTime;
        this.cursorLayer.update();

        if (typeof(this.highlightLayer) !== 'undefined') {
          this.highlightLayer.currentPosition = currentTime;
          this.highlightLayer.update();
        }

        const currentOffset = this.timeline.timeContext.offset;
        const offsetTimestamp = currentOffset + currentTime;

        const visibleDuration = this.timeline.timeContext.visibleDuration;
        // TODO reduce duplication between directions and make more declarative
        // this kinda logic should also be tested
        const mustPageForward = offsetTimestamp > visibleDuration;
        const mustPageBackward = currentTime < -currentOffset;

        if (mustPageForward) {
          const hasSkippedMultiplePages =
            offsetTimestamp - visibleDuration > visibleDuration;

          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset - visibleDuration;
          this.timeline.tracks.update();
        }

        if (mustPageBackward) {
          const hasSkippedMultiplePages =
            currentTime + visibleDuration < -currentOffset;
          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset + visibleDuration;
          this.timeline.tracks.update();
        }

        if (this.isPlaying) {
          requestAnimationFrame(updateSeekingCursor);
        }
      };
      updateSeekingCursor();
    });
  }

  private addLayer(layer: Layer, track: Track, timeContext: any,
                   isAxis: boolean = false): void {
    timeContext.zoom = 1.0;
    if (!layer.timeContext) {
      layer.setTimeContext(isAxis ?
        timeContext : new wavesUI.core.LayerTimeContext(timeContext));
    }
    track.add(layer);
    this.layers.push(layer);
    layer.render();
    layer.update();
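    // Re-appending the cursor element moves it to the end of the track's
    // layout, keeping the cursor drawn above any newly added layer.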
    if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
      track.$layout.appendChild(this.cursorLayer.$el);
    }
  }

  ngOnDestroy(): void {
    if (this.featureExtractionSubscription) {
      this.featureExtractionSubscription.unsubscribe();
    }
    if (this.playingStateSubscription) {
      this.playingStateSubscription.unsubscribe();
    }
    if (this.seekedSubscription) {
      this.seekedSubscription.unsubscribe();
    }
    if (this.onAudioDataSubscription) {
      this.onAudioDataSubscription.unsubscribe();
    }
  }

  seekStart(): void {
    this.zoomOnMouseDown = this.timeline.timeContext.zoom;
    this.offsetOnMouseDown = this.timeline.timeContext.offset;
  }

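  // Treat mouse-up as a seek only when the view was neither zoomed nor
  // panned in between, i.e. it was a plain click rather than a drag.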
  seekEnd(x: number): void {
    const hasSameZoom: boolean = this.zoomOnMouseDown ===
      this.timeline.timeContext.zoom;
    const hasSameOffset: boolean = this.offsetOnMouseDown ===
      this.timeline.timeContext.offset;
    if (hasSameZoom && hasSameOffset) {
      this.seek(x);
    }
  }

  seek(x: number): void {
    if (this.timeline) {
      const timeContext: any = this.timeline.timeContext;
      if (this.isSeeking) {
        this.audioService.seekTo(
          timeContext.timeToPixel.invert(x) - timeContext.offset
        );
      }
    }
  }
}