import {
  Component, OnInit, ViewChild, ElementRef, Input, AfterViewInit, NgZone,
  OnDestroy, ChangeDetectorRef
} from '@angular/core';
import {
  AudioPlayerService, AudioResource,
  AudioResourceError
} from '../services/audio-player/audio-player.service';
import wavesUI from 'waves-ui';
import {
  FeatureExtractionService
} from '../services/feature-extraction/feature-extraction.service';
import {Subscription} from 'rxjs';
import {
  FeatureCollection,
  FixedSpacedFeatures, SimpleResponse
} from 'piper/HigherLevelUtilities';
import {toSeconds} from 'piper';
import {FeatureList, Feature} from 'piper/Feature';
import * as Hammer from 'hammerjs';
import {WavesSpectrogramLayer} from '../spectrogram/Spectrogram';

// waves-ui ships without TypeScript typings, so the types taken from it
// are aliased to `any`; Timeline is needed for the annotations below.
type Layer = any;
type Track = any;
type Timeline = any;
type Colour = string;

@Component({
  selector: 'app-waveform',
  templateUrl: './waveform.component.html',
  styleUrls: ['./waveform.component.css']
})
export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {

  @ViewChild('track') trackDiv: ElementRef;

  @Input() timeline: Timeline;
  @Input() trackIdPrefix: string;
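
  // While subscribed, each result emitted by the feature-extraction
  // service is rendered as a new layer over the waveform, coloured by
  // cycling through the small fixed palette set up below.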
  @Input() set isSubscribedToExtractionService(isSubscribed: boolean) {
    if (isSubscribed) {
      if (this.featureExtractionSubscription) {
        return;
      }

      const colours = function* () {
        const circularColours = [
          'black',
          'red',
          'green',
          'purple',
          'orange'
        ];
        let index = 0;
        const nColours = circularColours.length;
        while (true) {
          yield circularColours[index = ++index % nColours];
        }
      }();

      this.featureExtractionSubscription =
        this.piperService.featuresExtracted$.subscribe(
          features => {
            this.renderFeatures(features, colours.next().value);
          });
    } else {
      if (this.featureExtractionSubscription) {
        this.featureExtractionSubscription.unsubscribe();
      }
    }
  }

  @Input() set isSubscribedToAudioService(isSubscribed: boolean) {
    this._isSubscribedToAudioService = isSubscribed;
    if (isSubscribed) {
      if (this.onAudioDataSubscription) {
        return;
      }

      this.onAudioDataSubscription =
        this.audioService.audioLoaded$.subscribe(res => {
          const wasError = (res as AudioResourceError).message != null;

          if (wasError) {
            console.warn('No audio, display error?');
          } else {
            this.audioBuffer = (res as AudioResource).samples;
          }
        });
    } else {
      if (this.onAudioDataSubscription) {
        this.onAudioDataSubscription.unsubscribe();
      }
    }
  }

  get isSubscribedToAudioService(): boolean {
    return this._isSubscribedToAudioService;
  }

  @Input() set isOneShotExtractor(isOneShot: boolean) {
    this._isOneShotExtractor = isOneShot;
  }

  get isOneShotExtractor(): boolean {
    return this._isOneShotExtractor;
  }

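  // While seeking is enabled, listen for seeks and play-state changes
  // from the audio service and restart the cursor animation as needed.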
  @Input() set isSeeking(isSeeking: boolean) {
    this._isSeeking = isSeeking;
    if (isSeeking) {
      if (this.seekedSubscription) {
        return;
      }
      if (this.playingStateSubscription) {
        return;
      }

      this.seekedSubscription = this.audioService.seeked$.subscribe(() => {
        if (!this.isPlaying) {
          this.animate();
        }
      });
      this.playingStateSubscription =
        this.audioService.playingStateChange$.subscribe(
          isPlaying => {
            this.isPlaying = isPlaying;
            if (this.isPlaying) {
              this.animate();
            }
          });
    } else {
      if (this.isPlaying) {
        this.isPlaying = false;
      }
      if (this.playingStateSubscription) {
        this.playingStateSubscription.unsubscribe();
      }
      if (this.seekedSubscription) {
        this.seekedSubscription.unsubscribe();
      }
    }
  }

  get isSeeking(): boolean {
    return this._isSeeking;
  }

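  // Assigning a decoded AudioBuffer triggers a (re-)render of the
  // waveform; assigning a falsy value just clears the stored buffer.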
  set audioBuffer(buffer: AudioBuffer) {
    this._audioBuffer = buffer || undefined;
    if (this.audioBuffer) {
      this.renderWaveform(this.audioBuffer);
      // this.renderSpectrogram(this.audioBuffer);
    }
  }

  get audioBuffer(): AudioBuffer {
    return this._audioBuffer;
  }

  private _audioBuffer: AudioBuffer;
  private _isSubscribedToAudioService: boolean;
  private _isOneShotExtractor: boolean;
  private _isSeeking: boolean;
  private cursorLayer: any;
  private layers: Layer[];
  private featureExtractionSubscription: Subscription;
  private playingStateSubscription: Subscription;
  private seekedSubscription: Subscription;
  private onAudioDataSubscription: Subscription;
  private isPlaying: boolean;
  private zoomOnMouseDown: number;
  private offsetOnMouseDown: number;
  private hasShot: boolean;
  private isLoading: boolean;

  constructor(private audioService: AudioPlayerService,
              private piperService: FeatureExtractionService,
              private ngZone: NgZone,
              private ref: ChangeDetectorRef) {
    this.isSubscribedToAudioService = true;
    this.isSeeking = true;
    this.layers = [];
    this.audioBuffer = undefined;
    this.timeline = undefined;
    this.cursorLayer = undefined;
    this.isPlaying = false;
    this.isLoading = true;
  }

  ngOnInit() {
  }

  ngAfterViewInit(): void {
    this.trackIdPrefix = this.trackIdPrefix || 'default';
    if (this.timeline) {
      this.renderTimeline(null, true, true);
    } else {
      this.renderTimeline();
    }
  }

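  // Builds (or resizes) the waves-ui timeline inside the track div and
  // creates the wave track; on touch devices it also wires up horizontal
  // pan via HammerJS and pinch-zoom via raw touch events.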
  renderTimeline(duration: number = 1.0,
                 useExistingDuration: boolean = false,
                 isInitialRender: boolean = false): Timeline {
    const track: HTMLElement = this.trackDiv.nativeElement;
    track.innerHTML = '';
    const height: number = track.getBoundingClientRect().height;
    const width: number = track.getBoundingClientRect().width;
    const pixelsPerSecond = width / duration;
    const hasExistingTimeline = this.timeline instanceof wavesUI.core.Timeline;

    if (hasExistingTimeline) {
      if (!useExistingDuration) {
        this.timeline.pixelsPerSecond = pixelsPerSecond;
        this.timeline.visibleWidth = width;
      }
    } else {
      this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
    }
    const waveTrack = this.timeline.createTrack(
      track,
      height,
      `wave-${this.trackIdPrefix}`
    );
    if (isInitialRender && hasExistingTimeline) {
      // time axis
      const timeAxis = new wavesUI.helpers.TimeAxisLayer({
        height: height,
        color: '#b0b0b0'
      });
      this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);
      this.cursorLayer = new wavesUI.helpers.CursorLayer({
        height: height
      });
      this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    }
    if ('ontouchstart' in window) {
      interface Point {
        x: number;
        y: number;
      }

      let zoomGestureJustEnded: boolean = false;

      const pixelToExponent: Function = wavesUI.utils.scales.linear()
        .domain([0, 100]) // 100px => factor 2
        .range([0, 1]);

      const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
        return Math.pow(
          Math.pow(p2.x - p1.x, 2) +
          Math.pow(p2.y - p1.y, 2), 0.5);
      };

      const calculateMidPoint: (p1: Point, p2: Point) => Point = (p1, p2) => {
        return {
          x: 0.5 * (p1.x + p2.x),
          y: 0.5 * (p1.y + p2.y)
        };
      };

      const hammertime = new Hammer.Manager(this.trackDiv.nativeElement, {
        recognizers: [
          [Hammer.Pan, { direction: Hammer.DIRECTION_HORIZONTAL }]
        ]
      });

      // HammerJS appears to bind its events at the window level, so they
      // can propagate to other components; capture this component's
      // timeline explicitly rather than relying on `this` in the handlers.
      const componentTimeline = this.timeline;
      let initialZoom;
      let initialDistance;
      let offsetAtPanStart;
      let startX;
      let isZooming;

      const scroll = (ev) => {
        if (ev.center.x - startX === 0) return;
        if (zoomGestureJustEnded) {
          zoomGestureJustEnded = false;
          console.log('Skip this event: likely a single touch dangling from pinch');
          return;
        }
        componentTimeline.timeContext.offset = offsetAtPanStart +
          componentTimeline.timeContext.timeToPixel.invert(ev.deltaX);
        componentTimeline.tracks.update();
      };

      const zoom = (ev) => {
        if (ev.touches.length < 2) return;
        ev.preventDefault();
        const minZoom = componentTimeline.state.minZoom;
        const maxZoom = componentTimeline.state.maxZoom;
        const p1: Point = {
          x: ev.touches[0].clientX,
          y: ev.touches[0].clientY
        };
        const p2: Point = {
          x: ev.touches[1].clientX,
          y: ev.touches[1].clientY
        };
        const distance = calculateDistance(p1, p2);
        const midPoint = calculateMidPoint(p1, p2);

        const lastCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        const exponent = pixelToExponent(distance - initialDistance);
        const targetZoom = initialZoom * Math.pow(2, exponent);

        componentTimeline.timeContext.zoom =
          Math.min(Math.max(targetZoom, minZoom), maxZoom);

        const newCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        componentTimeline.timeContext.offset += newCenterTime - lastCenterTime;
        componentTimeline.tracks.update();
      };
      hammertime.on('panstart', (ev) => {
        offsetAtPanStart = componentTimeline.timeContext.offset;
        startX = ev.center.x;
      });
      hammertime.on('panleft', scroll);
      hammertime.on('panright', scroll);

      const element: HTMLElement = this.trackDiv.nativeElement;
      element.addEventListener('touchstart', (e) => {
        if (e.touches.length < 2) return;
        isZooming = true;
        initialZoom = componentTimeline.timeContext.zoom;

        initialDistance = calculateDistance({
          x: e.touches[0].clientX,
          y: e.touches[0].clientY
        }, {
          x: e.touches[1].clientX,
          y: e.touches[1].clientY
        });
      });
      element.addEventListener('touchend', () => {
        if (isZooming) {
          isZooming = false;
          zoomGestureJustEnded = true;
        }
      });
      element.addEventListener('touchmove', zoom);
    }
    // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`);
    // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`);
    return this.timeline;
  }

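  // Worked example (hypothetical sizes): for a 1000 x 100 matrix,
  // n = 100000, so m is capped at 50000 and m_per = 50 random values are
  // drawn from each column; the percentile is then read from the sorted
  // subsample at index floor(sample.length * percentile / 100).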
  estimatePercentile(matrix, percentile) {
    // our sample is not evenly distributed across the whole data set:
    // it is guaranteed to include at least one sample from every
    // column, and could sample some values more than once. But it
    // should be good enough in most cases (todo: show this)
    if (matrix.length === 0) {
      return 0.0;
    }
    const w = matrix.length;
    const h = matrix[0].length;
    const n = w * h;
    const m = (n > 50000 ? 50000 : n); // should base that on the %ile
    let m_per = Math.floor(m / w);
    if (m_per < 1) m_per = 1;
    const sample = [];
    for (let x = 0; x < w; ++x) {
      for (let i = 0; i < m_per; ++i) {
        const y = Math.floor(Math.random() * h);
        const value = matrix[x][y];
        if (!isNaN(value) && value !== Infinity) {
          sample.push(value);
        }
      }
    }
    if (sample.length === 0) {
      console.log('WARNING: No samples gathered, even though we hoped for ' +
        (m_per * w) + ' of them');
      return 0.0;
    }
    sample.sort((a, b) => a - b);
    const ix = Math.floor((sample.length * percentile) / 100);
    console.log('Estimating ' + percentile + '-%ile of ' +
      n + '-sample dataset (' + w + ' x ' + h + ') as value ' + ix +
      ' of sorted ' + sample.length + '-sample subset');
    const estimate = sample[ix];
    console.log('Estimate is: ' + estimate + ' (where min sampled value = ' +
      sample[0] + ' and max = ' + sample[sample.length - 1] + ')');
    return estimate;
  }

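  // Maps a value in [0, 1] to an RGB triple by linear interpolation
  // between hex colour stops: each stop is split into RGB channels in
  // [0, 1] (e.g. 'ff8000' parses to roughly [1.0, 0.5, 0.0]), and a
  // value midway between two stops blends them equally.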
  interpolatingMapper(hexColours) {
    const colours = hexColours.map(n => {
      const i = parseInt(n, 16);
      return [ ((i >> 16) & 255) / 255.0,
               ((i >> 8) & 255) / 255.0,
               ((i) & 255) / 255.0 ];
    });
    const last = colours.length - 1;
    return (value => {
      const m = value * last;
      if (m >= last) {
        return colours[last];
      }
      if (m <= 0) {
        return colours[0];
      }
      const base = Math.floor(m);
      const prop0 = base + 1.0 - m;
      const prop1 = m - base;
      const c0 = colours[base];
      const c1 = colours[base + 1];
      return [ c0[0] * prop0 + c1[0] * prop1,
               c0[1] * prop0 + c1[1] * prop1,
               c0[2] * prop0 + c1[2] * prop1 ];
    });
  }

  iceMapper() {
    const hexColours = [
      // Based on ColorBrewer ylGnBu
      'ffffff', 'ffff00', 'f7fcf0', 'e0f3db', 'ccebc5', 'a8ddb5',
      '7bccc4', '4eb3d3', '2b8cbe', '0868ac', '084081', '042040'
    ];
    hexColours.reverse();
    return this.interpolatingMapper(hexColours);
  }

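  // Standard sector-based HSV-to-RGB conversion; e.g. hsv2rgb(0, 1, 1)
  // yields pure red, [1, 0, 0].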
  hsv2rgb(h, s, v) { // all values in range [0, 1]
    const i = Math.floor(h * 6);
    const f = h * 6 - i;
    const p = v * (1 - s);
    const q = v * (1 - f * s);
    const t = v * (1 - (1 - f) * s);
    let r = 0, g = 0, b = 0;
    switch (i % 6) {
      case 0: r = v; g = t; b = p; break;
      case 1: r = q; g = v; b = p; break;
      case 2: r = p; g = v; b = t; break;
      case 3: r = p; g = q; b = v; break;
      case 4: r = t; g = p; b = v; break;
      case 5: r = v; g = p; b = q; break;
    }
    return [ r, g, b ];
  }

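  // Two hand-tuned colour maps: greenMapper sweeps the hue from blue
  // through green towards red as the value rises, while sunsetMapper
  // derives its RGB channels directly from linear ramps of the value.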
  greenMapper() {
    const blue = 0.6666;
    const pieslice = 0.3333;
    return (value => {
      const h = blue - value * 2.0 * pieslice;
      const s = 0.5 + value / 2.0;
      const v = value;
      return this.hsv2rgb(h, s, v);
    });
  }

  sunsetMapper() {
    return (value => {
      let r = (value - 0.24) * 2.38;
      let g = (value - 0.64) * 2.777;
      let b = (3.6 * value);
      if (value > 0.277) b = 2.0 - b;
      return [ r, g, b ];
    });
  }

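  // Tears down only the layers this component added (checked against
  // this.layers), detaching each from its track and from the shared
  // timeContext before destroying it, so layers belonging to other
  // components on the same timeline survive.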
  clearTimeline(): void {
    // loop through layers and remove them; waves-ui provides methods for
    // this, but they don't seem to work reliably
    const timeContextChildren = this.timeline.timeContext._children;
    for (const track of this.timeline.tracks) {
      if (track.layers.length === 0) { continue; }
      const trackLayers = Array.from(track.layers);
      while (trackLayers.length) {
        const layer: Layer = trackLayers.pop();
        if (this.layers.includes(layer)) {
          track.remove(layer);
          this.layers.splice(this.layers.indexOf(layer), 1);
          const index = timeContextChildren.indexOf(layer.timeContext);
          if (index >= 0) {
            timeContextChildren.splice(index, 1);
          }
          layer.destroy();
        }
      }
    }
  }

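  // Renders the waveform for a decoded buffer: reuses and resizes an
  // existing timeline (clearing this component's layers first) or builds
  // a fresh one, then adds a time axis, one waveform layer per channel,
  // and the playback cursor.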
  renderWaveform(buffer: AudioBuffer): void {
    // const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height;
    if (this.timeline) {
      // resize
      const width = this.trackDiv.nativeElement.getBoundingClientRect().width;

      this.clearTimeline();

      this.timeline.visibleWidth = width;
      this.timeline.pixelsPerSecond = width / buffer.duration;
    } else {
      this.renderTimeline(buffer.duration);
    }
    // look the track up only once the timeline is guaranteed to exist
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);
    waveTrack.height = height;
    this.timeline.timeContext.offset =
      0.5 * this.timeline.timeContext.visibleDuration;

    // time axis
    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
      height: height,
      color: '#b0b0b0'
    });
    this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);

    const nchannels = buffer.numberOfChannels;
    const totalWaveHeight = height * 0.9;
    const waveHeight = totalWaveHeight / nchannels;

    for (let ch = 0; ch < nchannels; ++ch) {
      console.log('about to construct a waveform layer for channel ' + ch);
      const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
        top: (height - totalWaveHeight) / 2 + waveHeight * ch,
        height: waveHeight,
        color: 'darkblue',
        channel: ch
      });
      this.addLayer(waveformLayer, waveTrack, this.timeline.timeContext);
    }

    this.cursorLayer = new wavesUI.helpers.CursorLayer({
      height: height
    });
    this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
    waveTrack.render();
    waveTrack.update();

    this.isLoading = false;
    this.ref.markForCheck();
    this.animate();
  }

  renderSpectrogram(buffer: AudioBuffer): void {
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`);

    const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
      top: 0,
      height: height,
      stepSize: 512,
      blockSize: 1024,
      normalise: 'none',
      mapper: this.sunsetMapper()
    });
    this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);

    this.timeline.tracks.update();
  }

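  // Renders one extracted feature set as a layer chosen by its shape:
  // 'vector' becomes a normalised line plot, 'list' becomes ticks
  // (instants) or segments (regions), and 'matrix' becomes a grid layer
  // with gain derived from the estimated 95th percentile.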
  // TODO refactor - this doesn't belong here
  private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
    if (this.isOneShotExtractor && !this.hasShot) {
      this.featureExtractionSubscription.unsubscribe();
      this.hasShot = true;
    }

    if (!extracted.hasOwnProperty('features') ||
        !extracted.hasOwnProperty('outputDescriptor')) return;
    if (!extracted.features.hasOwnProperty('shape') ||
        !extracted.features.hasOwnProperty('data')) return;
    const features: FeatureCollection = (extracted.features as FeatureCollection);
    const outputDescriptor = extracted.outputDescriptor;
    // const height = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);

    // TODO refactor all of this
    switch (features.shape) {
      case 'vector': {
        const stepDuration = (features as FixedSpacedFeatures).stepDuration;
        const featureData = (features.data as Float32Array);
        if (featureData.length === 0) return;
        const normalisationFactor = 1.0 /
          featureData.reduce(
            (currentMax, feature) => Math.max(currentMax, feature),
            -Infinity
          );

        const plotData = [...featureData].map((feature, i) => {
          return {
            cx: i * stepDuration,
            cy: feature * normalisationFactor
          };
        });

        let lineLayer = new wavesUI.helpers.LineLayer(plotData, {
          color: colour,
          height: height
        });
        this.addLayer(
          lineLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      case 'list': {
        const featureData = (features.data as FeatureList);
        if (featureData.length === 0) return;
        // TODO look at output descriptor instead of directly inspecting features
        const hasDuration = outputDescriptor.configured.hasDuration;
        const isMarker = !hasDuration
          && outputDescriptor.configured.binCount === 0
          && featureData[0].featureValues == null;
        const isRegion = hasDuration
          && featureData[0].timestamp != null;
        console.log('Have list features: length ' + featureData.length +
          ', isMarker ' + isMarker + ', isRegion ' + isRegion +
          ', hasDuration ' + hasDuration);
        // TODO refactor, this is incomprehensible
        if (isMarker) {
          const plotData = featureData.map(feature => {
            return {
              time: toSeconds(feature.timestamp),
              label: feature.label
            };
          });
          let featureLayer = new wavesUI.helpers.TickLayer(plotData, {
            height: height,
            color: colour,
            labelPosition: 'bottom',
            shadeSegments: true
          });
          this.addLayer(
            featureLayer,
            waveTrack,
            this.timeline.timeContext
          );
        } else if (isRegion) {
          console.log('Output is of region type');
          const binCount = outputDescriptor.configured.binCount || 0;
          const isBarRegion = featureData[0].featureValues.length >= 1 || binCount >= 1;
          const getSegmentArgs = () => {
            if (isBarRegion) {

              // TODO refactor - this is messy
              interface FoldsToNumber<T> {
                reduce(fn: (previousValue: number,
                            currentValue: T,
                            currentIndex: number,
                            array: ArrayLike<T>) => number,
                       initialValue?: number): number;
              }

              // TODO potentially change impl., i.e avoid reduce
              const findMin = <T>(arr: FoldsToNumber<T>,
                                  getElement: (x: T) => number): number => {
                return arr.reduce((min, val) => Math.min(min, getElement(val)), Infinity);
              };

              const findMax = <T>(arr: FoldsToNumber<T>,
                                  getElement: (x: T) => number): number => {
                return arr.reduce((max, val) => Math.max(max, getElement(val)), -Infinity);
              };

              const min = findMin<Feature>(featureData, (x: Feature) => {
                return findMin<number>(x.featureValues, y => y);
              });

              const max = findMax<Feature>(featureData, (x: Feature) => {
                return findMax<number>(x.featureValues, y => y);
              });

              const barHeight = 1.0 / height;
              return [
                featureData.reduce((bars, feature) => {
                  const staticProperties = {
                    x: toSeconds(feature.timestamp),
                    width: toSeconds(feature.duration),
                    height: min + barHeight,
                    color: colour,
                    opacity: 0.8
                  };
                  // TODO avoid copying Float32Array to an array - map is problematic here
                  return bars.concat([...feature.featureValues]
                    .map(val => Object.assign({}, staticProperties, {y: val})));
                }, []),
                {yDomain: [min, max + barHeight], height: height} as any
              ];
            } else {
              return [featureData.map(feature => {
                return {
                  x: toSeconds(feature.timestamp),
                  width: toSeconds(feature.duration),
                  color: colour,
                  opacity: 0.8
                };
              }), {height: height}];
            }
          };

          let segmentLayer = new wavesUI.helpers.SegmentLayer(
            ...getSegmentArgs()
          );
          this.addLayer(
            segmentLayer,
            waveTrack,
            this.timeline.timeContext
          );
        }
        break;
      }
      case 'matrix': {
        const stepDuration = (features as FixedSpacedFeatures).stepDuration;
        //!!! + start time
        const matrixData = (features.data as Float32Array[]);
        if (matrixData.length === 0) return;
        console.log('matrix data length = ' + matrixData.length);
        console.log('height of first column = ' + matrixData[0].length);
        const targetValue = this.estimatePercentile(matrixData, 95);
        const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
        console.log('setting gain to ' + gain);
        const matrixEntity =
          new wavesUI.utils.PrefilledMatrixEntity(matrixData,
            0, // startTime
            stepDuration);
        let matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
          gain,
          top: 0,
          height: height,
          normalise: 'none',
          mapper: this.iceMapper()
        });
        this.addLayer(
          matrixLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      default:
        console.log("Cannot render an appropriate layer for feature shape '" +
          features.shape + "'");
    }

    this.isLoading = false;
    this.ref.markForCheck();
    this.timeline.tracks.update();
  }

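  // Drives the cursor from requestAnimationFrame outside the Angular
  // zone so that per-frame updates skip change detection; when the
  // cursor leaves the visible range, the view pages by one visible
  // duration, or recentres on the cursor after a large skip.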
  private animate(): void {
    if (!this.isSeeking) return;

    this.ngZone.runOutsideAngular(() => {
      // listen for time passing...
      const updateSeekingCursor = () => {
        const currentTime = this.audioService.getCurrentTime();
        this.cursorLayer.currentPosition = currentTime;
        this.cursorLayer.update();

        const currentOffset = this.timeline.timeContext.offset;
        const offsetTimestamp = currentOffset + currentTime;

        const visibleDuration = this.timeline.timeContext.visibleDuration;
        // TODO reduce duplication between directions and make more declarative
        // this kinda logic should also be tested
        const mustPageForward = offsetTimestamp > visibleDuration;
        const mustPageBackward = currentTime < -currentOffset;

        if (mustPageForward) {
          const hasSkippedMultiplePages =
            offsetTimestamp - visibleDuration > visibleDuration;

          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset - visibleDuration;
          this.timeline.tracks.update();
        }

        if (mustPageBackward) {
          const hasSkippedMultiplePages =
            currentTime + visibleDuration < -currentOffset;
          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset + visibleDuration;
          this.timeline.tracks.update();
        }

        if (this.isPlaying) {
          requestAnimationFrame(updateSeekingCursor);
        }
      };
      updateSeekingCursor();
    });
  }

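  // Adds a layer to a track, giving it its own LayerTimeContext unless
  // it is an axis (axes share the timeline's context directly), then
  // re-appends the cursor element so the cursor stays on top.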
  private addLayer(layer: Layer,
                   track: Track,
                   timeContext: any,
                   isAxis: boolean = false): void {
    timeContext.zoom = 1.0;
    if (!layer.timeContext) {
      layer.setTimeContext(isAxis ?
        timeContext : new wavesUI.core.LayerTimeContext(timeContext));
    }
    track.add(layer);
    this.layers.push(layer);
    layer.render();
    layer.update();
    if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
      // re-appending an existing child moves it to the end, i.e. on top
      track.$layout.appendChild(this.cursorLayer.$el);
    }
  }

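  // Recolours an already-rendered layer by poking waves-ui's private
  // shape maps; there appears to be no public API for restyling a layer
  // in place.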
  private static changeColour(layer: Layer, colour: string): void {
    const butcherShapes = (shape) => {
      shape.install({color: () => colour});
      shape.params.color = colour;
      shape.update(layer._renderingContext, layer.data);
    };

    layer._$itemCommonShapeMap.forEach(butcherShapes);
    layer._$itemShapeMap.forEach(butcherShapes);
    layer.render();
    layer.update();
  }

  ngOnDestroy(): void {
    if (this.featureExtractionSubscription) {
      this.featureExtractionSubscription.unsubscribe();
    }
    if (this.playingStateSubscription) {
      this.playingStateSubscription.unsubscribe();
    }
    if (this.seekedSubscription) {
      this.seekedSubscription.unsubscribe();
    }
    if (this.onAudioDataSubscription) {
      this.onAudioDataSubscription.unsubscribe();
    }
  }

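  // seekStart/seekEnd distinguish a plain click from a drag or zoom
  // gesture: the seek fires only if neither zoom nor offset changed
  // while the pointer was down. The target time is the clicked pixel
  // converted to time, minus the current offset.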
  seekStart(): void {
    this.zoomOnMouseDown = this.timeline.timeContext.zoom;
    this.offsetOnMouseDown = this.timeline.timeContext.offset;
  }

  seekEnd(x: number): void {
    const hasSameZoom: boolean = this.zoomOnMouseDown ===
      this.timeline.timeContext.zoom;
    const hasSameOffset: boolean = this.offsetOnMouseDown ===
      this.timeline.timeContext.offset;
    if (hasSameZoom && hasSameOffset) {
      this.seek(x);
    }
  }

  seek(x: number): void {
    if (this.timeline) {
      const timeContext: any = this.timeline.timeContext;
      if (this.isSeeking) {
        this.audioService.seekTo(
          timeContext.timeToPixel.invert(x) - timeContext.offset
        );
      }
    }
  }
}