import {
  Component, OnInit, ViewChild, ElementRef, Input, AfterViewInit, NgZone,
  OnDestroy
} from '@angular/core';
import {AudioPlayerService} from "../services/audio-player/audio-player.service";
import wavesUI from 'waves-ui';
import {
  FeatureExtractionService
} from "../services/feature-extraction/feature-extraction.service";
import {Subscription} from "rxjs";
import {
  FeatureCollection,
  FixedSpacedFeatures, SimpleResponse
} from "piper/HigherLevelUtilities";
import {toSeconds} from "piper";
import {FeatureList, Feature} from "piper/Feature";
import * as Hammer from 'hammerjs';
import {WavesSpectrogramLayer} from "../spectrogram/Spectrogram";
import {PartialEventEmitter} from "../notebook-feed/notebook-feed.component";

type Layer = any;
type Track = any;
type Colour = string;
// Assumed aliases, added so the Timeline references below type-check:
// waves-ui objects are untyped in this codebase, as with Layer and Track above.
type Timeline = any;
type TimelineTimeContext = any;

@Component({
  selector: 'app-waveform',
  templateUrl: './waveform.component.html',
  styleUrls: ['./waveform.component.css']
})
export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {

  @ViewChild('track') trackDiv: ElementRef;

  @Input() timeContext: TimelineTimeContext & PartialEventEmitter;
  private _audioBuffer: AudioBuffer;
  private timeline: Timeline;
  private cursorLayer: any;

  @Input()
  set audioBuffer(buffer: AudioBuffer) {
    this._audioBuffer = buffer || undefined;
    if (this.audioBuffer) {
      this.renderWaveform(this.audioBuffer);
      // this.renderSpectrogram(this.audioBuffer);
    }
  }

  get audioBuffer(): AudioBuffer {
    return this._audioBuffer;
  }

  private featureExtractionSubscription: Subscription;
  private playingStateSubscription: Subscription;
  private seekedSubscription: Subscription;
  private isPlaying: boolean;
  private offsetAtPanStart: number;
  private initialZoom: number;
  private initialDistance: number;
  private zoomOnMouseDown: number;
  private offsetOnMouseDown: number;

  constructor(private audioService: AudioPlayerService,
              private piperService: FeatureExtractionService,
              public ngZone: NgZone) {
    this._audioBuffer = undefined;
    this.timeline = undefined;
    this.cursorLayer = undefined;
    this.isPlaying = false;
    const colours = function* () {
      const circularColours = [
        'black',
        'red',
        'green',
        'purple',
        'orange'
      ];
      let index = 0;
      const nColours = circularColours.length;
      while (true) {
        yield circularColours[index = ++index % nColours];
      }
    }();

    this.featureExtractionSubscription = piperService.featuresExtracted$.subscribe(
      features => {
        this.renderFeatures(features, colours.next().value);
      });
    this.playingStateSubscription = audioService.playingStateChange$.subscribe(
      isPlaying => {
        this.isPlaying = isPlaying;
        if (this.isPlaying) {
          this.animate();
        }
      });
    this.seekedSubscription = audioService.seeked$.subscribe(() => {
      if (!this.isPlaying) {
        this.animate();
      }
    });
  }

  ngOnInit() {
  }

  ngAfterViewInit(): void {
    this.renderTimeline();
  }

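  // Creates (or resizes) the waves-ui timeline and its 'wave' track inside the
  // #track element, adopting an externally supplied time context when one is
  // provided through the timeContext input.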
  renderTimeline(duration: number = 1.0): Timeline {
    const track: HTMLElement = this.trackDiv.nativeElement;
    track.innerHTML = "";
    const height: number = track.getBoundingClientRect().height;
    const width: number = track.getBoundingClientRect().width;
    const pixelsPerSecond = width / duration;
    if (this.timeline instanceof wavesUI.core.Timeline) {
      this.timeline.pixelsPerSecond = pixelsPerSecond;
      this.timeline.visibleWidth = width;
    } else {
      this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
    }
    if (this.timeContext instanceof wavesUI.core.TimelineTimeContext) {
      console.warn('Has shared timeline');
      this.timeline.timeContext = this.timeContext;
      this.timeContext.on('zoom', () => {
        this.timeline.tracks.update();
      });
      this.timeContext.on('offset', () => {
        this.timeline.tracks.update();
      });
    }
    this.timeline.createTrack(track, height, 'wave');
    // this.timeline.createTrack(track, height/2, 'wave');
    // this.timeline.createTrack(track, height/2, 'grid');
    return this.timeline; // the signature declares a Timeline return value
  }

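  // Estimates the given percentile of a matrix of values by sorting a random
  // sample (at least one value per column, capped at 50000 overall) rather
  // than the full data set.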
  estimatePercentile(matrix, percentile) {
    // Our sample is not evenly distributed across the whole data set:
    // it is guaranteed to include at least one sample from every
    // column, and could sample some values more than once. But it
    // should be good enough in most cases (todo: show this).
    if (matrix.length === 0) {
      return 0.0;
    }
    const w = matrix.length;
    const h = matrix[0].length;
    const n = w * h;
    const m = (n > 50000 ? 50000 : n); // should base that on the %ile
    let m_per = Math.floor(m / w);
    if (m_per < 1) m_per = 1;
    const sample = [];
    for (let x = 0; x < w; ++x) {
      for (let i = 0; i < m_per; ++i) {
        const y = Math.floor(Math.random() * h);
        const value = matrix[x][y];
        if (!isNaN(value) && value !== Infinity) {
          sample.push(value);
        }
      }
    }
    if (sample.length === 0) {
      console.log("WARNING: No samples gathered, even though we hoped for " +
        (m_per * w) + " of them");
      return 0.0;
    }
    sample.sort((a, b) => a - b);
    const ix = Math.floor((sample.length * percentile) / 100);
    console.log("Estimating " + percentile + "-%ile of " +
      n + "-sample dataset (" + w + " x " + h + ") as value " + ix +
      " of sorted " + sample.length + "-sample subset");
    const estimate = sample[ix];
    console.log("Estimate is: " + estimate + " (where min sampled value = " +
      sample[0] + " and max = " + sample[sample.length - 1] + ")");
    return estimate;
  }

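  // Returns a mapper from a value in [0, 1] to an [r, g, b] triple (each
  // component in [0, 1]), interpolating linearly between the given hex colour
  // stops.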
  interpolatingMapper(hexColours) {
    const colours = hexColours.map(n => {
      const i = parseInt(n, 16);
      return [ ((i >> 16) & 255) / 255.0,
               ((i >> 8) & 255) / 255.0,
               (i & 255) / 255.0 ];
    });
    const last = colours.length - 1;
    return (value => {
      const m = value * last;
      if (m >= last) {
        return colours[last];
      }
      if (m <= 0) {
        return colours[0];
      }
      const base = Math.floor(m);
      const prop0 = base + 1.0 - m;
      const prop1 = m - base;
      const c0 = colours[base];
      const c1 = colours[base + 1];
      return [ c0[0] * prop0 + c1[0] * prop1,
               c0[1] * prop0 + c1[1] * prop1,
               c0[2] * prop0 + c1[2] * prop1 ];
    });
  }

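  // An "ice" colour mapper built from a reversed ColorBrewer YlGnBu ramp.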
  iceMapper() {
    const hexColours = [
      // Based on ColorBrewer ylGnBu
      "ffffff", "ffff00", "f7fcf0", "e0f3db", "ccebc5", "a8ddb5",
      "7bccc4", "4eb3d3", "2b8cbe", "0868ac", "084081", "042040"
    ];
    hexColours.reverse();
    return this.interpolatingMapper(hexColours);
  }

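  // Converts an HSV colour (all components in [0, 1]) to an [r, g, b] triple.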
  hsv2rgb(h, s, v) { // all values in range [0, 1]
    const i = Math.floor(h * 6);
    const f = h * 6 - i;
    const p = v * (1 - s);
    const q = v * (1 - f * s);
    const t = v * (1 - (1 - f) * s);
    let r = 0, g = 0, b = 0;
    switch (i % 6) {
      case 0: r = v; g = t; b = p; break;
      case 1: r = q; g = v; b = p; break;
      case 2: r = p; g = v; b = t; break;
      case 3: r = p; g = q; b = v; break;
      case 4: r = t; g = p; b = v; break;
      case 5: r = v; g = p; b = q; break;
    }
    return [ r, g, b ];
  }

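  // A green-tinted mapper: the hue sweeps away from blue as the value rises,
  // with saturation and brightness scaled by the value.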
  greenMapper() {
    const blue = 0.6666;
    const pieslice = 0.3333;
    return (value => {
      const h = blue - value * 2.0 * pieslice;
      const s = 0.5 + value / 2.0;
      const v = value;
      return this.hsv2rgb(h, s, v);
    });
  }

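  // A "sunset" mapper: maps the value straight to r, g and b with fixed
  // linear ramps, folding the blue component back down above 0.277.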
  sunsetMapper() {
    return (value => {
      const r = (value - 0.24) * 2.38;
      const g = (value - 0.64) * 2.777;
      let b = (3.6 * value);
      if (value > 0.277) b = 2.0 - b;
      return [ r, g, b ];
    });
  }

  clearTimeline(): void {
    // Loop through layers and remove them. waves-ui provides methods for
    // this, but they don't seem to work properly.
    const timeContextChildren = this.timeline.timeContext._children;
    for (const track of this.timeline.tracks) {
      if (track.layers.length === 0) { continue; }
      const trackLayers = Array.from(track.layers);
      while (trackLayers.length) {
        const layer: Layer = trackLayers.pop();
        track.remove(layer);

        const index = timeContextChildren.indexOf(layer.timeContext);
        if (index >= 0) {
          timeContextChildren.splice(index, 1);
        }
        layer.destroy();
      }
    }
  }

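  // Renders the waveform, time axis and playback cursor for the given buffer
  // on the 'wave' track, resizing the timeline if one already exists, and
  // wires up touch gestures (pan to scroll, pinch to zoom) on touch devices.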
  renderWaveform(buffer: AudioBuffer): void {
    // const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height;
    if (this.timeline) {
      // resize
      const width = this.trackDiv.nativeElement.getBoundingClientRect().width;

      this.clearTimeline();

      this.timeline.visibleWidth = width;
      this.timeline.pixelsPerSecond = width / buffer.duration;
      this.timeline.getTrackById('wave').height = height;
    } else {
      this.renderTimeline(buffer.duration);
    }
    // look the track up only once the timeline is guaranteed to exist
    const waveTrack = this.timeline.getTrackById('wave');
    this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;

    // time axis
    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
      height: height,
      color: '#b0b0b0'
    });
    this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);

    const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
      top: 10,
      height: height * 0.9,
      color: 'darkblue'
    });
    this.addLayer(waveformLayer, waveTrack, this.timeline.timeContext);

    this.cursorLayer = new wavesUI.helpers.CursorLayer({
      height: height
    });
    this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
    waveTrack.render();
    waveTrack.update();

    if ('ontouchstart' in window) {
      interface Point {
        x: number;
        y: number;
      }

      let zoomGestureJustEnded: boolean = false;

      const pixelToExponent: Function = wavesUI.utils.scales.linear()
        .domain([0, 100]) // 100px => factor 2
        .range([0, 1]);

      const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
        return Math.pow(
          Math.pow(p2.x - p1.x, 2) +
          Math.pow(p2.y - p1.y, 2), 0.5);
      };

      const hammertime = new Hammer(this.trackDiv.nativeElement);
      const scroll = (ev) => {
        if (zoomGestureJustEnded) {
          zoomGestureJustEnded = false;
          console.log("Skip this event: likely a single touch dangling from pinch");
          return;
        }
        this.timeline.timeContext.offset = this.offsetAtPanStart +
          this.timeline.timeContext.timeToPixel.invert(ev.deltaX);
        this.timeline.tracks.update();
      };

      const zoom = (ev) => {
        const minZoom = this.timeline.state.minZoom;
        const maxZoom = this.timeline.state.maxZoom;
        const distance = calculateDistance({
          x: ev.pointers[0].clientX,
          y: ev.pointers[0].clientY
        }, {
          x: ev.pointers[1].clientX,
          y: ev.pointers[1].clientY
        });

        const lastCenterTime =
          this.timeline.timeContext.timeToPixel.invert(ev.center.x);

        const exponent = pixelToExponent(distance - this.initialDistance);
        const targetZoom = this.initialZoom * Math.pow(2, exponent);

        this.timeline.timeContext.zoom =
          Math.min(Math.max(targetZoom, minZoom), maxZoom);

        const newCenterTime =
          this.timeline.timeContext.timeToPixel.invert(ev.center.x);

        this.timeline.timeContext.offset += newCenterTime - lastCenterTime;
        this.timeline.tracks.update();
      };
      hammertime.get('pinch').set({ enable: true });
      hammertime.on('panstart', () => {
        this.offsetAtPanStart = this.timeline.timeContext.offset;
      });
      hammertime.on('panleft', scroll);
      hammertime.on('panright', scroll);
      hammertime.on('pinchstart', (e) => {
        this.initialZoom = this.timeline.timeContext.zoom;

        this.initialDistance = calculateDistance({
          x: e.pointers[0].clientX,
          y: e.pointers[0].clientY
        }, {
          x: e.pointers[1].clientX,
          y: e.pointers[1].clientY
        });
      });
      hammertime.on('pinch', zoom);
      hammertime.on('pinchend', () => {
        zoomGestureJustEnded = true;
      });
    }

    this.animate();
  }

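  // Renders a spectrogram layer for the given buffer on the 'grid' track.
  // Not currently reachable: both the grid track creation and the call site
  // in the audioBuffer setter are commented out.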
  renderSpectrogram(buffer: AudioBuffer): void {
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const gridTrack = this.timeline.getTrackById('grid');

    const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
      top: height * 0.05,
      height: height * 0.9,
      stepSize: 512,
      blockSize: 1024,
      normalise: 'none',
      mapper: this.sunsetMapper()
    });
    this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);

    this.timeline.tracks.update();
  }

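  // Renders extracted features on the 'wave' track, choosing a layer type
  // from the feature shape: a line layer for 'vector' output, tick or segment
  // layers for 'list' output, and a matrix layer for 'matrix' output.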
  // TODO refactor - this doesn't belong here
  private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
    if (!extracted.hasOwnProperty('features') ||
        !extracted.hasOwnProperty('outputDescriptor')) return;
    if (!extracted.features.hasOwnProperty('shape') ||
        !extracted.features.hasOwnProperty('data')) return;
    const features: FeatureCollection = (extracted.features as FeatureCollection);
    const outputDescriptor = extracted.outputDescriptor;
    const height = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const waveTrack = this.timeline.getTrackById('wave');

    // TODO refactor all of this
    switch (features.shape) {
      case 'vector': {
        const stepDuration = (features as FixedSpacedFeatures).stepDuration;
        const featureData = (features.data as Float32Array);
        if (featureData.length === 0) return;
        const normalisationFactor = 1.0 /
          featureData.reduce(
            (currentMax, feature) => Math.max(currentMax, feature),
            -Infinity
          );

        const plotData = [...featureData].map((feature, i) => {
          return {
            cx: i * stepDuration,
            cy: feature * normalisationFactor
          };
        });

        const lineLayer = new wavesUI.helpers.LineLayer(plotData, {
          color: colour,
          height: height
        });
        this.addLayer(
          lineLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      case 'list': {
        const featureData = (features.data as FeatureList);
        if (featureData.length === 0) return;
        // TODO look at output descriptor instead of directly inspecting features
        const hasDuration = outputDescriptor.configured.hasDuration;
        const isMarker = !hasDuration
          && outputDescriptor.configured.binCount === 0
          && featureData[0].featureValues == null;
        const isRegion = hasDuration
          && featureData[0].timestamp != null;
        console.log("Have list features: length " + featureData.length +
          ", isMarker " + isMarker + ", isRegion " + isRegion +
          ", hasDuration " + hasDuration);
        // TODO refactor, this is incomprehensible
        if (isMarker) {
          const plotData = featureData.map(feature => {
            return {
              time: toSeconds(feature.timestamp),
              label: feature.label
            };
          });
          const featureLayer = new wavesUI.helpers.TickLayer(plotData, {
            height: height,
            color: colour,
            labelPosition: 'bottom',
            shadeSegments: true
          });
          this.addLayer(
            featureLayer,
            waveTrack,
            this.timeline.timeContext
          );
        } else if (isRegion) {
          console.log("Output is of region type");
          const binCount = outputDescriptor.configured.binCount || 0;
          const isBarRegion = featureData[0].featureValues.length >= 1 || binCount >= 1;
          const getSegmentArgs = () => {
            if (isBarRegion) {

              // TODO refactor - this is messy
              interface FoldsToNumber<T> {
                reduce(fn: (previousValue: number,
                            currentValue: T,
                            currentIndex: number,
                            array: ArrayLike<T>) => number,
                       initialValue?: number): number;
              }

              // TODO potentially change impl., i.e avoid reduce
              const findMin = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
                return arr.reduce((min, val) => Math.min(min, getElement(val)), Infinity);
              };

              const findMax = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
                return arr.reduce((max, val) => Math.max(max, getElement(val)), -Infinity);
              };

              const min = findMin<Feature>(featureData, (x: Feature) => {
                return findMin<number>(x.featureValues, y => y);
              });

              const max = findMax<Feature>(featureData, (x: Feature) => {
                return findMax<number>(x.featureValues, y => y);
              });

              const barHeight = 1.0 / height;
              return [
                featureData.reduce((bars, feature) => {
                  const staticProperties = {
                    x: toSeconds(feature.timestamp),
                    width: toSeconds(feature.duration),
                    height: min + barHeight,
                    color: colour,
                    opacity: 0.8
                  };
                  // TODO avoid copying Float32Array to an array - map is problematic here
                  return bars.concat([...feature.featureValues]
                    .map(val => Object.assign({}, staticProperties, {y: val})));
                }, []),
                {yDomain: [min, max + barHeight], height: height} as any
              ];
            } else {
              return [featureData.map(feature => {
                return {
                  x: toSeconds(feature.timestamp),
                  width: toSeconds(feature.duration),
                  color: colour,
                  opacity: 0.8
                };
              }), {height: height}];
            }
          };

          const segmentLayer = new wavesUI.helpers.SegmentLayer(
            ...getSegmentArgs()
          );
          this.addLayer(
            segmentLayer,
            waveTrack,
            this.timeline.timeContext
          );
        }
        break;
      }
      case 'matrix': {
        const stepDuration = (features as FixedSpacedFeatures).stepDuration;
        //!!! + start time
        const matrixData = (features.data as Float32Array[]);
        if (matrixData.length === 0) return;
        console.log("matrix data length = " + matrixData.length);
        console.log("height of first column = " + matrixData[0].length);
        const targetValue = this.estimatePercentile(matrixData, 95);
        const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
        console.log("setting gain to " + gain);
        const matrixEntity =
          new wavesUI.utils.PrefilledMatrixEntity(matrixData,
                                                  0, // startTime
                                                  stepDuration);
        const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
          gain,
          height: height * 0.9,
          top: height * 0.05,
          normalise: 'none',
          mapper: this.iceMapper()
        });
        this.addLayer(
          matrixLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      default:
        console.log("Cannot render an appropriate layer for feature shape '" +
          features.shape + "'");
    }

    this.timeline.tracks.update();
  }

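  // Drives the playback cursor from the audio player's current time outside
  // the Angular zone, paging the visible window forwards or backwards when
  // the cursor leaves it, and re-scheduling itself while audio is playing.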
  private animate(): void {
    this.ngZone.runOutsideAngular(() => {
      // listen for time passing...
      const updateSeekingCursor = () => {
        const currentTime = this.audioService.getCurrentTime();
        this.cursorLayer.currentPosition = currentTime;
        this.cursorLayer.update();

        const currentOffset = this.timeline.timeContext.offset;
        const offsetTimestamp = currentOffset + currentTime;

        const visibleDuration = this.timeline.timeContext.visibleDuration;
        // TODO reduce duplication between directions and make more declarative
        // this kinda logic should also be tested
        const mustPageForward = offsetTimestamp > visibleDuration;
        const mustPageBackward = currentTime < -currentOffset;

        if (mustPageForward) {
          console.warn('page forward', mustPageForward, offsetTimestamp, visibleDuration);
          const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration;

          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset - visibleDuration;
          this.timeline.tracks.update();
        } else {
          console.warn('no page', mustPageForward, offsetTimestamp, visibleDuration);
        }

        if (mustPageBackward) {
          console.warn('page back');
          const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset;
          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset + visibleDuration;
          this.timeline.tracks.update();
        }

        if (this.isPlaying) {
          requestAnimationFrame(updateSeekingCursor);
        }
      };
      updateSeekingCursor();
    });
  }

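  // Adds a layer to a track, giving it a layer time context derived from the
  // supplied timeline context (or the context itself for axis layers), and
  // keeps the cursor layer's element on top by re-appending it.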
  private addLayer(layer: Layer,
                   track: Track,
                   timeContext: any,
                   isAxis: boolean = false): void {
    timeContext.zoom = 1.0;
    if (!layer.timeContext) {
      layer.setTimeContext(isAxis ?
        timeContext : new wavesUI.core.LayerTimeContext(timeContext));
    }
    track.add(layer);
    layer.render();
    layer.update();
    if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
      // re-appending an existing child moves it to the end, so the cursor
      // stays above the newly added layer
      track.$layout.appendChild(this.cursorLayer.$el);
    }
  }

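  // Recolours an existing layer in place by patching the colour accessor of
  // each of its waves-ui shapes and re-rendering. Not referenced elsewhere in
  // this file.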
  private static changeColour(layer: Layer, colour: string): void {
    const butcherShapes = (shape) => {
      shape.install({color: () => colour});
      shape.params.color = colour;
      shape.update(layer._renderingContext, layer.data);
    };

    layer._$itemCommonShapeMap.forEach(butcherShapes);
    layer._$itemShapeMap.forEach(butcherShapes);
    layer.render();
    layer.update();
  }

  ngOnDestroy(): void {
    this.featureExtractionSubscription.unsubscribe();
    this.playingStateSubscription.unsubscribe();
    this.seekedSubscription.unsubscribe();
  }

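  // Mouse-driven seeking: seekStart records the zoom and offset at mouse-down,
  // and seekEnd only seeks if neither has changed since, so a drag that panned
  // or zoomed the view is not treated as a click-to-seek.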
  seekStart(): void {
    this.zoomOnMouseDown = this.timeline.timeContext.zoom;
    this.offsetOnMouseDown = this.timeline.timeContext.offset;
  }

  seekEnd(x: number): void {
    const hasSameZoom: boolean = this.zoomOnMouseDown ===
      this.timeline.timeContext.zoom;
    const hasSameOffset: boolean = this.offsetOnMouseDown ===
      this.timeline.timeContext.offset;
    if (hasSameZoom && hasSameOffset) {
      this.seek(x);
    }
  }

  seek(x: number): void {
    if (this.timeline) {
      const timeContext: any = this.timeline.timeContext;
      this.audioService.seekTo(
        timeContext.timeToPixel.invert(x) - timeContext.offset
      );
    }
  }
}