import {
  Component, OnInit, ViewChild, ElementRef, Input, AfterViewInit, NgZone,
  OnDestroy
} from '@angular/core';
import {AudioPlayerService} from '../services/audio-player/audio-player.service';
import wavesUI from 'waves-ui';
import {
  FeatureExtractionService
} from '../services/feature-extraction/feature-extraction.service';
import {Subscription} from 'rxjs';
import {
  FeatureCollection,
  FixedSpacedFeatures, SimpleResponse
} from 'piper/HigherLevelUtilities';
import {toSeconds} from 'piper';
import {FeatureList, Feature} from 'piper/Feature';
import * as Hammer from 'hammerjs';
import {WavesSpectrogramLayer} from '../spectrogram/Spectrogram';
import {PartialEventEmitter} from '../notebook-feed/notebook-feed.component';

// waves-ui ships without TypeScript definitions, so alias the types we
// borrow from it to any; Colour is just a readability alias
type Layer = any;
type Track = any;
type Timeline = any;
type TimelineTimeContext = any;
type Colour = string;

@Component({
  selector: 'app-waveform',
  templateUrl: './waveform.component.html',
  styleUrls: ['./waveform.component.css']
})
export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {

  @ViewChild('track') trackDiv: ElementRef;

  @Input() timeContext: TimelineTimeContext & PartialEventEmitter;
  private _audioBuffer: AudioBuffer;
  private timeline: Timeline;
  private cursorLayer: any;
  private layers: Layer[];

  @Input()
  set audioBuffer(buffer: AudioBuffer) {
    this._audioBuffer = buffer || undefined;
    if (this.audioBuffer) {
      this.renderWaveform(this.audioBuffer);
      // this.renderSpectrogram(this.audioBuffer);
    }
  }

  get audioBuffer(): AudioBuffer {
    return this._audioBuffer;
  }

  private featureExtractionSubscription: Subscription;
  private playingStateSubscription: Subscription;
  private seekedSubscription: Subscription;
  private isPlaying: boolean;
  private offsetAtPanStart: number;
  private initialZoom: number;
  private initialDistance: number;
  private zoomOnMouseDown: number;
  private offsetOnMouseDown: number;

  constructor(private audioService: AudioPlayerService,
              private piperService: FeatureExtractionService,
              public ngZone: NgZone) {
    this.layers = [];
    this._audioBuffer = undefined;
    this.timeline = undefined;
    this.cursorLayer = undefined;
    this.isPlaying = false;
    // generator cycling endlessly through a fixed palette, one colour
    // per extracted feature set
    const colours = function* () {
      const circularColours = [
        'black',
        'red',
        'green',
        'purple',
        'orange'
      ];
      let index = 0;
      const nColours = circularColours.length;
      while (true) {
        index = (index + 1) % nColours;
        yield circularColours[index];
      }
    }();

    this.featureExtractionSubscription = piperService.featuresExtracted$.subscribe(
      features => {
        this.renderFeatures(features, colours.next().value);
      });
    this.playingStateSubscription = audioService.playingStateChange$.subscribe(
      isPlaying => {
        this.isPlaying = isPlaying;
        if (this.isPlaying) {
          this.animate();
        }
      });
    this.seekedSubscription = audioService.seeked$.subscribe(() => {
      if (!this.isPlaying) {
        this.animate();
      }
    });
  }

  ngOnInit() {
  }

  ngAfterViewInit(): void {
    this.renderTimeline();
  }

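  // Creates the waves-ui timeline and its 'wave' track, sized to the host
  // element, or resizes the existing one; hooks up a shared time context
  // if one was provided as an input, and returns the timeline.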
  renderTimeline(duration: number = 1.0): Timeline {
    const track: HTMLElement = this.trackDiv.nativeElement;
    track.innerHTML = '';
    const height: number = track.getBoundingClientRect().height;
    const width: number = track.getBoundingClientRect().width;
    const pixelsPerSecond = width / duration;
    if (this.timeline instanceof wavesUI.core.Timeline) {
      this.timeline.pixelsPerSecond = pixelsPerSecond;
      this.timeline.visibleWidth = width;
    } else {
      this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
    }
    if (this.timeContext instanceof wavesUI.core.TimelineTimeContext) {
      console.warn('Has shared timeline');
      this.timeline.timeContext = this.timeContext;
      this.timeContext.on('zoom', () => {
        this.timeline.tracks.update();
      });
      this.timeContext.on('offset', () => {
        this.timeline.tracks.update();
      });
    }
    this.timeline.createTrack(track, height, 'wave');
    // this.timeline.createTrack(track, height/2, 'wave');
    // this.timeline.createTrack(track, height/2, 'grid');
    return this.timeline;
  }

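  // Estimates the given percentile of a matrix by sorting a bounded random
  // sample of its values (at least one per column); used below to choose a
  // display gain for matrix layers.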
  estimatePercentile(matrix, percentile) {
    // our sample is not evenly distributed across the whole data set:
    // it is guaranteed to include at least one sample from every
    // column, and could sample some values more than once. But it
    // should be good enough in most cases (todo: show this)
    if (matrix.length === 0) {
      return 0.0;
    }
    const w = matrix.length;
    const h = matrix[0].length;
    const n = w * h;
    const m = (n > 50000 ? 50000 : n); // should base that on the %ile
    let m_per = Math.floor(m / w);
    if (m_per < 1) { m_per = 1; }
    const sample = [];
    for (let x = 0; x < w; ++x) {
      for (let i = 0; i < m_per; ++i) {
        const y = Math.floor(Math.random() * h);
        const value = matrix[x][y];
        if (!isNaN(value) && value !== Infinity) {
          sample.push(value);
        }
      }
    }
    if (sample.length === 0) {
      console.log('WARNING: No samples gathered, even though we hoped for ' +
        (m_per * w) + ' of them');
      return 0.0;
    }
    sample.sort((a, b) => a - b);
    const ix = Math.floor((sample.length * percentile) / 100);
    console.log('Estimating ' + percentile + '-%ile of ' +
      n + '-sample dataset (' + w + ' x ' + h + ') as value ' + ix +
      ' of sorted ' + sample.length + '-sample subset');
    const estimate = sample[ix];
    console.log('Estimate is: ' + estimate + ' (where min sampled value = ' +
      sample[0] + ' and max = ' + sample[sample.length - 1] + ')');
    return estimate;
  }

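  // Returns a mapper that linearly interpolates between the given hex
  // colour stops, yielding normalised [r, g, b] triples for values in [0, 1].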
  interpolatingMapper(hexColours) {
    const colours = hexColours.map(n => {
      const i = parseInt(n, 16);
      return [((i >> 16) & 255) / 255.0,
              ((i >> 8) & 255) / 255.0,
              ((i) & 255) / 255.0];
    });
    const last = colours.length - 1;
    return (value => {
      const m = value * last;
      if (m >= last) {
        return colours[last];
      }
      if (m <= 0) {
        return colours[0];
      }
      const base = Math.floor(m);
      const prop0 = base + 1.0 - m;
      const prop1 = m - base;
      const c0 = colours[base];
      const c1 = colours[base + 1];
      return [c0[0] * prop0 + c1[0] * prop1,
              c0[1] * prop0 + c1[1] * prop1,
              c0[2] * prop0 + c1[2] * prop1];
    });
  }

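  // Cold palette: dark blue for low values up to white for high ones.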
  iceMapper() {
    const hexColours = [
      // Based on ColorBrewer ylGnBu
      'ffffff', 'ffff00', 'f7fcf0', 'e0f3db', 'ccebc5', 'a8ddb5',
      '7bccc4', '4eb3d3', '2b8cbe', '0868ac', '084081', '042040'
    ];
    hexColours.reverse();
    return this.interpolatingMapper(hexColours);
  }

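  // Standard hue-sector HSV to RGB conversion.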
  hsv2rgb(h, s, v) { // all values in range [0, 1]
    const i = Math.floor(h * 6);
    const f = h * 6 - i;
    const p = v * (1 - s);
    const q = v * (1 - f * s);
    const t = v * (1 - (1 - f) * s);
    let r = 0, g = 0, b = 0;
    switch (i % 6) {
      case 0: r = v; g = t; b = p; break;
      case 1: r = q; g = v; b = p; break;
      case 2: r = p; g = v; b = t; break;
      case 3: r = p; g = q; b = v; break;
      case 4: r = t; g = p; b = v; break;
      case 5: r = v; g = p; b = q; break;
    }
    return [r, g, b];
  }

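  // Mapper sweeping the hue from blue (low values) through green towards
  // red, brightening as the value increases.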
  greenMapper() {
    const blue = 0.6666;
    const pieslice = 0.3333;
    return (value => {
      const h = blue - value * 2.0 * pieslice;
      const s = 0.5 + value / 2.0;
      const v = value;
      return this.hsv2rgb(h, s, v);
    });
  }

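  // Hand-tuned piecewise-linear palette with a sunset-like feel.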
  sunsetMapper() {
    return (value => {
      const r = (value - 0.24) * 2.38;
      const g = (value - 0.64) * 2.777;
      let b = (3.6 * value);
      if (value > 0.277) { b = 2.0 - b; }
      return [r, g, b];
    });
  }

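  // Detaches and destroys every layer previously added via addLayer, also
  // unhooking each layer's time context from the shared timeline.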
  clearTimeline(): void {
    // loop through layers and remove them; waves-ui provides methods for
    // this, but they don't seem to work properly
    const timeContextChildren = this.timeline.timeContext._children;
    for (const track of this.timeline.tracks) {
      if (track.layers.length === 0) { continue; }
      const trackLayers = Array.from(track.layers);
      while (trackLayers.length) {
        const layer: Layer = trackLayers.pop();
        if (this.layers.includes(layer)) {
          track.remove(layer);
          this.layers.splice(this.layers.indexOf(layer), 1);
          const index = timeContextChildren.indexOf(layer.timeContext);
          if (index >= 0) {
            timeContextChildren.splice(index, 1);
          }
          layer.destroy();
        }
      }
    }
  }

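  // Renders the waveform (one layer per channel), a time axis and the
  // playback cursor into the 'wave' track, resizing the timeline if it
  // already exists; on touch devices it also wires up pan-to-scroll and
  // pinch-to-zoom gestures via Hammer.js.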
  renderWaveform(buffer: AudioBuffer): void {
    // const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height;
    if (this.timeline) {
      // resize
      const width = this.trackDiv.nativeElement.getBoundingClientRect().width;

      this.clearTimeline();

      this.timeline.visibleWidth = width;
      this.timeline.pixelsPerSecond = width / buffer.duration;
      this.timeline.getTrackById('wave').height = height;
    } else {
      this.renderTimeline(buffer.duration);
    }
    // only look the track up once the timeline is guaranteed to exist
    const waveTrack = this.timeline.getTrackById('wave');
    this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration;

    // time axis
    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
      height: height,
      color: '#b0b0b0'
    });
    this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);

    const nchannels = buffer.numberOfChannels;
    const totalWaveHeight = height * 0.9;
    const waveHeight = totalWaveHeight / nchannels;

    for (let ch = 0; ch < nchannels; ++ch) {
      console.log('about to construct a waveform layer for channel ' + ch);
      const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
        top: (height - totalWaveHeight) / 2 + waveHeight * ch,
        height: waveHeight,
        color: 'darkblue',
        channel: ch
      });
      this.addLayer(waveformLayer, waveTrack, this.timeline.timeContext);
    }

    this.cursorLayer = new wavesUI.helpers.CursorLayer({
      height: height
    });
    this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
    waveTrack.render();
    waveTrack.update();

    if ('ontouchstart' in window) {
      interface Point {
        x: number;
        y: number;
      }

      let zoomGestureJustEnded: boolean = false;

      const pixelToExponent: Function = wavesUI.utils.scales.linear()
        .domain([0, 100]) // 100px => factor 2
        .range([0, 1]);

      const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
        return Math.pow(
          Math.pow(p2.x - p1.x, 2) +
          Math.pow(p2.y - p1.y, 2), 0.5);
      };

      const hammertime = new Hammer(this.trackDiv.nativeElement);
      const scroll = (ev) => {
        if (zoomGestureJustEnded) {
          zoomGestureJustEnded = false;
          console.log('Skip this event: likely a single touch dangling from pinch');
          return;
        }
        this.timeline.timeContext.offset = this.offsetAtPanStart +
          this.timeline.timeContext.timeToPixel.invert(ev.deltaX);
        this.timeline.tracks.update();
      };

      const zoom = (ev) => {
        const minZoom = this.timeline.state.minZoom;
        const maxZoom = this.timeline.state.maxZoom;
        const distance = calculateDistance({
          x: ev.pointers[0].clientX,
          y: ev.pointers[0].clientY
        }, {
          x: ev.pointers[1].clientX,
          y: ev.pointers[1].clientY
        });

        const lastCenterTime =
          this.timeline.timeContext.timeToPixel.invert(ev.center.x);

        const exponent = pixelToExponent(distance - this.initialDistance);
        const targetZoom = this.initialZoom * Math.pow(2, exponent);

        this.timeline.timeContext.zoom =
          Math.min(Math.max(targetZoom, minZoom), maxZoom);

        const newCenterTime =
          this.timeline.timeContext.timeToPixel.invert(ev.center.x);

        // keep the time under the gesture's centre point stationary
        this.timeline.timeContext.offset += newCenterTime - lastCenterTime;
        this.timeline.tracks.update();
      };
      hammertime.get('pinch').set({ enable: true });
      hammertime.on('panstart', () => {
        this.offsetAtPanStart = this.timeline.timeContext.offset;
      });
      hammertime.on('panleft', scroll);
      hammertime.on('panright', scroll);
      hammertime.on('pinchstart', (e) => {
        this.initialZoom = this.timeline.timeContext.zoom;

        this.initialDistance = calculateDistance({
          x: e.pointers[0].clientX,
          y: e.pointers[0].clientY
        }, {
          x: e.pointers[1].clientX,
          y: e.pointers[1].clientY
        });
      });
      hammertime.on('pinch', zoom);
      hammertime.on('pinchend', () => {
        zoomGestureJustEnded = true;
      });
    }

    this.animate();
  }

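  // Renders a spectrogram into the 'grid' track. Currently unused: both
  // the call site and the 'grid' track creation are commented out above.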
  renderSpectrogram(buffer: AudioBuffer): void {
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const gridTrack = this.timeline.getTrackById('grid');

    const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
      top: height * 0.05,
      height: height * 0.9,
      stepSize: 512,
      blockSize: 1024,
      normalise: 'none',
      mapper: this.sunsetMapper()
    });
    this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);

    this.timeline.tracks.update();
  }

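  // Renders extracted features onto the 'wave' track, choosing a layer
  // type by feature shape: a line plot for 'vector', ticks or segments
  // for 'list', and a matrix layer for 'matrix'.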
  // TODO refactor - this doesn't belong here
  private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
    if (!extracted.hasOwnProperty('features') || !extracted.hasOwnProperty('outputDescriptor')) { return; }
    if (!extracted.features.hasOwnProperty('shape') || !extracted.features.hasOwnProperty('data')) { return; }
    const features: FeatureCollection = (extracted.features as FeatureCollection);
    const outputDescriptor = extracted.outputDescriptor;
    const height = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const waveTrack = this.timeline.getTrackById('wave');

    // TODO refactor all of this
    switch (features.shape) {
      case 'vector': {
        const stepDuration = (features as FixedSpacedFeatures).stepDuration;
        const featureData = (features.data as Float32Array);
        if (featureData.length === 0) { return; }
        const normalisationFactor = 1.0 /
          featureData.reduce(
            (currentMax, feature) => Math.max(currentMax, feature),
            -Infinity
          );

        const plotData = [...featureData].map((feature, i) => {
          return {
            cx: i * stepDuration,
            cy: feature * normalisationFactor
          };
        });

        const lineLayer = new wavesUI.helpers.LineLayer(plotData, {
          color: colour,
          height: height
        });
        this.addLayer(
          lineLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      case 'list': {
        const featureData = (features.data as FeatureList);
        if (featureData.length === 0) { return; }
        // TODO look at output descriptor instead of directly inspecting features
        const hasDuration = outputDescriptor.configured.hasDuration;
        const isMarker = !hasDuration
          && outputDescriptor.configured.binCount === 0
          && featureData[0].featureValues == null;
        const isRegion = hasDuration
          && featureData[0].timestamp != null;
        console.log('Have list features: length ' + featureData.length +
          ', isMarker ' + isMarker + ', isRegion ' + isRegion +
          ', hasDuration ' + hasDuration);
        // TODO refactor, this is incomprehensible
        if (isMarker) {
          const plotData = featureData.map(feature => {
            return {
              time: toSeconds(feature.timestamp),
              label: feature.label
            };
          });
          const featureLayer = new wavesUI.helpers.TickLayer(plotData, {
            height: height,
            color: colour,
            labelPosition: 'bottom',
            shadeSegments: true
          });
          this.addLayer(
            featureLayer,
            waveTrack,
            this.timeline.timeContext
          );
        } else if (isRegion) {
          console.log('Output is of region type');
          const binCount = outputDescriptor.configured.binCount || 0;
          const isBarRegion = featureData[0].featureValues.length >= 1 || binCount >= 1;
          const getSegmentArgs = () => {
            if (isBarRegion) {

              // TODO refactor - this is messy
              interface FoldsToNumber<T> {
                reduce(fn: (previousValue: number,
                            currentValue: T,
                            currentIndex: number,
                            array: ArrayLike<T>) => number,
                       initialValue?: number): number;
              }

              // TODO potentially change impl., i.e avoid reduce
              const findMin = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
                return arr.reduce((min, val) => Math.min(min, getElement(val)), Infinity);
              };

              const findMax = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
                return arr.reduce((max, val) => Math.max(max, getElement(val)), -Infinity);
              };

              const min = findMin<Feature>(featureData, (x: Feature) => {
                return findMin<number>(x.featureValues, y => y);
              });

              const max = findMax<Feature>(featureData, (x: Feature) => {
                return findMax<number>(x.featureValues, y => y);
              });

              const barHeight = 1.0 / height;
              return [
                featureData.reduce((bars, feature) => {
                  const staticProperties = {
                    x: toSeconds(feature.timestamp),
                    width: toSeconds(feature.duration),
                    height: min + barHeight,
                    color: colour,
                    opacity: 0.8
                  };
                  // TODO avoid copying Float32Array to an array - map is problematic here
                  return bars.concat([...feature.featureValues]
                    .map(val => Object.assign({}, staticProperties, {y: val})));
                }, []),
                {yDomain: [min, max + barHeight], height: height} as any
              ];
            } else {
              return [featureData.map(feature => {
                return {
                  x: toSeconds(feature.timestamp),
                  width: toSeconds(feature.duration),
                  color: colour,
                  opacity: 0.8
                };
              }), {height: height}];
            }
          };

          const segmentLayer = new wavesUI.helpers.SegmentLayer(
            ...getSegmentArgs()
          );
          this.addLayer(
            segmentLayer,
            waveTrack,
            this.timeline.timeContext
          );
        }
        break;
      }
      case 'matrix': {
        const stepDuration = (features as FixedSpacedFeatures).stepDuration;
        //!!! + start time
        const matrixData = (features.data as Float32Array[]);
        if (matrixData.length === 0) { return; }
        console.log('matrix data length = ' + matrixData.length);
        console.log('height of first column = ' + matrixData[0].length);
        const targetValue = this.estimatePercentile(matrixData, 95);
        const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
        console.log('setting gain to ' + gain);
        const matrixEntity =
          new wavesUI.utils.PrefilledMatrixEntity(matrixData,
                                                  0, // startTime
                                                  stepDuration);
        const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
          gain,
          height: height * 0.9,
          top: height * 0.05,
          normalise: 'none',
          mapper: this.iceMapper()
        });
        this.addLayer(
          matrixLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      default:
        console.log(`Cannot render an appropriate layer for feature shape '${features.shape}'`);
    }

    this.timeline.tracks.update();
  }

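  // Drives the playback cursor with requestAnimationFrame, run outside
  // Angular's zone so the per-frame updates don't trigger change detection;
  // pages the visible window forwards or backwards when the cursor moves
  // out of view.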
  private animate(): void {
    this.ngZone.runOutsideAngular(() => {
      // listen for time passing...
      const updateSeekingCursor = () => {
        const currentTime = this.audioService.getCurrentTime();
        this.cursorLayer.currentPosition = currentTime;
        this.cursorLayer.update();

        const currentOffset = this.timeline.timeContext.offset;
        const offsetTimestamp = currentOffset + currentTime;

        const visibleDuration = this.timeline.timeContext.visibleDuration;
        // TODO reduce duplication between directions and make more declarative
        // this kinda logic should also be tested
        const mustPageForward = offsetTimestamp > visibleDuration;
        const mustPageBackward = currentTime < -currentOffset;

        if (mustPageForward) {
          console.warn('page forward', mustPageForward, offsetTimestamp, visibleDuration);
          const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration;

          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset - visibleDuration;
          this.timeline.tracks.update();
        } else {
          console.warn('no page', mustPageForward, offsetTimestamp, visibleDuration);
        }

        if (mustPageBackward) {
          console.warn('page back');
          const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset;
          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset + visibleDuration;
          this.timeline.tracks.update();
        }

        if (this.isPlaying) {
          requestAnimationFrame(updateSeekingCursor);
        }
      };
      updateSeekingCursor();
    });
  }

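  // Adds a layer to a track, giving it its own LayerTimeContext unless it
  // is an axis layer (axes share the timeline's context directly).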
  private addLayer(layer: Layer, track: Track, timeContext: any, isAxis: boolean = false): void {
    timeContext.zoom = 1.0;
    if (!layer.timeContext) {
      layer.setTimeContext(isAxis ?
        timeContext : new wavesUI.core.LayerTimeContext(timeContext));
    }
    track.add(layer);
    this.layers.push(layer);
    layer.render();
    layer.update();
    if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
      // appendChild moves the existing element, keeping the cursor on top
      track.$layout.appendChild(this.cursorLayer.$el);
    }
  }

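  // Recolours an existing layer in place by reinstalling the colour
  // accessor on each of its shapes; reaches into waves-ui internals (hence
  // the underscore-prefixed maps), and is not currently called from within
  // this file.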
  private static changeColour(layer: Layer, colour: string): void {
    const butcherShapes = (shape) => {
      shape.install({color: () => colour});
      shape.params.color = colour;
      shape.update(layer._renderingContext, layer.data);
    };

    layer._$itemCommonShapeMap.forEach(butcherShapes);
    layer._$itemShapeMap.forEach(butcherShapes);
    layer.render();
    layer.update();
  }

  ngOnDestroy(): void {
    this.featureExtractionSubscription.unsubscribe();
    this.playingStateSubscription.unsubscribe();
    this.seekedSubscription.unsubscribe();
  }

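  // Mouse-seek support: capture the view state on mouse-down so that a
  // plain click can later be distinguished from a drag or zoom gesture.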
  seekStart(): void {
    this.zoomOnMouseDown = this.timeline.timeContext.zoom;
    this.offsetOnMouseDown = this.timeline.timeContext.offset;
  }

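  // Only treat the mouse-up as a seek if neither zoom nor offset changed
  // while the button was down.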
  seekEnd(x: number): void {
    const hasSameZoom: boolean = this.zoomOnMouseDown ===
      this.timeline.timeContext.zoom;
    const hasSameOffset: boolean = this.offsetOnMouseDown ===
      this.timeline.timeContext.offset;
    if (hasSameZoom && hasSameOffset) {
      this.seek(x);
    }
  }

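  // Converts a pixel x-coordinate to a time position and seeks there.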
  seek(x: number): void {
    if (this.timeline) {
      const timeContext: any = this.timeline.timeContext;
      this.audioService.seekTo(
        timeContext.timeToPixel.invert(x) - timeContext.offset
      );
    }
  }
}