dev@10
|
1 import {
|
dev@51
|
2 Component, OnInit, ViewChild, ElementRef, Input, AfterViewInit, NgZone,
|
dev@51
|
3 OnDestroy
|
dev@10
|
4 } from '@angular/core';
|
dev@39
|
5 import {AudioPlayerService} from "../services/audio-player/audio-player.service";
|
dev@36
|
6 import wavesUI from 'waves-ui';
|
dev@63
|
7 import {
|
dev@64
|
8 FeatureExtractionService
|
dev@63
|
9 } from "../services/feature-extraction/feature-extraction.service";
|
dev@51
|
10 import {Subscription} from "rxjs";
|
dev@63
|
11 import {
|
dev@63
|
12 FeatureCollection,
|
dev@64
|
13 FixedSpacedFeatures, SimpleResponse
|
dev@63
|
14 } from "piper/HigherLevelUtilities";
|
dev@53
|
15 import {toSeconds} from "piper";
|
dev@67
|
16 import {FeatureList, Feature} from "piper/Feature";
|
dev@8
|
17
|
dev@20
|
// Placeholder typings for waves-ui objects — the library ships no .d.ts.
type Timeline = any; // TODO what type actually is it.. start a .d.ts for waves-ui?
type Layer = any;
type Track = any;
// Index into disposableLayers identifying a layer that can be removed/destroyed.
type DisposableIndex = number;
// CSS colour string used when rendering feature layers.
type Colour = string;
|
dev@6
|
23
|
dev@6
|
24 @Component({
|
dev@6
|
25 selector: 'app-waveform',
|
dev@6
|
26 templateUrl: './waveform.component.html',
|
dev@6
|
27 styleUrls: ['./waveform.component.css']
|
dev@6
|
28 })
|
dev@51
|
29 export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {
|
dev@20
|
30
|
dev@8
|
31 @ViewChild('track') trackDiv: ElementRef;
|
dev@6
|
32
|
dev@54
|
33 private _audioBuffer: AudioBuffer;
|
dev@54
|
34 private timeline: Timeline;
|
dev@54
|
35 private cursorLayer: any;
|
dev@54
|
36 private disposableLayers: Layer[];
|
dev@59
|
37 private colouredLayers: Map<DisposableIndex, Colour>;
|
dev@16
|
38
|
dev@16
|
39 @Input()
|
dev@16
|
40 set audioBuffer(buffer: AudioBuffer) {
|
dev@16
|
41 this._audioBuffer = buffer || undefined;
|
dev@20
|
42 if (this.audioBuffer)
|
dev@20
|
43 this.renderWaveform(this.audioBuffer);
|
dev@16
|
44 }
|
dev@16
|
45
|
dev@16
|
46 get audioBuffer(): AudioBuffer {
|
dev@16
|
47 return this._audioBuffer;
|
dev@16
|
48 }
|
dev@16
|
49
|
dev@51
|
50 private featureExtractionSubscription: Subscription;
|
dev@53
|
51 private playingStateSubscription: Subscription;
|
dev@53
|
52 private seekedSubscription: Subscription;
|
dev@53
|
53 private isPlaying: boolean;
|
dev@51
|
54
|
dev@31
|
55 constructor(private audioService: AudioPlayerService,
|
dev@51
|
56 private piperService: FeatureExtractionService,
|
dev@51
|
57 public ngZone: NgZone) {
|
dev@59
|
58 this.colouredLayers = new Map();
|
dev@54
|
59 this.disposableLayers = [];
|
dev@54
|
60 this._audioBuffer = undefined;
|
dev@54
|
61 this.timeline = undefined;
|
dev@54
|
62 this.cursorLayer = undefined;
|
dev@53
|
63 this.isPlaying = false;
|
dev@59
|
64 const colours = function* () {
|
dev@59
|
65 const circularColours = [
|
dev@59
|
66 'black',
|
dev@59
|
67 'red',
|
dev@59
|
68 'green',
|
dev@59
|
69 'purple',
|
dev@59
|
70 'orange'
|
dev@59
|
71 ];
|
dev@59
|
72 let index = 0;
|
dev@59
|
73 const nColours = circularColours.length;
|
dev@59
|
74 while (true) {
|
dev@59
|
75 yield circularColours[index = ++index % nColours];
|
dev@59
|
76 }
|
dev@59
|
77 }();
|
dev@59
|
78
|
dev@51
|
79 this.featureExtractionSubscription = piperService.featuresExtracted$.subscribe(
|
dev@51
|
80 features => {
|
dev@59
|
81 this.renderFeatures(features, colours.next().value);
|
dev@51
|
82 });
|
dev@53
|
83 this.playingStateSubscription = audioService.playingStateChange$.subscribe(
|
dev@53
|
84 isPlaying => {
|
dev@53
|
85 this.isPlaying = isPlaying;
|
dev@53
|
86 if (this.isPlaying)
|
dev@53
|
87 this.animate();
|
dev@53
|
88 });
|
dev@53
|
89 this.seekedSubscription = audioService.seeked$.subscribe(() => {
|
dev@53
|
90 if (!this.isPlaying)
|
dev@53
|
91 this.animate();
|
dev@53
|
92 });
|
dev@51
|
93 }
|
dev@51
|
94
|
dev@53
|
95 ngOnInit() {
|
dev@53
|
96 }
|
dev@10
|
97
|
dev@10
|
98 ngAfterViewInit(): void {
|
dev@51
|
99 this.timeline = this.renderTimeline();
|
dev@20
|
100 }
|
dev@20
|
101
|
dev@20
|
102 renderTimeline(duration: number = 1.0): Timeline {
|
dev@18
|
103 const track: HTMLElement = this.trackDiv.nativeElement;
|
dev@20
|
104 track.innerHTML = "";
|
dev@18
|
105 const height: number = track.getBoundingClientRect().height;
|
dev@18
|
106 const width: number = track.getBoundingClientRect().width;
|
dev@18
|
107 const pixelsPerSecond = width / duration;
|
dev@18
|
108 const timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
|
dev@33
|
109 timeline.timeContext.offset = 0.5 * timeline.timeContext.visibleDuration;
|
dev@18
|
110 timeline.createTrack(track, height, 'main');
|
dev@54
|
111 return timeline;
|
dev@54
|
112 }
|
dev@18
|
113
|
dev@54
|
114 renderWaveform(buffer: AudioBuffer): void {
|
dev@54
|
115 const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height;
|
dev@54
|
116 const mainTrack = this.timeline.getTrackById('main');
|
dev@54
|
117 if (this.timeline) {
|
dev@54
|
118 // resize
|
dev@54
|
119 const width = this.trackDiv.nativeElement.getBoundingClientRect().width;
|
dev@55
|
120
|
dev@54
|
121 // loop through layers and remove them, waves-ui provides methods for this but it seems to not work properly
|
dev@55
|
122 const timeContextChildren = this.timeline.timeContext._children;
|
dev@55
|
123
|
dev@60
|
124 for (let i = 0, length = this.disposableLayers.length; i < length; ++i) {
|
dev@54
|
125 let layer = this.disposableLayers.pop();
|
dev@54
|
126 mainTrack.remove(layer);
|
dev@55
|
127
|
dev@55
|
128 const index = timeContextChildren.indexOf(layer.timeContext);
|
dev@55
|
129 if (index >= 0)
|
dev@55
|
130 timeContextChildren.splice(index, 1);
|
dev@54
|
131 layer.destroy();
|
dev@54
|
132 }
|
dev@59
|
133 this.colouredLayers.clear();
|
dev@59
|
134
|
dev@54
|
135 this.timeline.visibleWidth = width;
|
dev@54
|
136 this.timeline.pixelsPerSecond = width / buffer.duration;
|
dev@54
|
137 mainTrack.height = height;
|
dev@54
|
138 } else {
|
dev@54
|
139 this.timeline = this.renderTimeline(buffer.duration)
|
dev@54
|
140 }
|
dev@18
|
141 // time axis
|
dev@18
|
142 const timeAxis = new wavesUI.helpers.TimeAxisLayer({
|
dev@18
|
143 height: height,
|
dev@18
|
144 color: 'gray'
|
dev@18
|
145 });
|
dev@54
|
146 this.addLayer(timeAxis, mainTrack, this.timeline.timeContext, true);
|
dev@18
|
147
|
dev@20
|
148 const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
|
dev@10
|
149 top: 10,
|
dev@20
|
150 height: height * 0.9,
|
dev@16
|
151 color: 'darkblue'
|
dev@16
|
152 });
|
dev@54
|
153 this.addLayer(waveformLayer, mainTrack, this.timeline.timeContext);
|
dev@31
|
154
|
dev@53
|
155 this.cursorLayer = new wavesUI.helpers.CursorLayer({
|
dev@31
|
156 height: height
|
dev@31
|
157 });
|
dev@54
|
158 this.addLayer(this.cursorLayer, mainTrack, this.timeline.timeContext);
|
dev@51
|
159 this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
|
dev@54
|
160 mainTrack.render();
|
dev@54
|
161 mainTrack.update();
|
dev@53
|
162 this.animate();
|
dev@53
|
163 }
|
dev@53
|
164
|
dev@53
|
165 // TODO refactor - this doesn't belong here
|
dev@64
|
166 private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
|
dev@64
|
167 if (!extracted.hasOwnProperty('features') || !extracted.hasOwnProperty('outputDescriptor')) return;
|
dev@64
|
168 if (!extracted.features.hasOwnProperty('shape') || !extracted.features.hasOwnProperty('data')) return;
|
dev@64
|
169 const features: FeatureCollection = (extracted.features as FeatureCollection);
|
dev@64
|
170 const outputDescriptor = extracted.outputDescriptor;
|
dev@64
|
171 const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
|
dev@64
|
172 const mainTrack = this.timeline.getTrackById('main');
|
dev@64
|
173
|
dev@64
|
174 // TODO refactor all of this
|
dev@63
|
175 switch (features.shape) {
|
dev@64
|
176 case 'vector': {
|
dev@63
|
177 const stepDuration = (features as FixedSpacedFeatures).stepDuration;
|
dev@63
|
178 const featureData = (features.data as Float32Array);
|
dev@63
|
179 const normalisationFactor = 1.0 /
|
dev@63
|
180 featureData.reduce(
|
dev@63
|
181 (currentMax, feature) => Math.max(currentMax, feature),
|
dev@63
|
182 -Infinity
|
dev@63
|
183 );
|
dev@67
|
184
|
dev@63
|
185 const plotData = [...featureData].map((feature, i) => {
|
dev@63
|
186 return {
|
dev@63
|
187 cx: i * stepDuration,
|
dev@63
|
188 cy: feature * normalisationFactor
|
dev@63
|
189 };
|
dev@63
|
190 });
|
dev@67
|
191
|
dev@63
|
192 let breakpointLayer = new wavesUI.helpers.BreakpointLayer(plotData, {
|
dev@63
|
193 color: colour,
|
dev@64
|
194 height: height
|
dev@63
|
195 });
|
dev@63
|
196 this.colouredLayers.set(this.addLayer(
|
dev@63
|
197 breakpointLayer,
|
dev@64
|
198 mainTrack,
|
dev@63
|
199 this.timeline.timeContext
|
dev@63
|
200 ), colour);
|
dev@63
|
201 break;
|
dev@64
|
202 }
|
dev@64
|
203 case 'list': {
|
dev@64
|
204 const featureData = (features.data as FeatureList);
|
dev@64
|
205 // TODO look at output descriptor instead of directly inspecting features
|
dev@64
|
206 const hasDuration = outputDescriptor.configured.hasDuration;
|
dev@64
|
207 const isMarker = !hasDuration
|
dev@64
|
208 && outputDescriptor.configured.binCount === 0
|
dev@64
|
209 && featureData[0].featureValues == null;
|
dev@64
|
210 const isRegion = hasDuration
|
dev@64
|
211 && featureData[0].timestamp != null;
|
dev@64
|
212 // TODO refactor, this is incomprehensible
|
dev@64
|
213 if (isMarker) {
|
dev@64
|
214 const plotData = featureData.map(feature => {
|
dev@64
|
215 return {x: toSeconds(feature.timestamp)}
|
dev@64
|
216 });
|
dev@64
|
217 let markerLayer = new wavesUI.helpers.MarkerLayer(plotData, {
|
dev@64
|
218 height: height,
|
dev@64
|
219 color: colour,
|
dev@64
|
220 });
|
dev@64
|
221 this.colouredLayers.set(this.addLayer(
|
dev@64
|
222 markerLayer,
|
dev@64
|
223 mainTrack,
|
dev@64
|
224 this.timeline.timeContext
|
dev@64
|
225 ), colour);
|
dev@64
|
226 } else if (isRegion) {
|
dev@67
|
227 const binCount = outputDescriptor.configured.binCount || 0;
|
dev@67
|
228 const isBarRegion = featureData[0].featureValues.length >= 1 || binCount >= 1 ;
|
dev@64
|
229 const getSegmentArgs = () => {
|
dev@64
|
230 if (isBarRegion) {
|
dev@64
|
231
|
dev@67
|
232 // TODO refactor - this is messy
|
dev@67
|
233 interface FoldsToNumber<T> {
|
dev@67
|
234 reduce(fn: (previousValue: number,
|
dev@67
|
235 currentValue: T,
|
dev@67
|
236 currentIndex: number,
|
dev@67
|
237 array: ArrayLike<T>) => number,
|
dev@67
|
238 initialValue?: number): number;
|
dev@67
|
239 }
|
dev@64
|
240
|
dev@67
|
241 // TODO potentially change impl., i.e avoid reduce
|
dev@67
|
242 const findMin = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
|
dev@67
|
243 return arr.reduce((min, val) => Math.min(min, getElement(val)), Infinity);
|
dev@67
|
244 };
|
dev@67
|
245
|
dev@67
|
246 const findMax = <T>(arr: FoldsToNumber<T>, getElement: (x: T) => number): number => {
|
dev@67
|
247 return arr.reduce((min, val) => Math.max(min, getElement(val)), -Infinity);
|
dev@67
|
248 };
|
dev@67
|
249
|
dev@67
|
250 const min = findMin<Feature>(featureData, (x: Feature) => {
|
dev@67
|
251 return findMin<number>(x.featureValues, y => y);
|
dev@67
|
252 });
|
dev@67
|
253
|
dev@67
|
254 const max = findMax<Feature>(featureData, (x: Feature) => {
|
dev@67
|
255 return findMax<number>(x.featureValues, y => y);
|
dev@67
|
256 });
|
dev@67
|
257
|
dev@67
|
258 const barHeight = 1.0 / height;
|
dev@64
|
259 return [
|
dev@67
|
260 featureData.reduce((bars, feature) => {
|
dev@67
|
261 const staticProperties = {
|
dev@64
|
262 x: toSeconds(feature.timestamp),
|
dev@64
|
263 width: toSeconds(feature.duration),
|
dev@67
|
264 height: min + barHeight,
|
dev@64
|
265 color: colour,
|
dev@64
|
266 opacity: 0.8
|
dev@67
|
267 };
|
dev@67
|
268 // TODO avoid copying Float32Array to an array - map is problematic here
|
dev@67
|
269 return bars.concat([...feature.featureValues]
|
dev@67
|
270 .map(val => Object.assign({}, staticProperties, {y: val})))
|
dev@67
|
271 }, []),
|
dev@67
|
272 {yDomain: [min, max + barHeight], height: height} as any
|
dev@67
|
273 ];
|
dev@64
|
274 } else {
|
dev@64
|
275 return [featureData.map(feature => {
|
dev@64
|
276 return {
|
dev@64
|
277 x: toSeconds(feature.timestamp),
|
dev@64
|
278 width: toSeconds(feature.duration),
|
dev@64
|
279 color: colour,
|
dev@64
|
280 opacity: 0.8
|
dev@64
|
281 }
|
dev@64
|
282 }), {height: height}];
|
dev@64
|
283 }
|
dev@64
|
284 };
|
dev@64
|
285
|
dev@64
|
286 let segmentLayer = new wavesUI.helpers.SegmentLayer(
|
dev@64
|
287 ...getSegmentArgs()
|
dev@64
|
288 );
|
dev@64
|
289 this.colouredLayers.set(this.addLayer(
|
dev@64
|
290 segmentLayer,
|
dev@64
|
291 mainTrack,
|
dev@64
|
292 this.timeline.timeContext
|
dev@64
|
293 ), colour);
|
dev@64
|
294 }
|
dev@64
|
295
|
dev@64
|
296 break;
|
dev@64
|
297 }
|
dev@67
|
298 default:
|
dev@67
|
299 console.log('Cannot render an appropriate layer.');
|
dev@63
|
300 }
|
dev@59
|
301
|
dev@56
|
302 this.timeline.tracks.update();
|
dev@53
|
303 }
|
dev@53
|
304
|
dev@53
|
305 private animate(): void {
|
dev@31
|
306 this.ngZone.runOutsideAngular(() => {
|
dev@31
|
307 // listen for time passing...
|
dev@31
|
308 const updateSeekingCursor = () => {
|
dev@53
|
309 const currentTime = this.audioService.getCurrentTime();
|
dev@53
|
310 this.cursorLayer.currentPosition = currentTime;
|
dev@53
|
311 this.cursorLayer.update();
|
dev@53
|
312
|
dev@53
|
313 const currentOffset = this.timeline.timeContext.offset;
|
dev@53
|
314 const offsetTimestamp = currentOffset
|
dev@53
|
315 + currentTime;
|
dev@53
|
316
|
dev@53
|
317 const visibleDuration = this.timeline.timeContext.visibleDuration;
|
dev@53
|
318 // TODO reduce duplication between directions and make more declarative
|
dev@53
|
319 // this kinda logic should also be tested
|
dev@53
|
320 const mustPageForward = offsetTimestamp > visibleDuration;
|
dev@53
|
321 const mustPageBackward = currentTime < -currentOffset;
|
dev@53
|
322
|
dev@53
|
323 if (mustPageForward) {
|
dev@53
|
324 const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration;
|
dev@53
|
325
|
dev@53
|
326 this.timeline.timeContext.offset = hasSkippedMultiplePages
|
dev@53
|
327 ? -currentTime + 0.5 * visibleDuration
|
dev@53
|
328 : currentOffset - visibleDuration;
|
dev@51
|
329 this.timeline.tracks.update();
|
dev@34
|
330 }
|
dev@53
|
331
|
dev@53
|
332 if (mustPageBackward) {
|
dev@53
|
333 const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset;
|
dev@53
|
334 this.timeline.timeContext.offset = hasSkippedMultiplePages
|
dev@53
|
335 ? -currentTime + 0.5 * visibleDuration
|
dev@53
|
336 : currentOffset + visibleDuration;
|
dev@51
|
337 this.timeline.tracks.update();
|
dev@34
|
338 }
|
dev@53
|
339
|
dev@53
|
340 if (this.isPlaying)
|
dev@53
|
341 requestAnimationFrame(updateSeekingCursor);
|
dev@31
|
342 };
|
dev@31
|
343 updateSeekingCursor();
|
dev@31
|
344 });
|
dev@6
|
345 }
|
dev@16
|
346
|
dev@59
|
347 private addLayer(layer: Layer, track: Track, timeContext: any, isAxis: boolean = false): DisposableIndex {
|
dev@54
|
348 timeContext.zoom = 1.0;
|
dev@54
|
349 if (!layer.timeContext) {
|
dev@54
|
350 layer.setTimeContext(isAxis ?
|
dev@54
|
351 timeContext : new wavesUI.core.LayerTimeContext(timeContext));
|
dev@54
|
352 }
|
dev@54
|
353 track.add(layer);
|
dev@54
|
354 layer.render();
|
dev@54
|
355 layer.update();
|
dev@59
|
356 return this.disposableLayers.push(layer) - 1;
|
dev@59
|
357 }
|
dev@59
|
358
|
dev@59
|
359 private static changeColour(layer: Layer, colour: string): void {
|
dev@59
|
360 const butcherShapes = (shape) => {
|
dev@59
|
361 shape.install({color: () => colour});
|
dev@59
|
362 shape.params.color = colour;
|
dev@59
|
363 shape.update(layer._renderingContext, layer.data);
|
dev@59
|
364 };
|
dev@59
|
365
|
dev@59
|
366 layer._$itemCommonShapeMap.forEach(butcherShapes);
|
dev@59
|
367 layer._$itemShapeMap.forEach(butcherShapes);
|
dev@59
|
368 layer.render();
|
dev@59
|
369 layer.update();
|
dev@54
|
370 }
|
dev@54
|
371
|
dev@51
|
372 ngOnDestroy(): void {
|
dev@51
|
373 this.featureExtractionSubscription.unsubscribe();
|
dev@53
|
374 this.playingStateSubscription.unsubscribe();
|
dev@53
|
375 this.seekedSubscription.unsubscribe();
|
dev@51
|
376 }
|
dev@6
|
377 }
|