import {
  Component, OnInit, ViewChild, ElementRef, Input, AfterViewInit
} from '@angular/core';

5 declare var wavesUI: any; // TODO non-global app scope import
|
dev@20
|
6 type Timeline = any; // TODO what type actually is it.. start a .d.ts for waves-ui?
|
dev@6
|
7
|
dev@6
|
8 @Component({
|
dev@6
|
9 selector: 'app-waveform',
|
dev@6
|
10 templateUrl: './waveform.component.html',
|
dev@6
|
11 styleUrls: ['./waveform.component.css']
|
dev@6
|
12 })
|
dev@20
|
13 export class WaveformComponent implements OnInit, AfterViewInit {
|
dev@20
|
14
|
dev@8
|
15 @ViewChild('track') trackDiv: ElementRef;
|
dev@6
|
16
|
dev@16
|
17 private _audioBuffer: AudioBuffer = undefined;
|
dev@16
|
18
|
dev@16
|
19 @Input()
|
dev@16
|
20 set audioBuffer(buffer: AudioBuffer) {
|
dev@16
|
21 this._audioBuffer = buffer || undefined;
|
dev@20
|
22 if (this.audioBuffer)
|
dev@20
|
23 this.renderWaveform(this.audioBuffer);
|
dev@16
|
24 }
|
dev@16
|
25
|
dev@16
|
26 get audioBuffer(): AudioBuffer {
|
dev@16
|
27 return this._audioBuffer;
|
dev@16
|
28 }
|
dev@16
|
29
|
dev@20
|
30 constructor() {}
|
dev@10
|
31 ngOnInit() {}
|
dev@10
|
32
|
dev@10
|
33 ngAfterViewInit(): void {
|
dev@20
|
34 this.renderTimeline();
|
dev@20
|
35 }
|
dev@20
|
36
|
dev@20
|
37 renderTimeline(duration: number = 1.0): Timeline {
|
dev@18
|
38 const track: HTMLElement = this.trackDiv.nativeElement;
|
dev@20
|
39 track.innerHTML = "";
|
dev@18
|
40 const height: number = track.getBoundingClientRect().height;
|
dev@18
|
41 const width: number = track.getBoundingClientRect().width;
|
dev@18
|
42 const pixelsPerSecond = width / duration;
|
dev@18
|
43 const timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
|
dev@18
|
44 timeline.createTrack(track, height, 'main');
|
dev@18
|
45
|
dev@18
|
46 // time axis
|
dev@18
|
47 const timeAxis = new wavesUI.helpers.TimeAxisLayer({
|
dev@18
|
48 height: height,
|
dev@18
|
49 color: 'gray'
|
dev@18
|
50 });
|
dev@18
|
51
|
dev@18
|
52 timeline.addLayer(timeAxis, 'main', 'default', true);
|
dev@18
|
53 timeline.state = new wavesUI.states.CenteredZoomState(timeline);
|
dev@20
|
54 return timeline;
|
dev@16
|
55 }
|
dev@16
|
56
|
dev@20
|
57 renderWaveform(buffer: AudioBuffer): void {
|
dev@20
|
58 const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height;
|
dev@20
|
59 const timeline: Timeline = this.renderTimeline(buffer.duration);
|
dev@20
|
60 const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
|
dev@10
|
61 top: 10,
|
dev@20
|
62 height: height * 0.9,
|
dev@16
|
63 color: 'darkblue'
|
dev@16
|
64 });
|
dev@20
|
65 (timeline as any).addLayer(waveformLayer, 'main');
|
dev@6
|
66 }
|
dev@16
|
67
|
dev@6
|
68 }
|