import {
  Component, OnInit, ViewChild, ElementRef, Input, AfterViewInit, NgZone,
  OnDestroy
} from '@angular/core';
import {AudioPlayerService} from '../services/audio-player/audio-player.service';
import wavesUI from 'waves-ui';
import {FeatureList} from 'piper/Feature';
import {FeatureExtractionService} from '../services/feature-extraction/feature-extraction.service';
import {Subscription} from 'rxjs';
import {toSeconds} from 'piper';

type Timeline = any; // TODO: what is the actual type? Start a .d.ts for waves-ui (see sketch below).
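// A possible starting point for that .d.ts -- a loose, unchecked sketch that
// declares only the pieces of waves-ui this component actually touches, all
// still typed as `any`:
//
//   declare module 'waves-ui' {
//     const wavesUI: {
//       core: { Timeline: any };
//       helpers: {
//         TimeAxisLayer: any;
//         WaveformLayer: any;
//         CursorLayer: any;
//         BreakpointLayer: any;
//       };
//       states: { CenteredZoomState: any };
//     };
//     export default wavesUI;
//   }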

@Component({
  selector: 'app-waveform',
  templateUrl: './waveform.component.html',
  styleUrls: ['./waveform.component.css']
})
export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {

  @ViewChild('track') trackDiv: ElementRef;

  private _audioBuffer: AudioBuffer = undefined;
  private timeline: Timeline = undefined;
  private cursorLayer: any = undefined;

  @Input()
  set audioBuffer(buffer: AudioBuffer) {
    this._audioBuffer = buffer || undefined;
    // Re-render as soon as a new buffer is supplied.
    if (this.audioBuffer)
      this.renderWaveform(this.audioBuffer);
  }

  get audioBuffer(): AudioBuffer {
    return this._audioBuffer;
  }

  private featureExtractionSubscription: Subscription;
  private playingStateSubscription: Subscription;
  private seekedSubscription: Subscription;
  private isPlaying: boolean;

  constructor(private audioService: AudioPlayerService,
              private piperService: FeatureExtractionService,
              public ngZone: NgZone) {
    this.isPlaying = false;
    // Plot newly extracted features on the timeline as they arrive.
    this.featureExtractionSubscription = piperService.featuresExtracted$.subscribe(
      features => {
        this.renderFeatures(features);
      });
    // Start the cursor animation loop whenever playback starts.
    this.playingStateSubscription = audioService.playingStateChange$.subscribe(
      isPlaying => {
        this.isPlaying = isPlaying;
        if (this.isPlaying)
          this.animate();
      });
    // A seek while paused still needs a one-off cursor update.
    this.seekedSubscription = audioService.seeked$.subscribe(() => {
      if (!this.isPlaying)
        this.animate();
    });
  }

  ngOnInit() {
  }

  ngAfterViewInit(): void {
    this.timeline = this.renderTimeline();
  }

  // Build a fresh waves-ui timeline sized to the #track element, containing a
  // single 'main' track with a time axis.
  renderTimeline(duration: number = 1.0): Timeline {
    const track: HTMLElement = this.trackDiv.nativeElement;
    track.innerHTML = '';
    const bounds = track.getBoundingClientRect();
    const height: number = bounds.height;
    const width: number = bounds.width;
    const pixelsPerSecond = width / duration;
    const timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
    timeline.timeContext.offset = 0.5 * timeline.timeContext.visibleDuration;
    timeline.createTrack(track, height, 'main');

    // time axis
    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
      height: height,
      color: 'gray'
    });

    timeline.addLayer(timeAxis, 'main', 'default', true);
    return timeline;
  }

  renderWaveform(buffer: AudioBuffer): void {
    const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height;
    // Rebuild the timeline so its scale matches the buffer's duration.
    this.timeline = this.renderTimeline(buffer.duration);
    const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
      top: 10,
      height: height * 0.9,
      color: 'darkblue'
    });
    this.timeline.addLayer(waveformLayer, 'main');

    // Playback cursor, kept in sync with the audio clock by animate().
    this.cursorLayer = new wavesUI.helpers.CursorLayer({
      height: height
    });
    this.timeline.addLayer(this.cursorLayer, 'main');
    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
    this.animate();
  }

  // TODO refactor - this doesn't belong here
  private renderFeatures(features: FeatureList): void {
    const plotData = features.map(feature => {
      return {
        cx: toSeconds(feature.timestamp),
        cy: feature.featureValues[0]
      };
    });
    this.timeline.addLayer(
      new wavesUI.helpers.BreakpointLayer(plotData, {color: 'green'}),
      'main'
    );
  }

  // Drive the playback cursor from the audio clock, paging the visible window
  // when the cursor leaves it. Runs outside Angular's zone so the per-frame
  // updates don't trigger change detection.
  private animate(): void {
    this.ngZone.runOutsideAngular(() => {
      // listen for time passing...
      const updateSeekingCursor = () => {
        const currentTime = this.audioService.getCurrentTime();
        this.cursorLayer.currentPosition = currentTime;
        this.cursorLayer.update();

        const currentOffset = this.timeline.timeContext.offset;
        const offsetTimestamp = currentOffset + currentTime;

        const visibleDuration = this.timeline.timeContext.visibleDuration;
        // TODO reduce duplication between directions and make more declarative
        // (see the sketch after this method); this logic should also be tested
        const mustPageForward = offsetTimestamp > visibleDuration;
        const mustPageBackward = currentTime < -currentOffset;

        if (mustPageForward) {
          const hasSkippedMultiplePages =
            offsetTimestamp - visibleDuration > visibleDuration;

          // After a large jump, re-centre on the cursor; otherwise advance by
          // exactly one page.
          this.timeline.timeContext.offset = hasSkippedMultiplePages
            ? -currentTime + 0.5 * visibleDuration
            : currentOffset - visibleDuration;
          this.timeline.tracks.update();
        }

        if (mustPageBackward) {
          const hasSkippedMultiplePages =
            currentTime + visibleDuration < -currentOffset;
          this.timeline.timeContext.offset = hasSkippedMultiplePages
            ? -currentTime + 0.5 * visibleDuration
            : currentOffset + visibleDuration;
          this.timeline.tracks.update();
        }

        if (this.isPlaying)
          requestAnimationFrame(updateSeekingCursor);
      };
      updateSeekingCursor();
    });
  }
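
  // One possible shape for the refactor the TODO above asks for -- a hedged
  // sketch, not part of this codebase: a pure helper that returns the new
  // timeline offset (or null when no paging is needed), mirroring the logic
  // in updateSeekingCursor so both directions share one testable function.
  //
  //   const pagedOffset = (currentTime: number,
  //                        currentOffset: number,
  //                        visibleDuration: number): number | null => {
  //     const offsetTimestamp = currentOffset + currentTime;
  //     if (offsetTimestamp > visibleDuration) {        // page forward
  //       return offsetTimestamp - visibleDuration > visibleDuration
  //         ? -currentTime + 0.5 * visibleDuration
  //         : currentOffset - visibleDuration;
  //     }
  //     if (currentTime < -currentOffset) {             // page backward
  //       return currentTime + visibleDuration < -currentOffset
  //         ? -currentTime + 0.5 * visibleDuration
  //         : currentOffset + visibleDuration;
  //     }
  //     return null;
  //   };
  //
  // The caller would assign the returned offset to
  // this.timeline.timeContext.offset and call this.timeline.tracks.update()
  // only when the result is non-null.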

  ngOnDestroy(): void {
    this.featureExtractionSubscription.unsubscribe();
    this.playingStateSubscription.unsubscribe();
    this.seekedSubscription.unsubscribe();
  }
}
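
// Example usage from a parent template -- the host component and its
// `decodedBuffer` property are hypothetical; only the 'app-waveform' selector
// and the audioBuffer input above come from this file:
//
//   <app-waveform [audioBuffer]="decodedBuffer"></app-waveform>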