ugly-duckling: comparison src/app/waveform/waveform.component.ts @ 189:a50feba0d7f0
Share a Timeline instance instead of the previous event-emitting TimelineTimeContext bodge.
author | Lucas Thompson <dev@lucas.im> |
---|---|
date | Wed, 22 Mar 2017 14:34:43 +0000 |
parents | f957e93dc979 |
children | aa1c92c553cb |
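In outline: the component previously took a `TimelineTimeContext & PartialEventEmitter` input and subscribed to its `'zoom'` and `'offset'` events to redraw itself; after this change the parent passes in one shared waves-ui `Timeline`, and a new `trackIdPrefix` input namespaces each component's track ids so several components can register on that one instance. A minimal sketch of the resulting pattern, assuming the `wavesUI` import resolves to the `waves-ui` package; the `createTrack` and `tracks.update` calls are the ones used in the diff below, while the helper names and parent-side wiring are illustrative:

```typescript
import * as wavesUI from 'waves-ui';

// waves-ui ships without typings here, so Timeline is `any`, matching the
// component's own `type Layer = any` convention.
type Timeline = any;

// The parent owns the single Timeline and hands it to each WaveformComponent
// via `@Input() timeline: Timeline`. makeSharedTimeline is a hypothetical
// helper standing in for whatever the parent actually does.
function makeSharedTimeline(pixelsPerSecond: number, width: number): Timeline {
  return new wavesUI.core.Timeline(pixelsPerSecond, width);
}

// Each component registers its track under a namespaced id (`wave-${prefix}`),
// so several components can share one Timeline without id collisions.
function registerWaveformTrack(timeline: Timeline,
                               trackDiv: HTMLElement,
                               height: number,
                               trackIdPrefix: string): void {
  timeline.createTrack(trackDiv, height, `wave-${trackIdPrefix}`);
}

// Zoom and offset now live on the shared instance's timeContext; a single
// tracks.update() refreshes every registered track, which is what the removed
// 'zoom'/'offset' subscriptions used to do by hand for each component.
function panShared(timeline: Timeline, seconds: number): void {
  timeline.timeContext.offset += seconds;
  timeline.tracks.update();
}
```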
188:3f84bd16c1e8 | 189:a50feba0d7f0 |
---|---|
14 } from "piper/HigherLevelUtilities"; | 14 } from "piper/HigherLevelUtilities"; |
15 import {toSeconds} from "piper"; | 15 import {toSeconds} from "piper"; |
16 import {FeatureList, Feature} from "piper/Feature"; | 16 import {FeatureList, Feature} from "piper/Feature"; |
17 import * as Hammer from 'hammerjs'; | 17 import * as Hammer from 'hammerjs'; |
18 import {WavesSpectrogramLayer} from "../spectrogram/Spectrogram"; | 18 import {WavesSpectrogramLayer} from "../spectrogram/Spectrogram"; |
19 import {PartialEventEmitter} from "../notebook-feed/notebook-feed.component"; | |
20 | 19 |
21 type Layer = any; | 20 type Layer = any; |
22 type Track = any; | 21 type Track = any; |
23 type Colour = string; | 22 type Colour = string; |
24 | 23 |
29 }) | 28 }) |
30 export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy { | 29 export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy { |
31 | 30 |
32 @ViewChild('track') trackDiv: ElementRef; | 31 @ViewChild('track') trackDiv: ElementRef; |
33 | 32 |
34 @Input() timeContext: TimelineTimeContext & PartialEventEmitter; | 33 @Input() timeline: Timeline; |
| 34 @Input() trackIdPrefix: string; |
35 private _audioBuffer: AudioBuffer; | 35 private _audioBuffer: AudioBuffer; |
36 private timeline: Timeline; | |
37 private cursorLayer: any; | 36 private cursorLayer: any; |
38 private layers: Layer[]; | 37 private layers: Layer[]; |
39 | 38 |
40 @Input() | 39 @Input() |
41 set audioBuffer(buffer: AudioBuffer) { | 40 set audioBuffer(buffer: AudioBuffer) { |
101 | 100 |
102 ngOnInit() { | 101 ngOnInit() { |
103 } | 102 } |
104 | 103 |
105 ngAfterViewInit(): void { | 104 ngAfterViewInit(): void { |
| 105 this.trackIdPrefix = this.trackIdPrefix || "default"; |
106 this.renderTimeline(); | 106 this.renderTimeline(); |
107 } | 107 } |
108 | 108 |
109 renderTimeline(duration: number = 1.0): Timeline { | 109 renderTimeline(duration: number = 1.0): Timeline { |
110 const track: HTMLElement = this.trackDiv.nativeElement; | 110 const track: HTMLElement = this.trackDiv.nativeElement; |
116 this.timeline.pixelsPerSecond = pixelsPerSecond; | 116 this.timeline.pixelsPerSecond = pixelsPerSecond; |
117 this.timeline.visibleWidth = width; | 117 this.timeline.visibleWidth = width; |
118 } else { | 118 } else { |
119 this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width); | 119 this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width); |
120 } | 120 } |
121 if (this.timeContext instanceof wavesUI.core.TimelineTimeContext) { | 121 this.timeline.createTrack(track, height, `wave-${this.trackIdPrefix}`); |
122 console.warn('Has shared timeline'); | 122 // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`); |
123 this.timeline.timeContext = this.timeContext; | 123 // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`); |
124 this.timeContext.on('zoom', () => { | |
125 this.timeline.tracks.update(); | |
126 }); | |
127 this.timeContext.on('offset', () => { | |
128 this.timeline.tracks.update(); | |
129 }); | |
130 } | |
131 this.timeline.createTrack(track, height, 'wave'); | |
132 // this.timeline.createTrack(track, height/2, 'wave'); | |
133 // this.timeline.createTrack(track, height/2, 'grid'); | |
134 } | 124 } |
135 | 125 |
136 estimatePercentile(matrix, percentile) { | 126 estimatePercentile(matrix, percentile) { |
137 // our sample is not evenly distributed across the whole data set: | 127 // our sample is not evenly distributed across the whole data set: |
138 // it is guaranteed to include at least one sample from every | 128 // it is guaranteed to include at least one sample from every |
271 } | 261 } |
272 | 262 |
273 renderWaveform(buffer: AudioBuffer): void { | 263 renderWaveform(buffer: AudioBuffer): void { |
274 // const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2; | 264 // const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2; |
275 const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height; | 265 const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height; |
276 const waveTrack = this.timeline.getTrackById('wave'); | 266 const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`); |
277 if (this.timeline) { | 267 if (this.timeline) { |
278 // resize | 268 // resize |
279 const width = this.trackDiv.nativeElement.getBoundingClientRect().width; | 269 const width = this.trackDiv.nativeElement.getBoundingClientRect().width; |
280 | 270 |
281 this.clearTimeline(); | 271 this.clearTimeline(); |
296 this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true); | 286 this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true); |
297 | 287 |
298 const nchannels = buffer.numberOfChannels; | 288 const nchannels = buffer.numberOfChannels; |
299 const totalWaveHeight = height * 0.9; | 289 const totalWaveHeight = height * 0.9; |
300 const waveHeight = totalWaveHeight / nchannels; | 290 const waveHeight = totalWaveHeight / nchannels; |
301 | 291 |
302 for (let ch = 0; ch < nchannels; ++ch) { | 292 for (let ch = 0; ch < nchannels; ++ch) { |
303 console.log("about to construct a waveform layer for channel " + ch); | 293 console.log("about to construct a waveform layer for channel " + ch); |
304 const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, { | 294 const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, { |
305 top: (height - totalWaveHeight)/2 + waveHeight * ch, | 295 top: (height - totalWaveHeight)/2 + waveHeight * ch, |
306 height: waveHeight, | 296 height: waveHeight, |
401 this.animate(); | 391 this.animate(); |
402 } | 392 } |
403 | 393 |
404 renderSpectrogram(buffer: AudioBuffer): void { | 394 renderSpectrogram(buffer: AudioBuffer): void { |
405 const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2; | 395 const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2; |
406 const gridTrack = this.timeline.getTrackById('grid'); | 396 const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`); |
407 | 397 |
408 const spectrogramLayer = new WavesSpectrogramLayer(buffer, { | 398 const spectrogramLayer = new WavesSpectrogramLayer(buffer, { |
409 top: height * 0.05, | 399 top: height * 0.05, |
410 height: height * 0.9, | 400 height: height * 0.9, |
411 stepSize: 512, | 401 stepSize: 512, |
423 if (!extracted.hasOwnProperty('features') || !extracted.hasOwnProperty('outputDescriptor')) return; | 413 if (!extracted.hasOwnProperty('features') || !extracted.hasOwnProperty('outputDescriptor')) return; |
424 if (!extracted.features.hasOwnProperty('shape') || !extracted.features.hasOwnProperty('data')) return; | 414 if (!extracted.features.hasOwnProperty('shape') || !extracted.features.hasOwnProperty('data')) return; |
425 const features: FeatureCollection = (extracted.features as FeatureCollection); | 415 const features: FeatureCollection = (extracted.features as FeatureCollection); |
426 const outputDescriptor = extracted.outputDescriptor; | 416 const outputDescriptor = extracted.outputDescriptor; |
427 const height = this.trackDiv.nativeElement.getBoundingClientRect().height / 2; | 417 const height = this.trackDiv.nativeElement.getBoundingClientRect().height / 2; |
428 const waveTrack = this.timeline.getTrackById('wave'); | 418 const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`); |
429 | 419 |
430 // TODO refactor all of this | 420 // TODO refactor all of this |
431 switch (features.shape) { | 421 switch (features.shape) { |
432 case 'vector': { | 422 case 'vector': { |
433 const stepDuration = (features as FixedSpacedFeatures).stepDuration; | 423 const stepDuration = (features as FixedSpacedFeatures).stepDuration; |
614 // this kinda logic should also be tested | 604 // this kinda logic should also be tested |
615 const mustPageForward = offsetTimestamp > visibleDuration; | 605 const mustPageForward = offsetTimestamp > visibleDuration; |
616 const mustPageBackward = currentTime < -currentOffset; | 606 const mustPageBackward = currentTime < -currentOffset; |
617 | 607 |
618 if (mustPageForward) { | 608 if (mustPageForward) { |
619 console.warn('page forward', mustPageForward, offsetTimestamp, visibleDuration); | |
620 const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration; | 609 const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration; |
621 | 610 |
622 this.timeline.timeContext.offset = hasSkippedMultiplePages ? | 611 this.timeline.timeContext.offset = hasSkippedMultiplePages ? |
623 -currentTime + 0.5 * visibleDuration : | 612 -currentTime + 0.5 * visibleDuration : |
624 currentOffset - visibleDuration; | 613 currentOffset - visibleDuration; |
625 this.timeline.tracks.update(); | 614 this.timeline.tracks.update(); |
626 } else { | |
627 console.warn('no page', mustPageForward, offsetTimestamp, visibleDuration); | |
628 } | 615 } |
629 | 616 |
630 if (mustPageBackward) { | 617 if (mustPageBackward) { |
631 console.warn('page back'); | |
632 const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset; | 618 const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset; |
633 this.timeline.timeContext.offset = hasSkippedMultiplePages ? | 619 this.timeline.timeContext.offset = hasSkippedMultiplePages ? |
634 -currentTime + 0.5 * visibleDuration : | 620 -currentTime + 0.5 * visibleDuration : |
635 currentOffset + visibleDuration; | 621 currentOffset + visibleDuration; |
636 this.timeline.tracks.update(); | 622 this.timeline.tracks.update(); |
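
The last hunk also strips the `console.warn` calls from the playhead paging logic without changing its behaviour. That rule, distilled into a pure function for readability (a sketch: it assumes `offsetTimestamp` is `currentOffset + currentTime`, which is consistent with the offset updates shown; the component itself mutates `timeline.timeContext.offset` in place and calls `tracks.update()`):

```typescript
// Returns the next timeline offset for a playhead at currentTime (seconds),
// given the current (negative-going) offset and the viewport width in seconds.
function nextOffset(currentOffset: number,
                    currentTime: number,
                    visibleDuration: number): number {
  // Assumed definition; its declaration falls outside the hunk shown above.
  const offsetTimestamp = currentOffset + currentTime;

  if (offsetTimestamp > visibleDuration) { // mustPageForward
    // A seek of more than one page recentres the playhead; otherwise the
    // view advances by exactly one page.
    const hasSkippedMultiplePages =
      offsetTimestamp - visibleDuration > visibleDuration;
    return hasSkippedMultiplePages
      ? -currentTime + 0.5 * visibleDuration
      : currentOffset - visibleDuration;
  }

  if (currentTime < -currentOffset) { // mustPageBackward
    // Symmetric rule for seeking back past the left edge of the view.
    const hasSkippedMultiplePages =
      currentTime + visibleDuration < -currentOffset;
    return hasSkippedMultiplePages
      ? -currentTime + 0.5 * visibleDuration
      : currentOffset + visibleDuration;
  }

  return currentOffset; // playhead still on screen: no paging
}
```

For example, with an offset of -100 s and a 20 s viewport, a seek to 150 s skips multiple pages, so the view recentres: `nextOffset(-100, 150, 20)` is -140, putting the playhead mid-screen.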