import {
  Component,
  OnInit,
  ViewChild,
  ElementRef,
  Input,
  AfterViewInit,
  NgZone,
  OnDestroy,
  ChangeDetectorRef
} from '@angular/core';
import {
  AudioPlayerService,
  AudioResource,
  AudioResourceError
} from '../services/audio-player/audio-player.service';
import wavesUI from 'waves-ui-piper';
import {
  FeatureExtractionService
} from '../services/feature-extraction/feature-extraction.service';
import {Subscription} from 'rxjs/Subscription';
import {
  FeatureCollection,
  SimpleResponse,
  VectorFeatures,
  MatrixFeatures,
  TrackFeature,
  TrackFeatures
} from 'piper/HigherLevelUtilities';
import {toSeconds} from 'piper';
import {FeatureList, Feature} from 'piper/Feature';
import * as Hammer from 'hammerjs';
import {WavesSpectrogramLayer} from '../spectrogram/Spectrogram';

// waves-ui-piper objects are untyped; these aliases are for readability
type Timeline = any;
type Layer = any;
type Track = any;
type Colour = string;

function* createColourGenerator(colours) {
  let index = 0;
  const nColours = colours.length;
  while (true) {
    yield colours[index = ++index % nColours];
  }
}

const defaultColourGenerator = createColourGenerator([
  '#0868ac', // "sapphire blue", our waveform / header colour
  '#c33c54', // "brick red"
  '#17bebb', // "tiffany blue"
  '#001021', // "rich black"
  '#fa8334', // "mango tango"
  '#034748'  // "deep jungle green"
]);
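
// Because the generator pre-increments its index, the first call to
// defaultColourGenerator.next() yields '#c33c54' (index 1), not
// '#0868ac': the sapphire blue is in effect reserved for the waveform
// itself. Illustrative behaviour:
//   defaultColourGenerator.next().value  // => '#c33c54'
//   defaultColourGenerator.next().value  // => '#17bebb'
//   // ...and back around to '#0868ac' after '#034748'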

@Component({
  selector: 'ugly-waveform',
  templateUrl: './waveform.component.html',
  styleUrls: ['./waveform.component.css']
})
export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy {

  @ViewChild('track') trackDiv: ElementRef;

  @Input() set width(width: number) {
    if (this.timeline) {
      requestAnimationFrame(() => {
        this.timeline.timeContext.visibleWidth = width;
        this.timeline.tracks.update();
      });
    }
  }

  @Input() timeline: Timeline;
  @Input() trackIdPrefix: string;

  @Input() set isSubscribedToExtractionService(isSubscribed: boolean) {
    if (isSubscribed) {
      if (this.featureExtractionSubscription) {
        return;
      }

      this.featureExtractionSubscription =
        this.piperService.featuresExtracted$.subscribe(
          features => {
            this.renderFeatures(features, defaultColourGenerator.next().value);
          });
    } else {
      if (this.featureExtractionSubscription) {
        this.featureExtractionSubscription.unsubscribe();
      }
    }
  }

  @Input() set isSubscribedToAudioService(isSubscribed: boolean) {
    this._isSubscribedToAudioService = isSubscribed;
    if (isSubscribed) {
      if (this.onAudioDataSubscription) {
        return;
      }

      this.onAudioDataSubscription =
        this.audioService.audioLoaded$.subscribe(res => {
          const wasError = (res as AudioResourceError).message != null;

          if (wasError) {
            console.warn('No audio, display error?');
          } else {
            this.audioBuffer = (res as AudioResource).samples;
          }
        });
    } else {
      if (this.onAudioDataSubscription) {
        this.onAudioDataSubscription.unsubscribe();
      }
    }
  }

  get isSubscribedToAudioService(): boolean {
    return this._isSubscribedToAudioService;
  }

  @Input() set isOneShotExtractor(isOneShot: boolean) {
    this._isOneShotExtractor = isOneShot;
  }

  get isOneShotExtractor(): boolean {
    return this._isOneShotExtractor;
  }

  @Input() set isSeeking(isSeeking: boolean) {
    this._isSeeking = isSeeking;
    if (isSeeking) {
      if (this.seekedSubscription) {
        return;
      }
      if (this.playingStateSubscription) {
        return;
      }

      this.seekedSubscription = this.audioService.seeked$.subscribe(() => {
        if (!this.isPlaying) {
          this.animate();
        }
      });
      this.playingStateSubscription =
        this.audioService.playingStateChange$.subscribe(
          isPlaying => {
            this.isPlaying = isPlaying;
            if (this.isPlaying) {
              this.animate();
            }
          });
    } else {
      if (this.isPlaying) {
        this.isPlaying = false;
      }
      if (this.playingStateSubscription) {
        this.playingStateSubscription.unsubscribe();
      }
      if (this.seekedSubscription) {
        this.seekedSubscription.unsubscribe();
      }
    }
  }

  get isSeeking(): boolean {
    return this._isSeeking;
  }

  set audioBuffer(buffer: AudioBuffer) {
    this._audioBuffer = buffer || undefined;
    if (this.audioBuffer) {
      this.renderWaveform(this.audioBuffer);
      // this.renderSpectrogram(this.audioBuffer);
    }
  }

  get audioBuffer(): AudioBuffer {
    return this._audioBuffer;
  }

  private _audioBuffer: AudioBuffer;
  private _isSubscribedToAudioService: boolean;
  private _isOneShotExtractor: boolean;
  private _isSeeking: boolean;
  private cursorLayer: any;
  private highlightLayer: any;
  private layers: Layer[];
  private featureExtractionSubscription: Subscription;
  private playingStateSubscription: Subscription;
  private seekedSubscription: Subscription;
  private onAudioDataSubscription: Subscription;
  private isPlaying: boolean;
  private zoomOnMouseDown: number;
  private offsetOnMouseDown: number;
  private hasShot: boolean;
  private isLoading: boolean;

  private static changeColour(layer: Layer, colour: string): void {
    const butcherShapes = (shape) => {
      shape.install({color: () => colour});
      shape.params.color = colour;
      shape.update(layer._renderingContext, layer.data);
    };

    layer._$itemCommonShapeMap.forEach(butcherShapes);
    layer._$itemShapeMap.forEach(butcherShapes);
    layer.render();
    layer.update();
  }

  constructor(private audioService: AudioPlayerService,
              private piperService: FeatureExtractionService,
              private ngZone: NgZone,
              private ref: ChangeDetectorRef) {
    this.isSubscribedToAudioService = true;
    this.isSeeking = true;
    this.layers = [];
    this.audioBuffer = undefined;
    this.timeline = undefined;
    this.cursorLayer = undefined;
    this.highlightLayer = undefined;
    this.isPlaying = false;
    this.isLoading = true;
  }
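
  // Note: the two setter assignments at the top of the constructor go
  // through the @Input setters above, so the audio-data, seek and
  // playing-state subscriptions are wired up at construction time,
  // before any template bindings are applied.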

  ngOnInit() {
  }

  ngAfterViewInit(): void {
    this.trackIdPrefix = this.trackIdPrefix || 'default';
    if (this.timeline) {
      this.renderTimeline(null, true, true);
    } else {
      this.renderTimeline();
    }
  }

  renderTimeline(duration: number = 1.0,
                 useExistingDuration: boolean = false,
                 isInitialRender: boolean = false): Timeline {
    const track: HTMLElement = this.trackDiv.nativeElement;
    track.innerHTML = '';
    const height: number = track.getBoundingClientRect().height;
    const width: number = track.getBoundingClientRect().width;
    const pixelsPerSecond = width / duration;
    const hasExistingTimeline = this.timeline instanceof wavesUI.core.Timeline;

    if (hasExistingTimeline) {
      if (!useExistingDuration) {
        this.timeline.pixelsPerSecond = pixelsPerSecond;
        this.timeline.visibleWidth = width;
      }
    } else {
      this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width);
    }
    const waveTrack = this.timeline.createTrack(
      track,
      height,
      `wave-${this.trackIdPrefix}`
    );
    if (isInitialRender && hasExistingTimeline) {
      // time axis
      const timeAxis = new wavesUI.helpers.TimeAxisLayer({
        height: height,
        color: '#b0b0b0'
      });
      this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);
      this.cursorLayer = new wavesUI.helpers.CursorLayer({
        height: height,
        color: '#c33c54'
      });
      this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    }
    if ('ontouchstart' in window) {
      interface Point {
        x: number;
        y: number;
      }

      let zoomGestureJustEnded = false;

      const pixelToExponent: Function = wavesUI.utils.scales.linear()
        .domain([0, 100]) // 100px => factor 2
        .range([0, 1]);

      const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => {
        return Math.pow(
          Math.pow(p2.x - p1.x, 2) +
          Math.pow(p2.y - p1.y, 2), 0.5);
      };

      const calculateMidPoint: (p1: Point, p2: Point) => Point = (p1, p2) => {
        return {
          x: 0.5 * (p1.x + p2.x),
          y: 0.5 * (p1.y + p2.y)
        };
      };

      const hammertime = new Hammer.Manager(this.trackDiv.nativeElement, {
        recognizers: [
          [Hammer.Pan, { direction: Hammer.DIRECTION_HORIZONTAL }]
        ]
      });

      // it seems HammerJs binds the event to the window?
      // causing these events to propagate to other components?
      const componentTimeline = this.timeline;
      let initialZoom;
      let initialDistance;
      let offsetAtPanStart;
      let startX;
      let isZooming;

      const scroll = (ev) => {
        if (ev.center.x - startX === 0) {
          return;
        }

        if (zoomGestureJustEnded) {
          zoomGestureJustEnded = false;
          console.log('Skip this event: likely a single touch dangling from pinch');
          return;
        }
        componentTimeline.timeContext.offset = offsetAtPanStart +
          componentTimeline.timeContext.timeToPixel.invert(ev.deltaX);
        componentTimeline.tracks.update();
      };

      const zoom = (ev) => {
        if (ev.touches.length < 2) {
          return;
        }

        ev.preventDefault();
        const minZoom = componentTimeline.state.minZoom;
        const maxZoom = componentTimeline.state.maxZoom;
        const p1: Point = {
          x: ev.touches[0].clientX,
          y: ev.touches[0].clientY
        };
        const p2: Point = {
          x: ev.touches[1].clientX,
          y: ev.touches[1].clientY
        };
        const distance = calculateDistance(p1, p2);
        const midPoint = calculateMidPoint(p1, p2);

        const lastCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        const exponent = pixelToExponent(distance - initialDistance);
        const targetZoom = initialZoom * Math.pow(2, exponent);

        componentTimeline.timeContext.zoom =
          Math.min(Math.max(targetZoom, minZoom), maxZoom);

        const newCenterTime =
          componentTimeline.timeContext.timeToPixel.invert(midPoint.x);

        componentTimeline.timeContext.offset += newCenterTime - lastCenterTime;
        componentTimeline.tracks.update();
      };
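
      // Sketch of the intended zoom behaviour above: pixelToExponent
      // maps a 100px change in pinch distance to an exponent of 1, so
      //   targetZoom = initialZoom * 2^((distance - initialDistance) / 100)
      // i.e. each extra 100px of spread doubles the zoom (and, since the
      // linear scale extrapolates, 100px of pinching-in halves it).
      // Re-reading the time under the touch midpoint before and after
      // the zoom change and adding the difference to the offset keeps
      // that point visually stationary.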

      hammertime.on('panstart', (ev) => {
        offsetAtPanStart = componentTimeline.timeContext.offset;
        startX = ev.center.x;
      });
      hammertime.on('panleft', scroll);
      hammertime.on('panright', scroll);

      const element: HTMLElement = this.trackDiv.nativeElement;
      element.addEventListener('touchstart', (e) => {
        if (e.touches.length < 2) {
          return;
        }

        isZooming = true;
        initialZoom = componentTimeline.timeContext.zoom;

        initialDistance = calculateDistance({
          x: e.touches[0].clientX,
          y: e.touches[0].clientY
        }, {
          x: e.touches[1].clientX,
          y: e.touches[1].clientY
        });
      });
      element.addEventListener('touchend', () => {
        if (isZooming) {
          isZooming = false;
          zoomGestureJustEnded = true;
        }
      });
      element.addEventListener('touchmove', zoom);
    }
    // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`);
    // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`);
  }

  estimatePercentile(matrix, percentile) {
    // our sample is not evenly distributed across the whole data set:
    // it is guaranteed to include at least one sample from every
    // column, and could sample some values more than once. But it
    // should be good enough in most cases (todo: show this)
    if (matrix.length === 0) {
      return 0.0;
    }
    const w = matrix.length;
    const h = matrix[0].length;
    const n = w * h;
    const m = (n > 50000 ? 50000 : n); // should base that on the %ile
    let m_per = Math.floor(m / w);
    if (m_per < 1) {
      m_per = 1;
    }

    const sample = [];
    for (let x = 0; x < w; ++x) {
      for (let i = 0; i < m_per; ++i) {
        const y = Math.floor(Math.random() * h);
        const value = matrix[x][y];
        if (!isNaN(value) && value !== Infinity) {
          sample.push(value);
        }
      }
    }
    if (sample.length === 0) {
      console.log('WARNING: No samples gathered, even though we hoped for ' +
        (m_per * w) + ' of them');
      return 0.0;
    }
    sample.sort((a, b) => a - b);
    const ix = Math.floor((sample.length * percentile) / 100);
    console.log('Estimating ' + percentile + '-%ile of ' +
      n + '-sample dataset (' + w + ' x ' + h + ') as value ' + ix +
      ' of sorted ' + sample.length + '-sample subset');
    const estimate = sample[ix];
    console.log('Estimate is: ' + estimate + ' (where min sampled value = ' +
      sample[0] + ' and max = ' + sample[sample.length - 1] + ')');
    return estimate;
  }
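
  // Worked example (illustrative numbers): for a 2000-column x 500-row
  // matrix, n = 1,000,000, so m is capped at 50,000 and m_per = 25
  // random samples are drawn per column; the 95th percentile is then
  // read at index floor(sample.length * 0.95) of the sorted sample.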

  interpolatingMapper(hexColours) {
    const colours = hexColours.map(n => {
      const i = parseInt(n, 16);
      return [ ((i >> 16) & 255) / 255.0,
               ((i >> 8) & 255) / 255.0,
               ((i) & 255) / 255.0 ];
    });
    const last = colours.length - 1;
    return (value => {
      const m = value * last;
      if (m >= last) {
        return colours[last];
      }
      if (m <= 0) {
        return colours[0];
      }
      const base = Math.floor(m);
      const prop0 = base + 1.0 - m;
      const prop1 = m - base;
      const c0 = colours[base];
      const c1 = colours[base + 1];
      return [ c0[0] * prop0 + c1[0] * prop1,
               c0[1] * prop0 + c1[1] * prop1,
               c0[2] * prop0 + c1[2] * prop1 ];
    });
  }
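
  // For example, with the 12 colours used by iceMapper below, a value
  // of 0.5 gives m = 5.5, blending colours[5] and colours[6] in equal
  // proportion; values at or beyond the ends clamp to the end colours.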

  iceMapper() {
    const hexColours = [
      // Based on ColorBrewer ylGnBu
      'ffffff', 'ffff00', 'f7fcf0', 'e0f3db', 'ccebc5', 'a8ddb5',
      '7bccc4', '4eb3d3', '2b8cbe', '0868ac', '084081', '042040'
    ];
    hexColours.reverse();
    return this.interpolatingMapper(hexColours);
  }

  hsv2rgb(h, s, v) { // all values in range [0, 1]
    const i = Math.floor(h * 6);
    const f = h * 6 - i;
    const p = v * (1 - s);
    const q = v * (1 - f * s);
    const t = v * (1 - (1 - f) * s);
    let r = 0, g = 0, b = 0;
    switch (i % 6) {
      case 0: r = v; g = t; b = p; break;
      case 1: r = q; g = v; b = p; break;
      case 2: r = p; g = v; b = t; break;
      case 3: r = p; g = q; b = v; break;
      case 4: r = t; g = p; b = v; break;
      case 5: r = v; g = p; b = q; break;
    }
    return [ r, g, b ];
  }

  greenMapper() {
    const blue = 0.6666;
    const pieslice = 0.3333;
    return (value => {
      const h = blue - value * 2.0 * pieslice;
      const s = 0.5 + value / 2.0;
      const v = value;
      return this.hsv2rgb(h, s, v);
    });
  }

  sunsetMapper() {
    return (value => {
      const r = (value - 0.24) * 2.38;
      const g = (value - 0.64) * 2.777;
      let b = (3.6 * value);
      if (value > 0.277) {
        b = 2.0 - b;
      }
      return [ r, g, b ];
    });
  }

  clearTimeline(): void {
    // loop through layers and remove them; waves-ui provides methods for
    // this but it seems to not work properly
    const timeContextChildren = this.timeline.timeContext._children;
    for (const track of this.timeline.tracks) {
      if (track.layers.length === 0) { continue; }
      const trackLayers = Array.from(track.layers);
      while (trackLayers.length) {
        const layer: Layer = trackLayers.pop();
        if (this.layers.includes(layer)) {
          track.remove(layer);
          this.layers.splice(this.layers.indexOf(layer), 1);
          const index = timeContextChildren.indexOf(layer.timeContext);
          if (index >= 0) {
            timeContextChildren.splice(index, 1);
          }
          layer.destroy();
        }
      }
    }
  }

  renderWaveform(buffer: AudioBuffer): void {
    // const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const height: number =
      this.trackDiv.nativeElement.getBoundingClientRect().height;
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);
    if (this.timeline) {
      // resize
      const width = this.trackDiv.nativeElement.getBoundingClientRect().width;

      this.clearTimeline();

      this.timeline.visibleWidth = width;
      this.timeline.pixelsPerSecond = width / buffer.duration;
      waveTrack.height = height;
    } else {
      this.renderTimeline(buffer.duration);
    }
    this.timeline.timeContext.offset =
      0.5 * this.timeline.timeContext.visibleDuration;

    // time axis
    const timeAxis = new wavesUI.helpers.TimeAxisLayer({
      height: height,
      color: '#b0b0b0'
    });
    this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true);

    const nchannels = buffer.numberOfChannels;
    const totalWaveHeight = height * 0.9;
    const waveHeight = totalWaveHeight / nchannels;

    for (let ch = 0; ch < nchannels; ++ch) {
      console.log('about to construct a waveform layer for channel ' + ch);
      const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, {
        top: (height - totalWaveHeight) / 2 + waveHeight * ch,
        height: waveHeight,
        color: '#0868ac',
        channel: ch
      });
      this.addLayer(waveformLayer, waveTrack, this.timeline.timeContext);
    }

    this.cursorLayer = new wavesUI.helpers.CursorLayer({
      height: height,
      color: '#c33c54'
    });
    this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext);
    this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline);
    waveTrack.render();
    waveTrack.update();

    this.isLoading = false;
    this.ref.markForCheck();
    this.animate();
  }
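
  // Layout note for renderWaveform above: the per-channel layers share
  // 90% of the track height and are centred vertically. For example, a
  // stereo buffer in a 200px track gets two 90px bands, at top offsets
  // of 10px and 100px.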

  renderSpectrogram(buffer: AudioBuffer): void {
    const height: number =
      this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`);

    const spectrogramLayer = new WavesSpectrogramLayer(buffer, {
      top: 0,
      height: height,
      stepSize: 512,
      blockSize: 1024,
      normalise: 'none',
      mapper: this.sunsetMapper()
    });
    this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext);

    this.timeline.tracks.update();
  }

  // TODO refactor - this doesn't belong here
  private renderFeatures(extracted: SimpleResponse, colour: Colour): void {
    if (this.isOneShotExtractor && !this.hasShot) {
      this.featureExtractionSubscription.unsubscribe();
      this.hasShot = true;
    }

    if (!extracted.hasOwnProperty('features')
      || !extracted.hasOwnProperty('outputDescriptor')) {
      return;
    }
    if (!extracted.features.hasOwnProperty('shape')
      || !extracted.features.hasOwnProperty('collected')) {
      return;
    }
    const features: FeatureCollection = (extracted.features as FeatureCollection);
    const outputDescriptor = extracted.outputDescriptor;
    // const height = this.trackDiv.nativeElement.getBoundingClientRect().height / 2;
    const height = this.trackDiv.nativeElement.getBoundingClientRect().height;
    const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`);

    // TODO refactor all of this
    switch (features.shape) {
      case 'vector': {
        const collected = features.collected as VectorFeatures;
        const stepDuration = collected.stepDuration;
        const featureData = collected.data;
        if (featureData.length === 0) {
          return;
        }
        const plotData = [...featureData].map((feature, i) => {
          return {
            cx: i * stepDuration,
            cy: feature
          };
        });
        let min = featureData.reduce((m, f) => Math.min(m, f), Infinity);
        let max = featureData.reduce((m, f) => Math.max(m, f), -Infinity);
        if (min === Infinity) {
          min = 0;
          max = 1;
        }
        console.log('adding line layer: min = ' + min + ', max = ' + max);
        if (min !== min || max !== max) { // i.e. NaN check
          console.log('WARNING: min or max is NaN');
          min = 0;
          max = 1;
        }
        const lineLayer = new wavesUI.helpers.LineLayer(plotData, {
          color: colour,
          height: height,
          yDomain: [ min, max ]
        });
        this.addLayer(
          lineLayer,
          waveTrack,
          this.timeline.timeContext
        );
        const scaleLayer = new wavesUI.helpers.ScaleLayer({
          tickColor: colour,
          textColor: colour,
          height: height,
          yDomain: [ min, max ]
        });
        this.addLayer(
          scaleLayer,
          waveTrack,
          this.timeline.timeContext
        );
        this.highlightLayer = new wavesUI.helpers.HighlightLayer(lineLayer, {
          opacity: 0.7,
          height: height,
          color: '#c33c54',
          labelOffset: 38,
          yDomain: [ min, max ]
        });
        this.addLayer(
          this.highlightLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      case 'list': {
        const featureData = features.collected as FeatureList;
        if (featureData.length === 0) {
          return;
        }
        // TODO look at output descriptor instead of directly inspecting features
        const hasDuration = outputDescriptor.configured.hasDuration;
        const isMarker = !hasDuration
          && outputDescriptor.configured.binCount === 0
          && featureData[0].featureValues == null;
        const isRegion = hasDuration
          && featureData[0].timestamp != null;
        console.log('Have list features: length ' + featureData.length +
          ', isMarker ' + isMarker + ', isRegion ' + isRegion +
          ', hasDuration ' + hasDuration);
        // TODO refactor, this is incomprehensible
        if (isMarker) {
          const plotData = featureData.map(feature => ({
            time: toSeconds(feature.timestamp),
            label: feature.label
          }));
          const featureLayer = new wavesUI.helpers.TickLayer(plotData, {
            height: height,
            color: colour,
            labelPosition: 'bottom',
            shadeSegments: true
          });
          this.addLayer(
            featureLayer,
            waveTrack,
            this.timeline.timeContext
          );
        } else if (isRegion) {
          console.log('Output is of region type');
          const binCount = outputDescriptor.configured.binCount || 0;
          const isBarRegion =
            featureData[0].featureValues.length >= 1 || binCount >= 1;
          const getSegmentArgs = () => {
            if (isBarRegion) {

              // TODO refactor - this is messy
              interface FoldsToNumber<T> {
                reduce(fn: (previousValue: number,
                            currentValue: T,
                            currentIndex: number,
                            array: ArrayLike<T>) => number,
                       initialValue?: number): number;
              }

              // TODO potentially change impl., i.e avoid reduce
              const findMin = <T>(arr: FoldsToNumber<T>,
                                  getElement: (x: T) => number): number => {
                return arr.reduce(
                  (min, val) => Math.min(min, getElement(val)),
                  Infinity
                );
              };

              const findMax = <T>(arr: FoldsToNumber<T>,
                                  getElement: (x: T) => number): number => {
                return arr.reduce(
                  (max, val) => Math.max(max, getElement(val)),
                  -Infinity
                );
              };

              const min = findMin(featureData, (x: Feature) => {
                return findMin(x.featureValues, y => y);
              });

              const max = findMax(featureData, (x: Feature) => {
                return findMax(x.featureValues, y => y);
              });

              const barHeight = 1.0 / height;
              return [
                featureData.reduce((bars, feature) => {
                  const staticProperties = {
                    x: toSeconds(feature.timestamp),
                    width: toSeconds(feature.duration),
                    height: min + barHeight,
                    color: colour,
                    opacity: 0.8
                  };
                  // TODO avoid copying Float32Array to an array - map is problematic here
                  return bars.concat([...feature.featureValues]
                    .map(val => Object.assign({}, staticProperties, {y: val})));
                }, []),
                {yDomain: [min, max + barHeight], height: height} as any
              ];
            } else {
              return [featureData.map(feature => ({
                x: toSeconds(feature.timestamp),
                width: toSeconds(feature.duration),
                color: colour,
                opacity: 0.8
              })), {height: height}];
            }
          };

          const segmentLayer = new wavesUI.helpers.SegmentLayer(
            ...getSegmentArgs()
          );
          this.addLayer(
            segmentLayer,
            waveTrack,
            this.timeline.timeContext
          );
        }
        break;
      }
      case 'matrix': {
        const collected = features.collected as MatrixFeatures;
        const stepDuration = collected.stepDuration;
        // !!! + start time
        const matrixData = collected.data;

        if (matrixData.length === 0) {
          return;
        }

        console.log('matrix data length = ' + matrixData.length);
        console.log('height of first column = ' + matrixData[0].length);
        const targetValue = this.estimatePercentile(matrixData, 95);
        const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0);
        console.log('setting gain to ' + gain);
        const matrixEntity =
          new wavesUI.utils.PrefilledMatrixEntity(matrixData,
                                                  0, // startTime
                                                  stepDuration);
        const matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, {
          gain,
          top: 0,
          height: height,
          normalise: 'none',
          mapper: this.iceMapper()
        });
        this.addLayer(
          matrixLayer,
          waveTrack,
          this.timeline.timeContext
        );
        break;
      }
      default:
        console.log(
          `Cannot render an appropriate layer for feature shape '${features.shape}'`
        );
    }

    this.isLoading = false;
    this.ref.markForCheck();
    this.timeline.tracks.update();
  }
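
  // Gain note for the matrix case above: gain is 1 / (the estimated
  // 95th-percentile cell value), so roughly 95% of cells land in [0, 1]
  // before colour mapping; a zero estimate falls back to unity gain.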

  private animate(): void {
    if (!this.isSeeking) {
      return;
    }

    this.ngZone.runOutsideAngular(() => {
      // listen for time passing...
      const updateSeekingCursor = () => {
        const currentTime = this.audioService.getCurrentTime();
        this.cursorLayer.currentPosition = currentTime;
        this.cursorLayer.update();

        if (typeof(this.highlightLayer) !== 'undefined') {
          this.highlightLayer.currentPosition = currentTime;
          this.highlightLayer.update();
        }

        const currentOffset = this.timeline.timeContext.offset;
        const offsetTimestamp = currentOffset + currentTime;

        const visibleDuration = this.timeline.timeContext.visibleDuration;
        // TODO reduce duplication between directions and make more declarative
        // this kinda logic should also be tested
        const mustPageForward = offsetTimestamp > visibleDuration;
        const mustPageBackward = currentTime < -currentOffset;

        if (mustPageForward) {
          const hasSkippedMultiplePages =
            offsetTimestamp - visibleDuration > visibleDuration;

          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset - visibleDuration;
          this.timeline.tracks.update();
        }

        if (mustPageBackward) {
          const hasSkippedMultiplePages =
            currentTime + visibleDuration < -currentOffset;
          this.timeline.timeContext.offset = hasSkippedMultiplePages ?
            -currentTime + 0.5 * visibleDuration :
            currentOffset + visibleDuration;
          this.timeline.tracks.update();
        }

        if (this.isPlaying) {
          requestAnimationFrame(updateSeekingCursor);
        }
      };
      updateSeekingCursor();
    });
  }

  private addLayer(layer: Layer, track: Track, timeContext: any,
                   isAxis: boolean = false): void {
    timeContext.zoom = 1.0;
    if (!layer.timeContext) {
      layer.setTimeContext(isAxis ?
        timeContext : new wavesUI.core.LayerTimeContext(timeContext));
    }
    track.add(layer);
    this.layers.push(layer);
    layer.render();
    layer.update();
    if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) {
      // re-appending moves the cursor element to the end of the layout,
      // keeping it drawn on top of the layer just added
      track.$layout.appendChild(this.cursorLayer.$el);
    }
  }

  ngOnDestroy(): void {
    if (this.featureExtractionSubscription) {
      this.featureExtractionSubscription.unsubscribe();
    }
    if (this.playingStateSubscription) {
      this.playingStateSubscription.unsubscribe();
    }
    if (this.seekedSubscription) {
      this.seekedSubscription.unsubscribe();
    }
    if (this.onAudioDataSubscription) {
      this.onAudioDataSubscription.unsubscribe();
    }
  }

  seekStart(): void {
    this.zoomOnMouseDown = this.timeline.timeContext.zoom;
    this.offsetOnMouseDown = this.timeline.timeContext.offset;
  }

  seekEnd(x: number): void {
    const hasSameZoom: boolean = this.zoomOnMouseDown ===
      this.timeline.timeContext.zoom;
    const hasSameOffset: boolean = this.offsetOnMouseDown ===
      this.timeline.timeContext.offset;
    if (hasSameZoom && hasSameOffset) {
      this.seek(x);
    }
  }

  seek(x: number): void {
    if (this.timeline) {
      const timeContext: any = this.timeline.timeContext;
      if (this.isSeeking) {
        this.audioService.seekTo(
          timeContext.timeToPixel.invert(x) - timeContext.offset
        );
      }
    }
  }
}
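
// Illustrative usage from a host template (hypothetical: the actual
// parent component is not part of this file). The input names come from
// the @Input declarations above; the bound values are placeholders.
//
//   <ugly-waveform
//     [width]="hostWidth"
//     [timeline]="sharedTimeline"
//     trackIdPrefix="main"
//     [isSubscribedToExtractionService]="true"
//     [isSubscribedToAudioService]="true"
//     [isSeeking]="true">
//   </ugly-waveform>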