dev@10: import { dev@51: Component, OnInit, ViewChild, ElementRef, Input, AfterViewInit, NgZone, dev@51: OnDestroy dev@10: } from '@angular/core'; dev@196: import { dev@196: AudioPlayerService, AudioResource, dev@196: AudioResourceError dev@196: } from "../services/audio-player/audio-player.service"; dev@36: import wavesUI from 'waves-ui'; dev@63: import { dev@64: FeatureExtractionService dev@63: } from "../services/feature-extraction/feature-extraction.service"; dev@51: import {Subscription} from "rxjs"; dev@63: import { dev@63: FeatureCollection, dev@64: FixedSpacedFeatures, SimpleResponse dev@63: } from "piper/HigherLevelUtilities"; dev@53: import {toSeconds} from "piper"; dev@67: import {FeatureList, Feature} from "piper/Feature"; dev@81: import * as Hammer from 'hammerjs'; dev@129: import {WavesSpectrogramLayer} from "../spectrogram/Spectrogram"; dev@8: dev@54: type Layer = any; dev@54: type Track = any; dev@59: type Colour = string; dev@6: dev@6: @Component({ dev@6: selector: 'app-waveform', dev@6: templateUrl: './waveform.component.html', dev@6: styleUrls: ['./waveform.component.css'] dev@6: }) dev@51: export class WaveformComponent implements OnInit, AfterViewInit, OnDestroy { dev@20: dev@8: @ViewChild('track') trackDiv: ElementRef; dev@6: dev@189: @Input() timeline: Timeline; dev@189: @Input() trackIdPrefix: string; dev@196: @Input() set isSubscribedToExtractionService(isSubscribed: boolean) { dev@196: if (isSubscribed) { dev@196: if (this.featureExtractionSubscription) { dev@196: return; dev@196: } dev@16: dev@196: const colours = function* () { dev@196: const circularColours = [ dev@196: 'black', dev@196: 'red', dev@196: 'green', dev@196: 'purple', dev@196: 'orange' dev@196: ]; dev@196: let index = 0; dev@196: const nColours = circularColours.length; dev@196: while (true) { dev@196: yield circularColours[index = ++index % nColours]; dev@196: } dev@196: }(); dev@196: dev@196: this.featureExtractionSubscription = dev@196: this.piperService.featuresExtracted$.subscribe( dev@196: features => { dev@196: this.renderFeatures(features, colours.next().value); dev@196: }); dev@196: } else { dev@196: if (this.featureExtractionSubscription) { dev@196: this.featureExtractionSubscription.unsubscribe(); dev@196: } dev@196: } dev@196: } dev@196: @Input() set isSubscribedToAudioService(isSubscribed: boolean) { dev@196: this._isSubscribedToAudioService = isSubscribed; dev@196: if (isSubscribed) { dev@196: if (this.onAudioDataSubscription) { dev@196: return; dev@196: } dev@196: dev@196: this.onAudioDataSubscription = dev@196: this.audioService.audioLoaded$.subscribe(res => { dev@196: const wasError = (res as AudioResourceError).message != null; dev@196: dev@196: if (wasError) { dev@196: console.warn('No audio, display error?'); dev@196: } else { dev@196: this.audioBuffer = (res as AudioResource).samples; dev@196: } dev@196: }); dev@196: } else { dev@196: if (this.onAudioDataSubscription) { dev@196: this.onAudioDataSubscription.unsubscribe(); dev@196: } dev@196: } dev@196: } dev@196: dev@196: get isSubscribedToAudioService(): boolean { dev@196: return this._isSubscribedToAudioService; dev@196: } dev@196: dev@196: @Input() set isOneShotExtractor(isOneShot: boolean) { dev@196: this._isOneShotExtractor = isOneShot; dev@196: } dev@196: dev@196: get isOneShotExtractor(): boolean { dev@196: return this._isOneShotExtractor; dev@196: } dev@196: dev@196: @Input() set isSeeking(isSeeking: boolean) { dev@196: this._isSeeking = isSeeking; dev@196: if (isSeeking) { dev@196: if (this.seekedSubscription) { dev@196: return; dev@196: } dev@196: if(this.playingStateSubscription) { dev@196: return; dev@196: } dev@196: dev@196: this.seekedSubscription = this.audioService.seeked$.subscribe(() => { dev@196: if (!this.isPlaying) dev@196: this.animate(); dev@196: }); dev@196: this.playingStateSubscription = dev@196: this.audioService.playingStateChange$.subscribe( dev@196: isPlaying => { dev@196: this.isPlaying = isPlaying; dev@196: if (this.isPlaying) dev@196: this.animate(); dev@196: }); dev@196: } else { dev@196: if (this.isPlaying) { dev@196: this.isPlaying = false; dev@196: } dev@196: if (this.playingStateSubscription) { dev@196: this.playingStateSubscription.unsubscribe(); dev@196: } dev@196: if (this.seekedSubscription) { dev@196: this.seekedSubscription.unsubscribe(); dev@196: } dev@196: } dev@196: } dev@196: dev@196: get isSeeking(): boolean { dev@196: return this._isSeeking; dev@196: } dev@196: dev@16: set audioBuffer(buffer: AudioBuffer) { dev@16: this._audioBuffer = buffer || undefined; cannam@117: if (this.audioBuffer) { dev@20: this.renderWaveform(this.audioBuffer); dev@180: // this.renderSpectrogram(this.audioBuffer); cannam@117: } dev@16: } dev@16: dev@16: get audioBuffer(): AudioBuffer { dev@16: return this._audioBuffer; dev@16: } dev@16: dev@196: private _audioBuffer: AudioBuffer; dev@196: private _isSubscribedToAudioService: boolean; dev@196: private _isOneShotExtractor: boolean; dev@196: private _isSeeking: boolean; dev@196: private cursorLayer: any; dev@196: private layers: Layer[]; dev@51: private featureExtractionSubscription: Subscription; dev@53: private playingStateSubscription: Subscription; dev@53: private seekedSubscription: Subscription; dev@196: private onAudioDataSubscription: Subscription; dev@53: private isPlaying: boolean; dev@110: private offsetAtPanStart: number; dev@110: private initialZoom: number; dev@110: private initialDistance: number; dev@155: private zoomOnMouseDown: number; dev@157: private offsetOnMouseDown: number; dev@196: private hasShot: boolean; dev@196: private isLoading: boolean; dev@51: dev@31: constructor(private audioService: AudioPlayerService, dev@51: private piperService: FeatureExtractionService, dev@51: public ngZone: NgZone) { dev@196: this.isSubscribedToAudioService = true; dev@196: this.isSeeking = true; dev@185: this.layers = []; dev@196: this.audioBuffer = undefined; dev@54: this.timeline = undefined; dev@54: this.cursorLayer = undefined; dev@53: this.isPlaying = false; dev@196: this.isLoading = true; dev@51: } dev@51: dev@53: ngOnInit() { dev@53: } dev@10: dev@10: ngAfterViewInit(): void { dev@189: this.trackIdPrefix = this.trackIdPrefix || "default"; dev@196: if (this.timeline) { dev@196: this.renderTimeline(null, true, true); dev@196: } else { dev@196: this.renderTimeline(); dev@196: } dev@20: } dev@20: dev@196: renderTimeline(duration: number = 1.0, dev@196: useExistingDuration: boolean = false, dev@196: isInitialRender: boolean = false): Timeline { dev@18: const track: HTMLElement = this.trackDiv.nativeElement; dev@20: track.innerHTML = ""; dev@18: const height: number = track.getBoundingClientRect().height; dev@18: const width: number = track.getBoundingClientRect().width; dev@18: const pixelsPerSecond = width / duration; dev@196: const hasExistingTimeline = this.timeline instanceof wavesUI.core.Timeline; dev@196: dev@196: if (hasExistingTimeline) { dev@196: if (!useExistingDuration) { dev@196: this.timeline.pixelsPerSecond = pixelsPerSecond; dev@196: this.timeline.visibleWidth = width; dev@196: } dev@180: } else { dev@180: this.timeline = new wavesUI.core.Timeline(pixelsPerSecond, width); dev@180: } dev@196: const waveTrack = this.timeline.createTrack( dev@196: track, dev@196: height, dev@196: `wave-${this.trackIdPrefix}` dev@196: ); dev@196: if (isInitialRender && hasExistingTimeline) { dev@196: // time axis dev@196: const timeAxis = new wavesUI.helpers.TimeAxisLayer({ dev@196: height: height, dev@196: color: '#b0b0b0' dev@196: }); dev@196: this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true); dev@196: this.cursorLayer = new wavesUI.helpers.CursorLayer({ dev@196: height: height dev@196: }); dev@196: this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext); dev@196: } dev@196: if ('ontouchstart' in window) { dev@196: interface Point { dev@196: x: number; dev@196: y: number; dev@196: } dev@196: dev@196: let zoomGestureJustEnded: boolean = false; dev@196: dev@196: const pixelToExponent: Function = wavesUI.utils.scales.linear() dev@196: .domain([0, 100]) // 100px => factor 2 dev@196: .range([0, 1]); dev@196: dev@196: const calculateDistance: (p1: Point, p2: Point) => number = (p1, p2) => { dev@196: return Math.pow( dev@196: Math.pow(p2.x - p1.x, 2) + dev@196: Math.pow(p2.y - p1.y, 2), 0.5); dev@196: }; dev@196: dev@196: const hammertime = new Hammer(this.trackDiv.nativeElement); dev@196: const scroll = (ev) => { dev@196: if (zoomGestureJustEnded) { dev@196: zoomGestureJustEnded = false; dev@196: console.log("Skip this event: likely a single touch dangling from pinch"); dev@196: return; dev@196: } dev@196: this.timeline.timeContext.offset = this.offsetAtPanStart + dev@196: this.timeline.timeContext.timeToPixel.invert(ev.deltaX); dev@196: this.timeline.tracks.update(); dev@196: }; dev@196: dev@196: const zoom = (ev) => { dev@196: const minZoom = this.timeline.state.minZoom; dev@196: const maxZoom = this.timeline.state.maxZoom; dev@196: const distance = calculateDistance({ dev@196: x: ev.pointers[0].clientX, dev@196: y: ev.pointers[0].clientY dev@196: }, { dev@196: x: ev.pointers[1].clientX, dev@196: y: ev.pointers[1].clientY dev@196: }); dev@196: dev@196: const lastCenterTime = dev@196: this.timeline.timeContext.timeToPixel.invert(ev.center.x); dev@196: dev@196: const exponent = pixelToExponent(distance - this.initialDistance); dev@196: const targetZoom = this.initialZoom * Math.pow(2, exponent); dev@196: dev@196: this.timeline.timeContext.zoom = dev@196: Math.min(Math.max(targetZoom, minZoom), maxZoom); dev@196: dev@196: const newCenterTime = dev@196: this.timeline.timeContext.timeToPixel.invert(ev.center.x); dev@196: dev@196: this.timeline.timeContext.offset += newCenterTime - lastCenterTime; dev@196: this.timeline.tracks.update(); dev@196: }; dev@196: hammertime.get('pinch').set({ enable: true }); dev@196: hammertime.on('panstart', () => { dev@196: this.offsetAtPanStart = this.timeline.timeContext.offset; dev@196: }); dev@196: hammertime.on('panleft', scroll); dev@196: hammertime.on('panright', scroll); dev@196: hammertime.on('pinchstart', (e) => { dev@196: this.initialZoom = this.timeline.timeContext.zoom; dev@196: dev@196: this.initialDistance = calculateDistance({ dev@196: x: e.pointers[0].clientX, dev@196: y: e.pointers[0].clientY dev@196: }, { dev@196: x: e.pointers[1].clientX, dev@196: y: e.pointers[1].clientY dev@196: }); dev@196: }); dev@196: hammertime.on('pinch', zoom); dev@196: hammertime.on('pinchend', () => { dev@196: zoomGestureJustEnded = true; dev@196: }); dev@196: } dev@189: // this.timeline.createTrack(track, height/2, `wave-${this.trackIdPrefix}`); dev@189: // this.timeline.createTrack(track, height/2, `grid-${this.trackIdPrefix}`); dev@54: } dev@18: cannam@108: estimatePercentile(matrix, percentile) { cannam@108: // our sample is not evenly distributed across the whole data set: cannam@108: // it is guaranteed to include at least one sample from every cannam@108: // column, and could sample some values more than once. But it cannam@108: // should be good enough in most cases (todo: show this) cannam@109: if (matrix.length === 0) { cannam@109: return 0.0; cannam@109: } cannam@108: const w = matrix.length; cannam@108: const h = matrix[0].length; cannam@108: const n = w * h; cannam@109: const m = (n > 50000 ? 50000 : n); // should base that on the %ile cannam@108: let m_per = Math.floor(m / w); cannam@108: if (m_per < 1) m_per = 1; cannam@108: let sample = []; cannam@108: for (let x = 0; x < w; ++x) { cannam@108: for (let i = 0; i < m_per; ++i) { cannam@108: const y = Math.floor(Math.random() * h); cannam@109: const value = matrix[x][y]; cannam@109: if (!isNaN(value) && value !== Infinity) { cannam@109: sample.push(value); cannam@109: } cannam@108: } cannam@108: } cannam@109: if (sample.length === 0) { cannam@109: console.log("WARNING: No samples gathered, even though we hoped for " + cannam@109: (m_per * w) + " of them"); cannam@109: return 0.0; cannam@109: } cannam@108: sample.sort((a,b) => { return a - b; }); cannam@108: const ix = Math.floor((sample.length * percentile) / 100); cannam@108: console.log("Estimating " + percentile + "-%ile of " + cannam@108: n + "-sample dataset (" + w + " x " + h + ") as value " + ix + cannam@108: " of sorted " + sample.length + "-sample subset"); cannam@108: const estimate = sample[ix]; cannam@108: console.log("Estimate is: " + estimate + " (where min sampled value = " + cannam@108: sample[0] + " and max = " + sample[sample.length-1] + ")"); cannam@108: return estimate; cannam@108: } cannam@108: cannam@108: interpolatingMapper(hexColours) { cannam@108: const colours = hexColours.map(n => { cannam@108: const i = parseInt(n, 16); cannam@118: return [ ((i >> 16) & 255) / 255.0, cannam@118: ((i >> 8) & 255) / 255.0, cannam@118: ((i) & 255) / 255.0 ]; cannam@108: }); cannam@108: const last = colours.length - 1; cannam@108: return (value => { cannam@108: const m = value * last; cannam@108: if (m >= last) { cannam@108: return colours[last]; cannam@108: } cannam@108: if (m <= 0) { cannam@108: return colours[0]; cannam@108: } cannam@108: const base = Math.floor(m); cannam@108: const prop0 = base + 1.0 - m; cannam@108: const prop1 = m - base; cannam@108: const c0 = colours[base]; cannam@108: const c1 = colours[base+1]; cannam@118: return [ c0[0] * prop0 + c1[0] * prop1, cannam@118: c0[1] * prop0 + c1[1] * prop1, cannam@118: c0[2] * prop0 + c1[2] * prop1 ]; cannam@108: }); cannam@108: } dev@110: cannam@108: iceMapper() { dev@110: let hexColours = [ cannam@108: // Based on ColorBrewer ylGnBu cannam@108: "ffffff", "ffff00", "f7fcf0", "e0f3db", "ccebc5", "a8ddb5", cannam@108: "7bccc4", "4eb3d3", "2b8cbe", "0868ac", "084081", "042040" cannam@108: ]; cannam@108: hexColours.reverse(); cannam@108: return this.interpolatingMapper(hexColours); cannam@108: } dev@110: cannam@118: hsv2rgb(h, s, v) { // all values in range [0, 1] cannam@118: const i = Math.floor(h * 6); cannam@118: const f = h * 6 - i; cannam@118: const p = v * (1 - s); cannam@118: const q = v * (1 - f * s); cannam@118: const t = v * (1 - (1 - f) * s); cannam@118: let r = 0, g = 0, b = 0; cannam@118: switch (i % 6) { cannam@118: case 0: r = v, g = t, b = p; break; cannam@118: case 1: r = q, g = v, b = p; break; cannam@118: case 2: r = p, g = v, b = t; break; cannam@118: case 3: r = p, g = q, b = v; break; cannam@118: case 4: r = t, g = p, b = v; break; cannam@118: case 5: r = v, g = p, b = q; break; cannam@118: } cannam@118: return [ r, g, b ]; cannam@118: } dev@122: cannam@118: greenMapper() { cannam@118: const blue = 0.6666; cannam@118: const pieslice = 0.3333; cannam@118: return (value => { cannam@118: const h = blue - value * 2.0 * pieslice; cannam@118: const s = 0.5 + value / 2.0; cannam@118: const v = value; cannam@118: return this.hsv2rgb(h, s, v); cannam@118: }); cannam@118: } cannam@118: cannam@118: sunsetMapper() { cannam@118: return (value => { cannam@118: let r = (value - 0.24) * 2.38; cannam@118: let g = (value - 0.64) * 2.777; cannam@118: let b = (3.6 * value); cannam@118: if (value > 0.277) b = 2.0 - b; cannam@118: return [ r, g, b ]; cannam@118: }); cannam@118: } cannam@118: dev@122: clearTimeline(): void { dev@122: // loop through layers and remove them, waves-ui provides methods for this but it seems to not work properly dev@122: const timeContextChildren = this.timeline.timeContext._children; dev@122: for (let track of this.timeline.tracks) { dev@122: if (track.layers.length === 0) { continue; } dev@122: const trackLayers = Array.from(track.layers); dev@122: while (trackLayers.length) { dev@122: let layer: Layer = trackLayers.pop(); dev@185: if (this.layers.includes(layer)) { dev@185: track.remove(layer); dev@185: this.layers.splice(this.layers.indexOf(layer), 1); dev@185: const index = timeContextChildren.indexOf(layer.timeContext); dev@185: if (index >= 0) { dev@185: timeContextChildren.splice(index, 1); dev@185: } dev@185: layer.destroy(); dev@122: } dev@122: } dev@122: } dev@122: } dev@122: dev@54: renderWaveform(buffer: AudioBuffer): void { dev@180: // const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2; dev@180: const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height; dev@189: const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`); dev@54: if (this.timeline) { dev@54: // resize dev@54: const width = this.trackDiv.nativeElement.getBoundingClientRect().width; dev@55: dev@122: this.clearTimeline(); dev@59: dev@54: this.timeline.visibleWidth = width; dev@54: this.timeline.pixelsPerSecond = width / buffer.duration; cannam@117: waveTrack.height = height; dev@54: } else { dev@180: this.renderTimeline(buffer.duration) dev@54: } dev@83: this.timeline.timeContext.offset = 0.5 * this.timeline.timeContext.visibleDuration; cannam@106: dev@18: // time axis dev@18: const timeAxis = new wavesUI.helpers.TimeAxisLayer({ dev@18: height: height, cannam@106: color: '#b0b0b0' dev@18: }); cannam@117: this.addLayer(timeAxis, waveTrack, this.timeline.timeContext, true); dev@18: cannam@161: const nchannels = buffer.numberOfChannels; cannam@161: const totalWaveHeight = height * 0.9; cannam@161: const waveHeight = totalWaveHeight / nchannels; dev@189: cannam@161: for (let ch = 0; ch < nchannels; ++ch) { cannam@161: console.log("about to construct a waveform layer for channel " + ch); cannam@161: const waveformLayer = new wavesUI.helpers.WaveformLayer(buffer, { cannam@161: top: (height - totalWaveHeight)/2 + waveHeight * ch, cannam@161: height: waveHeight, cannam@161: color: 'darkblue', cannam@161: channel: ch cannam@161: }); cannam@161: this.addLayer(waveformLayer, waveTrack, this.timeline.timeContext); cannam@161: } cannam@117: dev@53: this.cursorLayer = new wavesUI.helpers.CursorLayer({ dev@31: height: height dev@31: }); cannam@117: this.addLayer(this.cursorLayer, waveTrack, this.timeline.timeContext); dev@51: this.timeline.state = new wavesUI.states.CenteredZoomState(this.timeline); cannam@117: waveTrack.render(); cannam@117: waveTrack.update(); dev@81: dev@196: this.isLoading = false; dev@53: this.animate(); dev@53: } dev@53: cannam@117: renderSpectrogram(buffer: AudioBuffer): void { cannam@117: const height: number = this.trackDiv.nativeElement.getBoundingClientRect().height / 2; dev@189: const gridTrack = this.timeline.getTrackById(`grid-${this.trackIdPrefix}`); cannam@117: dev@129: const spectrogramLayer = new WavesSpectrogramLayer(buffer, { cannam@118: top: height * 0.05, cannam@117: height: height * 0.9, cannam@117: stepSize: 512, dev@129: blockSize: 1024, cannam@118: normalise: 'none', cannam@118: mapper: this.sunsetMapper() cannam@117: }); cannam@117: this.addLayer(spectrogramLayer, gridTrack, this.timeline.timeContext); cannam@117: cannam@117: this.timeline.tracks.update(); cannam@117: } cannam@117: dev@53: // TODO refactor - this doesn't belong here dev@64: private renderFeatures(extracted: SimpleResponse, colour: Colour): void { dev@196: if (this.isOneShotExtractor && !this.hasShot) { dev@196: this.featureExtractionSubscription.unsubscribe(); dev@196: this.hasShot = true; dev@196: } dev@196: dev@64: if (!extracted.hasOwnProperty('features') || !extracted.hasOwnProperty('outputDescriptor')) return; dev@64: if (!extracted.features.hasOwnProperty('shape') || !extracted.features.hasOwnProperty('data')) return; dev@64: const features: FeatureCollection = (extracted.features as FeatureCollection); dev@64: const outputDescriptor = extracted.outputDescriptor; dev@196: // const height = this.trackDiv.nativeElement.getBoundingClientRect().height / 2; dev@196: const height = this.trackDiv.nativeElement.getBoundingClientRect().height; dev@189: const waveTrack = this.timeline.getTrackById(`wave-${this.trackIdPrefix}`); dev@64: dev@64: // TODO refactor all of this dev@63: switch (features.shape) { dev@64: case 'vector': { dev@63: const stepDuration = (features as FixedSpacedFeatures).stepDuration; dev@63: const featureData = (features.data as Float32Array); dev@68: if (featureData.length === 0) return; dev@63: const normalisationFactor = 1.0 / dev@63: featureData.reduce( dev@63: (currentMax, feature) => Math.max(currentMax, feature), dev@63: -Infinity dev@63: ); dev@67: dev@63: const plotData = [...featureData].map((feature, i) => { dev@63: return { dev@63: cx: i * stepDuration, dev@63: cy: feature * normalisationFactor dev@63: }; dev@63: }); dev@67: dev@105: let lineLayer = new wavesUI.helpers.LineLayer(plotData, { dev@63: color: colour, dev@64: height: height dev@63: }); dev@122: this.addLayer( dev@105: lineLayer, cannam@117: waveTrack, dev@63: this.timeline.timeContext dev@122: ); dev@63: break; dev@64: } dev@64: case 'list': { dev@64: const featureData = (features.data as FeatureList); dev@68: if (featureData.length === 0) return; dev@64: // TODO look at output descriptor instead of directly inspecting features dev@64: const hasDuration = outputDescriptor.configured.hasDuration; dev@64: const isMarker = !hasDuration dev@64: && outputDescriptor.configured.binCount === 0 dev@64: && featureData[0].featureValues == null; dev@64: const isRegion = hasDuration dev@64: && featureData[0].timestamp != null; cannam@149: console.log("Have list features: length " + featureData.length + cannam@149: ", isMarker " + isMarker + ", isRegion " + isRegion + cannam@149: ", hasDuration " + hasDuration); dev@64: // TODO refactor, this is incomprehensible dev@64: if (isMarker) { dev@64: const plotData = featureData.map(feature => { cannam@152: return { cannam@152: time: toSeconds(feature.timestamp), cannam@152: label: feature.label cannam@152: } dev@64: }); cannam@149: let featureLayer = new wavesUI.helpers.TickLayer(plotData, { dev@64: height: height, dev@64: color: colour, cannam@152: labelPosition: 'bottom', cannam@152: shadeSegments: true dev@64: }); dev@122: this.addLayer( cannam@149: featureLayer, cannam@117: waveTrack, dev@64: this.timeline.timeContext dev@122: ); dev@64: } else if (isRegion) { cannam@149: console.log("Output is of region type"); dev@67: const binCount = outputDescriptor.configured.binCount || 0; dev@67: const isBarRegion = featureData[0].featureValues.length >= 1 || binCount >= 1 ; dev@64: const getSegmentArgs = () => { dev@64: if (isBarRegion) { dev@64: dev@67: // TODO refactor - this is messy dev@67: interface FoldsToNumber { dev@67: reduce(fn: (previousValue: number, dev@67: currentValue: T, dev@67: currentIndex: number, dev@67: array: ArrayLike) => number, dev@67: initialValue?: number): number; dev@67: } dev@64: dev@67: // TODO potentially change impl., i.e avoid reduce dev@67: const findMin = (arr: FoldsToNumber, getElement: (x: T) => number): number => { dev@67: return arr.reduce((min, val) => Math.min(min, getElement(val)), Infinity); dev@67: }; dev@67: dev@67: const findMax = (arr: FoldsToNumber, getElement: (x: T) => number): number => { dev@67: return arr.reduce((min, val) => Math.max(min, getElement(val)), -Infinity); dev@67: }; dev@67: dev@67: const min = findMin(featureData, (x: Feature) => { dev@67: return findMin(x.featureValues, y => y); dev@67: }); dev@67: dev@67: const max = findMax(featureData, (x: Feature) => { dev@67: return findMax(x.featureValues, y => y); dev@67: }); dev@67: dev@67: const barHeight = 1.0 / height; dev@64: return [ dev@67: featureData.reduce((bars, feature) => { dev@67: const staticProperties = { dev@64: x: toSeconds(feature.timestamp), dev@64: width: toSeconds(feature.duration), dev@67: height: min + barHeight, dev@64: color: colour, dev@64: opacity: 0.8 dev@67: }; dev@67: // TODO avoid copying Float32Array to an array - map is problematic here dev@67: return bars.concat([...feature.featureValues] dev@67: .map(val => Object.assign({}, staticProperties, {y: val}))) dev@67: }, []), dev@67: {yDomain: [min, max + barHeight], height: height} as any dev@67: ]; dev@64: } else { dev@64: return [featureData.map(feature => { dev@64: return { dev@64: x: toSeconds(feature.timestamp), dev@64: width: toSeconds(feature.duration), dev@64: color: colour, dev@64: opacity: 0.8 dev@64: } dev@64: }), {height: height}]; dev@64: } dev@64: }; dev@64: dev@64: let segmentLayer = new wavesUI.helpers.SegmentLayer( dev@64: ...getSegmentArgs() dev@64: ); dev@122: this.addLayer( dev@64: segmentLayer, cannam@117: waveTrack, dev@64: this.timeline.timeContext dev@122: ); dev@64: } dev@64: break; dev@64: } cannam@106: case 'matrix': { cannam@108: const stepDuration = (features as FixedSpacedFeatures).stepDuration; cannam@120: //!!! + start time cannam@108: const matrixData = (features.data as Float32Array[]); cannam@108: if (matrixData.length === 0) return; cannam@109: console.log("matrix data length = " + matrixData.length); cannam@109: console.log("height of first column = " + matrixData[0].length); cannam@109: const targetValue = this.estimatePercentile(matrixData, 95); cannam@108: const gain = (targetValue > 0.0 ? (1.0 / targetValue) : 1.0); cannam@108: console.log("setting gain to " + gain); cannam@120: const matrixEntity = cannam@120: new wavesUI.utils.PrefilledMatrixEntity(matrixData, cannam@120: 0, // startTime cannam@120: stepDuration); cannam@108: let matrixLayer = new wavesUI.helpers.MatrixLayer(matrixEntity, { cannam@108: gain, cannam@118: height: height * 0.9, cannam@118: top: height * 0.05, cannam@109: normalise: 'none', cannam@108: mapper: this.iceMapper() cannam@108: }); dev@122: this.addLayer( cannam@108: matrixLayer, cannam@117: waveTrack, cannam@108: this.timeline.timeContext dev@122: ); cannam@108: break; cannam@106: } dev@67: default: cannam@106: console.log("Cannot render an appropriate layer for feature shape '" + cannam@106: features.shape + "'"); dev@63: } dev@59: dev@196: this.isLoading = false; dev@56: this.timeline.tracks.update(); dev@53: } dev@53: dev@53: private animate(): void { dev@196: if (!this.isSeeking) return; dev@196: dev@31: this.ngZone.runOutsideAngular(() => { dev@31: // listen for time passing... dev@31: const updateSeekingCursor = () => { dev@53: const currentTime = this.audioService.getCurrentTime(); dev@53: this.cursorLayer.currentPosition = currentTime; dev@53: this.cursorLayer.update(); dev@53: dev@53: const currentOffset = this.timeline.timeContext.offset; dev@53: const offsetTimestamp = currentOffset dev@53: + currentTime; dev@53: dev@53: const visibleDuration = this.timeline.timeContext.visibleDuration; dev@53: // TODO reduce duplication between directions and make more declarative dev@53: // this kinda logic should also be tested dev@53: const mustPageForward = offsetTimestamp > visibleDuration; dev@53: const mustPageBackward = currentTime < -currentOffset; dev@53: dev@53: if (mustPageForward) { dev@53: const hasSkippedMultiplePages = offsetTimestamp - visibleDuration > visibleDuration; dev@53: cannam@106: this.timeline.timeContext.offset = hasSkippedMultiplePages ? cannam@106: -currentTime + 0.5 * visibleDuration : cannam@106: currentOffset - visibleDuration; dev@51: this.timeline.tracks.update(); dev@34: } dev@53: dev@53: if (mustPageBackward) { dev@53: const hasSkippedMultiplePages = currentTime + visibleDuration < -currentOffset; cannam@106: this.timeline.timeContext.offset = hasSkippedMultiplePages ? cannam@106: -currentTime + 0.5 * visibleDuration : cannam@106: currentOffset + visibleDuration; dev@51: this.timeline.tracks.update(); dev@34: } dev@53: dev@53: if (this.isPlaying) dev@53: requestAnimationFrame(updateSeekingCursor); dev@31: }; dev@31: updateSeekingCursor(); dev@31: }); dev@6: } dev@16: dev@122: private addLayer(layer: Layer, track: Track, timeContext: any, isAxis: boolean = false): void { dev@54: timeContext.zoom = 1.0; dev@54: if (!layer.timeContext) { dev@54: layer.setTimeContext(isAxis ? dev@54: timeContext : new wavesUI.core.LayerTimeContext(timeContext)); dev@54: } dev@54: track.add(layer); dev@185: this.layers.push(layer); dev@54: layer.render(); dev@54: layer.update(); dev@122: if (this.cursorLayer && track.$layout.contains(this.cursorLayer.$el)) { dev@112: track.$layout.appendChild(this.cursorLayer.$el); dev@112: } dev@59: } dev@59: dev@59: private static changeColour(layer: Layer, colour: string): void { dev@59: const butcherShapes = (shape) => { dev@59: shape.install({color: () => colour}); dev@59: shape.params.color = colour; dev@59: shape.update(layer._renderingContext, layer.data); dev@59: }; dev@59: dev@59: layer._$itemCommonShapeMap.forEach(butcherShapes); dev@59: layer._$itemShapeMap.forEach(butcherShapes); dev@59: layer.render(); dev@59: layer.update(); dev@54: } dev@54: dev@51: ngOnDestroy(): void { dev@196: if (this.featureExtractionSubscription) dev@196: this.featureExtractionSubscription.unsubscribe(); dev@196: if (this.playingStateSubscription) dev@196: this.playingStateSubscription.unsubscribe(); dev@196: if (this.seekedSubscription) dev@196: this.seekedSubscription.unsubscribe(); dev@196: if (this.onAudioDataSubscription) dev@196: this.onAudioDataSubscription.unsubscribe(); dev@51: } dev@154: dev@155: seekStart(): void { dev@155: this.zoomOnMouseDown = this.timeline.timeContext.zoom; dev@157: this.offsetOnMouseDown = this.timeline.timeContext.offset; dev@155: } dev@155: dev@155: seekEnd(x: number): void { dev@157: const hasSameZoom: boolean = this.zoomOnMouseDown === dev@157: this.timeline.timeContext.zoom; dev@157: const hasSameOffset: boolean = this.offsetOnMouseDown === dev@157: this.timeline.timeContext.offset; dev@157: if (hasSameZoom && hasSameOffset) { dev@155: this.seek(x); dev@155: } dev@155: } dev@155: dev@154: seek(x: number): void { dev@154: if (this.timeline) { dev@154: const timeContext: any = this.timeline.timeContext; dev@196: if (this.isSeeking) { dev@196: this.audioService.seekTo( dev@196: timeContext.timeToPixel.invert(x)- timeContext.offset dev@196: ); dev@196: } dev@154: } dev@154: } dev@6: }